1/*
2 * Copyright (c) 2005, 2016, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#include "precompiled.hpp"
26#include "ci/ciArrayKlass.hpp"
27#include "ci/ciEnv.hpp"
28#include "ci/ciKlass.hpp"
29#include "ci/ciMethod.hpp"
30#include "classfile/javaClasses.inline.hpp"
31#include "code/dependencies.hpp"
32#include "compiler/compileLog.hpp"
33#include "memory/resourceArea.hpp"
34#include "oops/oop.inline.hpp"
35#include "oops/objArrayKlass.hpp"
36#include "runtime/handles.hpp"
37#include "runtime/handles.inline.hpp"
38#include "runtime/thread.inline.hpp"
39#include "utilities/copy.hpp"
40
41
42#ifdef ASSERT
43static bool must_be_in_vm() {
44  Thread* thread = Thread::current();
45  if (thread->is_Java_thread())
46    return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
47  else
48    return true;  //something like this: thread->is_VM_thread();
49}
50#endif //ASSERT
51
52void Dependencies::initialize(ciEnv* env) {
53  Arena* arena = env->arena();
54  _oop_recorder = env->oop_recorder();
55  _log = env->log();
56  _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
57#if INCLUDE_JVMCI
58  _using_dep_values = false;
59#endif
60  DEBUG_ONLY(_deps[end_marker] = NULL);
61  for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
62    _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
63  }
64  _content_bytes = NULL;
65  _size_in_bytes = (size_t)-1;
66
67  assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
68}
69
70void Dependencies::assert_evol_method(ciMethod* m) {
71  assert_common_1(evol_method, m);
72}
73
74void Dependencies::assert_leaf_type(ciKlass* ctxk) {
75  if (ctxk->is_array_klass()) {
76    // As a special case, support this assertion on an array type,
77    // which reduces to an assertion on its element type.
78    // Note that this cannot be done with assertions that
79    // relate to concreteness or abstractness.
80    ciType* elemt = ctxk->as_array_klass()->base_element_type();
81    if (!elemt->is_instance_klass())  return;   // Ex:  int[][]
82    ctxk = elemt->as_instance_klass();
83    //if (ctxk->is_final())  return;            // Ex:  String[][]
84  }
85  check_ctxk(ctxk);
86  assert_common_1(leaf_type, ctxk);
87}
88
89void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
90  check_ctxk_abstract(ctxk);
91  assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
92}
93
94void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
95  check_ctxk_abstract(ctxk);
96  assert_common_1(abstract_with_no_concrete_subtype, ctxk);
97}
98
99void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
100  check_ctxk_concrete(ctxk);
101  assert_common_1(concrete_with_no_concrete_subtype, ctxk);
102}
103
104void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
105  check_ctxk(ctxk);
106  assert_common_2(unique_concrete_method, ctxk, uniqm);
107}
108
109void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
110  check_ctxk(ctxk);
111  assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
112}
113
114void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
115  check_ctxk(ctxk);
116  assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
117}
118
119void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
120  check_ctxk(ctxk);
121  assert_common_1(no_finalizable_subclasses, ctxk);
122}
123
124void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
125  assert_common_2(call_site_target_value, call_site, method_handle);
126}
127
128#if INCLUDE_JVMCI
129
130Dependencies::Dependencies(Arena* arena, OopRecorder* oop_recorder, CompileLog* log) {
131  _oop_recorder = oop_recorder;
132  _log = log;
133  _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
134  _using_dep_values = true;
135  DEBUG_ONLY(_dep_values[end_marker] = NULL);
136  for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
137    _dep_values[i] = new(arena) GrowableArray<DepValue>(arena, 10, 0, DepValue());
138  }
139  _content_bytes = NULL;
140  _size_in_bytes = (size_t)-1;
141
142  assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
143}
144
145void Dependencies::assert_evol_method(Method* m) {
146  assert_common_1(evol_method, DepValue(_oop_recorder, m));
147}
148
149void Dependencies::assert_has_no_finalizable_subclasses(Klass* ctxk) {
150  check_ctxk(ctxk);
151  assert_common_1(no_finalizable_subclasses, DepValue(_oop_recorder, ctxk));
152}
153
154void Dependencies::assert_leaf_type(Klass* ctxk) {
155  if (ctxk->is_array_klass()) {
156    // As a special case, support this assertion on an array type,
157    // which reduces to an assertion on its element type.
158    // Note that this cannot be done with assertions that
159    // relate to concreteness or abstractness.
160    BasicType elemt = ArrayKlass::cast(ctxk)->element_type();
161    if (is_java_primitive(elemt))  return;   // Ex:  int[][]
162    ctxk = ObjArrayKlass::cast(ctxk)->bottom_klass();
163    //if (ctxk->is_final())  return;            // Ex:  String[][]
164  }
165  check_ctxk(ctxk);
166  assert_common_1(leaf_type, DepValue(_oop_recorder, ctxk));
167}
168
169void Dependencies::assert_abstract_with_unique_concrete_subtype(Klass* ctxk, Klass* conck) {
170  check_ctxk_abstract(ctxk);
171  DepValue ctxk_dv(_oop_recorder, ctxk);
172  DepValue conck_dv(_oop_recorder, conck, &ctxk_dv);
173  assert_common_2(abstract_with_unique_concrete_subtype, ctxk_dv, conck_dv);
174}
175
176void Dependencies::assert_unique_concrete_method(Klass* ctxk, Method* uniqm) {
177  check_ctxk(ctxk);
178  assert_common_2(unique_concrete_method, DepValue(_oop_recorder, ctxk), DepValue(_oop_recorder, uniqm));
179}
180
181void Dependencies::assert_call_site_target_value(oop call_site, oop method_handle) {
182  assert_common_2(call_site_target_value, DepValue(_oop_recorder, JNIHandles::make_local(call_site)), DepValue(_oop_recorder, JNIHandles::make_local(method_handle)));
183}
184
185#endif // INCLUDE_JVMCI
186
187
188// Helper function.  If we are adding a new dep. under ctxk2,
189// try to find an old dep. under a broader* ctxk1.  If there is
190//
191bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
192                                    int ctxk_i, ciKlass* ctxk2) {
193  ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
194  if (ctxk2->is_subtype_of(ctxk1)) {
195    return true;  // success, and no need to change
196  } else if (ctxk1->is_subtype_of(ctxk2)) {
197    // new context class fully subsumes previous one
198    deps->at_put(ctxk_i, ctxk2);
199    return true;
200  } else {
201    return false;
202  }
203}
204
205void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
206  assert(dep_args(dept) == 1, "sanity");
207  log_dependency(dept, x);
208  GrowableArray<ciBaseObject*>* deps = _deps[dept];
209
210  // see if the same (or a similar) dep is already recorded
211  if (note_dep_seen(dept, x)) {
212    assert(deps->find(x) >= 0, "sanity");
213  } else {
214    deps->append(x);
215  }
216}
217
218void Dependencies::assert_common_2(DepType dept,
219                                   ciBaseObject* x0, ciBaseObject* x1) {
220  assert(dep_args(dept) == 2, "sanity");
221  log_dependency(dept, x0, x1);
222  GrowableArray<ciBaseObject*>* deps = _deps[dept];
223
224  // see if the same (or a similar) dep is already recorded
225  bool has_ctxk = has_explicit_context_arg(dept);
226  if (has_ctxk) {
227    assert(dep_context_arg(dept) == 0, "sanity");
228    if (note_dep_seen(dept, x1)) {
229      // look in this bucket for redundant assertions
230      const int stride = 2;
231      for (int i = deps->length(); (i -= stride) >= 0; ) {
232        ciBaseObject* y1 = deps->at(i+1);
233        if (x1 == y1) {  // same subject; check the context
234          if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
235            return;
236          }
237        }
238      }
239    }
240  } else {
241    if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
242      // look in this bucket for redundant assertions
243      const int stride = 2;
244      for (int i = deps->length(); (i -= stride) >= 0; ) {
245        ciBaseObject* y0 = deps->at(i+0);
246        ciBaseObject* y1 = deps->at(i+1);
247        if (x0 == y0 && x1 == y1) {
248          return;
249        }
250      }
251    }
252  }
253
254  // append the assertion in the correct bucket:
255  deps->append(x0);
256  deps->append(x1);
257}
258
259void Dependencies::assert_common_3(DepType dept,
260                                   ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
261  assert(dep_context_arg(dept) == 0, "sanity");
262  assert(dep_args(dept) == 3, "sanity");
263  log_dependency(dept, ctxk, x, x2);
264  GrowableArray<ciBaseObject*>* deps = _deps[dept];
265
266  // try to normalize an unordered pair:
267  bool swap = false;
268  switch (dept) {
269  case abstract_with_exclusive_concrete_subtypes_2:
270    swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
271    break;
272  case exclusive_concrete_methods_2:
273    swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
274    break;
275  }
276  if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }
277
278  // see if the same (or a similar) dep is already recorded
279  if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
280    // look in this bucket for redundant assertions
281    const int stride = 3;
282    for (int i = deps->length(); (i -= stride) >= 0; ) {
283      ciBaseObject* y  = deps->at(i+1);
284      ciBaseObject* y2 = deps->at(i+2);
285      if (x == y && x2 == y2) {  // same subjects; check the context
286        if (maybe_merge_ctxk(deps, i+0, ctxk)) {
287          return;
288        }
289      }
290    }
291  }
292  // append the assertion in the correct bucket:
293  deps->append(ctxk);
294  deps->append(x);
295  deps->append(x2);
296}
297
298#if INCLUDE_JVMCI
299bool Dependencies::maybe_merge_ctxk(GrowableArray<DepValue>* deps,
300                                    int ctxk_i, DepValue ctxk2_dv) {
301  Klass* ctxk1 = deps->at(ctxk_i).as_klass(_oop_recorder);
302  Klass* ctxk2 = ctxk2_dv.as_klass(_oop_recorder);
303  if (ctxk2->is_subtype_of(ctxk1)) {
304    return true;  // success, and no need to change
305  } else if (ctxk1->is_subtype_of(ctxk2)) {
306    // new context class fully subsumes previous one
307    deps->at_put(ctxk_i, ctxk2_dv);
308    return true;
309  } else {
310    return false;
311  }
312}
313
314void Dependencies::assert_common_1(DepType dept, DepValue x) {
315  assert(dep_args(dept) == 1, "sanity");
316  //log_dependency(dept, x);
317  GrowableArray<DepValue>* deps = _dep_values[dept];
318
319  // see if the same (or a similar) dep is already recorded
320  if (note_dep_seen(dept, x)) {
321    assert(deps->find(x) >= 0, "sanity");
322  } else {
323    deps->append(x);
324  }
325}
326
327void Dependencies::assert_common_2(DepType dept,
328                                   DepValue x0, DepValue x1) {
329  assert(dep_args(dept) == 2, "sanity");
330  //log_dependency(dept, x0, x1);
331  GrowableArray<DepValue>* deps = _dep_values[dept];
332
333  // see if the same (or a similar) dep is already recorded
334  bool has_ctxk = has_explicit_context_arg(dept);
335  if (has_ctxk) {
336    assert(dep_context_arg(dept) == 0, "sanity");
337    if (note_dep_seen(dept, x1)) {
338      // look in this bucket for redundant assertions
339      const int stride = 2;
340      for (int i = deps->length(); (i -= stride) >= 0; ) {
341        DepValue y1 = deps->at(i+1);
342        if (x1 == y1) {  // same subject; check the context
343          if (maybe_merge_ctxk(deps, i+0, x0)) {
344            return;
345          }
346        }
347      }
348    }
349  } else {
350    if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
351      // look in this bucket for redundant assertions
352      const int stride = 2;
353      for (int i = deps->length(); (i -= stride) >= 0; ) {
354        DepValue y0 = deps->at(i+0);
355        DepValue y1 = deps->at(i+1);
356        if (x0 == y0 && x1 == y1) {
357          return;
358        }
359      }
360    }
361  }
362
363  // append the assertion in the correct bucket:
364  deps->append(x0);
365  deps->append(x1);
366}
367#endif // INCLUDE_JVMCI
368
369/// Support for encoding dependencies into an nmethod:
370
371void Dependencies::copy_to(nmethod* nm) {
372  address beg = nm->dependencies_begin();
373  address end = nm->dependencies_end();
374  guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
375  Copy::disjoint_words((HeapWord*) content_bytes(),
376                       (HeapWord*) beg,
377                       size_in_bytes() / sizeof(HeapWord));
378  assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
379}
380
381static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
382  for (int i = 0; i < narg; i++) {
383    int diff = p1[i]->ident() - p2[i]->ident();
384    if (diff != 0)  return diff;
385  }
386  return 0;
387}
388static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
389{ return sort_dep(p1, p2, 1); }
390static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
391{ return sort_dep(p1, p2, 2); }
392static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
393{ return sort_dep(p1, p2, 3); }
394
395#if INCLUDE_JVMCI
396// metadata deps are sorted before object deps
397static int sort_dep_value(Dependencies::DepValue* p1, Dependencies::DepValue* p2, int narg) {
398  for (int i = 0; i < narg; i++) {
399    int diff = p1[i].sort_key() - p2[i].sort_key();
400    if (diff != 0)  return diff;
401  }
402  return 0;
403}
404static int sort_dep_value_arg_1(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
405{ return sort_dep_value(p1, p2, 1); }
406static int sort_dep_value_arg_2(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
407{ return sort_dep_value(p1, p2, 2); }
408static int sort_dep_value_arg_3(Dependencies::DepValue* p1, Dependencies::DepValue* p2)
409{ return sort_dep_value(p1, p2, 3); }
410#endif // INCLUDE_JVMCI
411
412void Dependencies::sort_all_deps() {
413#if INCLUDE_JVMCI
414  if (_using_dep_values) {
415    for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
416      DepType dept = (DepType)deptv;
417      GrowableArray<DepValue>* deps = _dep_values[dept];
418      if (deps->length() <= 1)  continue;
419      switch (dep_args(dept)) {
420      case 1: deps->sort(sort_dep_value_arg_1, 1); break;
421      case 2: deps->sort(sort_dep_value_arg_2, 2); break;
422      case 3: deps->sort(sort_dep_value_arg_3, 3); break;
423      default: ShouldNotReachHere();
424      }
425    }
426    return;
427  }
428#endif // INCLUDE_JVMCI
429  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
430    DepType dept = (DepType)deptv;
431    GrowableArray<ciBaseObject*>* deps = _deps[dept];
432    if (deps->length() <= 1)  continue;
433    switch (dep_args(dept)) {
434    case 1: deps->sort(sort_dep_arg_1, 1); break;
435    case 2: deps->sort(sort_dep_arg_2, 2); break;
436    case 3: deps->sort(sort_dep_arg_3, 3); break;
437    default: ShouldNotReachHere();
438    }
439  }
440}
441
442size_t Dependencies::estimate_size_in_bytes() {
443  size_t est_size = 100;
444#if INCLUDE_JVMCI
445  if (_using_dep_values) {
446    for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
447      DepType dept = (DepType)deptv;
448      GrowableArray<DepValue>* deps = _dep_values[dept];
449      est_size += deps->length() * 2;  // tags and argument(s)
450    }
451    return est_size;
452  }
453#endif // INCLUDE_JVMCI
454  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
455    DepType dept = (DepType)deptv;
456    GrowableArray<ciBaseObject*>* deps = _deps[dept];
457    est_size += deps->length()*2;  // tags and argument(s)
458  }
459  return est_size;
460}
461
462ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
463  switch (dept) {
464  case abstract_with_exclusive_concrete_subtypes_2:
465    return x->as_metadata()->as_klass();
466  case unique_concrete_method:
467  case exclusive_concrete_methods_2:
468    return x->as_metadata()->as_method()->holder();
469  }
470  return NULL;  // let NULL be NULL
471}
472
473Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
474  assert(must_be_in_vm(), "raw oops here");
475  switch (dept) {
476  case abstract_with_exclusive_concrete_subtypes_2:
477    assert(x->is_klass(), "sanity");
478    return (Klass*) x;
479  case unique_concrete_method:
480  case exclusive_concrete_methods_2:
481    assert(x->is_method(), "sanity");
482    return ((Method*)x)->method_holder();
483  }
484  return NULL;  // let NULL be NULL
485}
486
487void Dependencies::encode_content_bytes() {
488  sort_all_deps();
489
490  // cast is safe, no deps can overflow INT_MAX
491  CompressedWriteStream bytes((int)estimate_size_in_bytes());
492
493#if INCLUDE_JVMCI
494  if (_using_dep_values) {
495    for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
496      DepType dept = (DepType)deptv;
497      GrowableArray<DepValue>* deps = _dep_values[dept];
498      if (deps->length() == 0)  continue;
499      int stride = dep_args(dept);
500      int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
501      assert(stride > 0, "sanity");
502      for (int i = 0; i < deps->length(); i += stride) {
503        jbyte code_byte = (jbyte)dept;
504        int skipj = -1;
505        if (ctxkj >= 0 && ctxkj+1 < stride) {
506          Klass*  ctxk = deps->at(i+ctxkj+0).as_klass(_oop_recorder);
507          DepValue x = deps->at(i+ctxkj+1);  // following argument
508          if (ctxk == ctxk_encoded_as_null(dept, x.as_metadata(_oop_recorder))) {
509            skipj = ctxkj;  // we win:  maybe one less oop to keep track of
510            code_byte |= default_context_type_bit;
511          }
512        }
513        bytes.write_byte(code_byte);
514        for (int j = 0; j < stride; j++) {
515          if (j == skipj)  continue;
516          DepValue v = deps->at(i+j);
517          int idx = v.index();
518          bytes.write_int(idx);
519        }
520      }
521    }
522  } else {
523#endif // INCLUDE_JVMCI
524  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
525    DepType dept = (DepType)deptv;
526    GrowableArray<ciBaseObject*>* deps = _deps[dept];
527    if (deps->length() == 0)  continue;
528    int stride = dep_args(dept);
529    int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
530    assert(stride > 0, "sanity");
531    for (int i = 0; i < deps->length(); i += stride) {
532      jbyte code_byte = (jbyte)dept;
533      int skipj = -1;
534      if (ctxkj >= 0 && ctxkj+1 < stride) {
535        ciKlass*  ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
536        ciBaseObject* x     = deps->at(i+ctxkj+1);  // following argument
537        if (ctxk == ctxk_encoded_as_null(dept, x)) {
538          skipj = ctxkj;  // we win:  maybe one less oop to keep track of
539          code_byte |= default_context_type_bit;
540        }
541      }
542      bytes.write_byte(code_byte);
543      for (int j = 0; j < stride; j++) {
544        if (j == skipj)  continue;
545        ciBaseObject* v = deps->at(i+j);
546        int idx;
547        if (v->is_object()) {
548          idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
549        } else {
550          ciMetadata* meta = v->as_metadata();
551          idx = _oop_recorder->find_index(meta->constant_encoding());
552        }
553        bytes.write_int(idx);
554      }
555    }
556  }
557#if INCLUDE_JVMCI
558  }
559#endif
560
561  // write a sentinel byte to mark the end
562  bytes.write_byte(end_marker);
563
564  // round it out to a word boundary
565  while (bytes.position() % sizeof(HeapWord) != 0) {
566    bytes.write_byte(end_marker);
567  }
568
569  // check whether the dept byte encoding really works
570  assert((jbyte)default_context_type_bit != 0, "byte overflow");
571
572  _content_bytes = bytes.buffer();
573  _size_in_bytes = bytes.position();
574}
575
576
577const char* Dependencies::_dep_name[TYPE_LIMIT] = {
578  "end_marker",
579  "evol_method",
580  "leaf_type",
581  "abstract_with_unique_concrete_subtype",
582  "abstract_with_no_concrete_subtype",
583  "concrete_with_no_concrete_subtype",
584  "unique_concrete_method",
585  "abstract_with_exclusive_concrete_subtypes_2",
586  "exclusive_concrete_methods_2",
587  "no_finalizable_subclasses",
588  "call_site_target_value"
589};
590
591int Dependencies::_dep_args[TYPE_LIMIT] = {
592  -1,// end_marker
593  1, // evol_method m
594  1, // leaf_type ctxk
595  2, // abstract_with_unique_concrete_subtype ctxk, k
596  1, // abstract_with_no_concrete_subtype ctxk
597  1, // concrete_with_no_concrete_subtype ctxk
598  2, // unique_concrete_method ctxk, m
599  3, // unique_concrete_subtypes_2 ctxk, k1, k2
600  3, // unique_concrete_methods_2 ctxk, m1, m2
601  1, // no_finalizable_subclasses ctxk
602  2  // call_site_target_value call_site, method_handle
603};
604
605const char* Dependencies::dep_name(Dependencies::DepType dept) {
606  if (!dept_in_mask(dept, all_types))  return "?bad-dep?";
607  return _dep_name[dept];
608}
609
610int Dependencies::dep_args(Dependencies::DepType dept) {
611  if (!dept_in_mask(dept, all_types))  return -1;
612  return _dep_args[dept];
613}
614
615void Dependencies::check_valid_dependency_type(DepType dept) {
616  guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, "invalid dependency type: %d", (int) dept);
617}
618
619// for the sake of the compiler log, print out current dependencies:
620void Dependencies::log_all_dependencies() {
621  if (log() == NULL)  return;
622  ResourceMark rm;
623  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
624    DepType dept = (DepType)deptv;
625    GrowableArray<ciBaseObject*>* deps = _deps[dept];
626    int deplen = deps->length();
627    if (deplen == 0) {
628      continue;
629    }
630    int stride = dep_args(dept);
631    GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(stride);
632    for (int i = 0; i < deps->length(); i += stride) {
633      for (int j = 0; j < stride; j++) {
634        // flush out the identities before printing
635        ciargs->push(deps->at(i+j));
636      }
637      write_dependency_to(log(), dept, ciargs);
638      ciargs->clear();
639    }
640    guarantee(deplen == deps->length(), "deps array cannot grow inside nested ResoureMark scope");
641  }
642}
643
644void Dependencies::write_dependency_to(CompileLog* log,
645                                       DepType dept,
646                                       GrowableArray<DepArgument>* args,
647                                       Klass* witness) {
648  if (log == NULL) {
649    return;
650  }
651  ResourceMark rm;
652  ciEnv* env = ciEnv::current();
653  GrowableArray<ciBaseObject*>* ciargs = new GrowableArray<ciBaseObject*>(args->length());
654  for (GrowableArrayIterator<DepArgument> it = args->begin(); it != args->end(); ++it) {
655    DepArgument arg = *it;
656    if (arg.is_oop()) {
657      ciargs->push(env->get_object(arg.oop_value()));
658    } else {
659      ciargs->push(env->get_metadata(arg.metadata_value()));
660    }
661  }
662  int argslen = ciargs->length();
663  Dependencies::write_dependency_to(log, dept, ciargs, witness);
664  guarantee(argslen == ciargs->length(), "ciargs array cannot grow inside nested ResoureMark scope");
665}
666
667void Dependencies::write_dependency_to(CompileLog* log,
668                                       DepType dept,
669                                       GrowableArray<ciBaseObject*>* args,
670                                       Klass* witness) {
671  if (log == NULL) {
672    return;
673  }
674  ResourceMark rm;
675  GrowableArray<int>* argids = new GrowableArray<int>(args->length());
676  for (GrowableArrayIterator<ciBaseObject*> it = args->begin(); it != args->end(); ++it) {
677    ciBaseObject* obj = *it;
678    if (obj->is_object()) {
679      argids->push(log->identify(obj->as_object()));
680    } else {
681      argids->push(log->identify(obj->as_metadata()));
682    }
683  }
684  if (witness != NULL) {
685    log->begin_elem("dependency_failed");
686  } else {
687    log->begin_elem("dependency");
688  }
689  log->print(" type='%s'", dep_name(dept));
690  const int ctxkj = dep_context_arg(dept);  // -1 if no context arg
691  if (ctxkj >= 0 && ctxkj < argids->length()) {
692    log->print(" ctxk='%d'", argids->at(ctxkj));
693  }
694  // write remaining arguments, if any.
695  for (int j = 0; j < argids->length(); j++) {
696    if (j == ctxkj)  continue;  // already logged
697    if (j == 1) {
698      log->print(  " x='%d'",    argids->at(j));
699    } else {
700      log->print(" x%d='%d'", j, argids->at(j));
701    }
702  }
703  if (witness != NULL) {
704    log->object("witness", witness);
705    log->stamp();
706  }
707  log->end_elem();
708}
709
710void Dependencies::write_dependency_to(xmlStream* xtty,
711                                       DepType dept,
712                                       GrowableArray<DepArgument>* args,
713                                       Klass* witness) {
714  if (xtty == NULL) {
715    return;
716  }
717  ResourceMark rm;
718  ttyLocker ttyl;
719  int ctxkj = dep_context_arg(dept);  // -1 if no context arg
720  if (witness != NULL) {
721    xtty->begin_elem("dependency_failed");
722  } else {
723    xtty->begin_elem("dependency");
724  }
725  xtty->print(" type='%s'", dep_name(dept));
726  if (ctxkj >= 0) {
727    xtty->object("ctxk", args->at(ctxkj).metadata_value());
728  }
729  // write remaining arguments, if any.
730  for (int j = 0; j < args->length(); j++) {
731    if (j == ctxkj)  continue;  // already logged
732    DepArgument arg = args->at(j);
733    if (j == 1) {
734      if (arg.is_oop()) {
735        xtty->object("x", arg.oop_value());
736      } else {
737        xtty->object("x", arg.metadata_value());
738      }
739    } else {
740      char xn[10]; sprintf(xn, "x%d", j);
741      if (arg.is_oop()) {
742        xtty->object(xn, arg.oop_value());
743      } else {
744        xtty->object(xn, arg.metadata_value());
745      }
746    }
747  }
748  if (witness != NULL) {
749    xtty->object("witness", witness);
750    xtty->stamp();
751  }
752  xtty->end_elem();
753}
754
755void Dependencies::print_dependency(DepType dept, GrowableArray<DepArgument>* args,
756                                    Klass* witness, outputStream* st) {
757  ResourceMark rm;
758  ttyLocker ttyl;   // keep the following output all in one block
759  st->print_cr("%s of type %s",
760                (witness == NULL)? "Dependency": "Failed dependency",
761                dep_name(dept));
762  // print arguments
763  int ctxkj = dep_context_arg(dept);  // -1 if no context arg
764  for (int j = 0; j < args->length(); j++) {
765    DepArgument arg = args->at(j);
766    bool put_star = false;
767    if (arg.is_null())  continue;
768    const char* what;
769    if (j == ctxkj) {
770      assert(arg.is_metadata(), "must be");
771      what = "context";
772      put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
773    } else if (arg.is_method()) {
774      what = "method ";
775      put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value(), NULL);
776    } else if (arg.is_klass()) {
777      what = "class  ";
778    } else {
779      what = "object ";
780    }
781    st->print("  %s = %s", what, (put_star? "*": ""));
782    if (arg.is_klass()) {
783      st->print("%s", ((Klass*)arg.metadata_value())->external_name());
784    } else if (arg.is_method()) {
785      ((Method*)arg.metadata_value())->print_value_on(st);
786    } else if (arg.is_oop()) {
787      arg.oop_value()->print_value_on(st);
788    } else {
789      ShouldNotReachHere(); // Provide impl for this type.
790    }
791
792    st->cr();
793  }
794  if (witness != NULL) {
795    bool put_star = !Dependencies::is_concrete_klass(witness);
796    st->print_cr("  witness = %s%s",
797                  (put_star? "*": ""),
798                  witness->external_name());
799  }
800}
801
802void Dependencies::DepStream::log_dependency(Klass* witness) {
803  if (_deps == NULL && xtty == NULL)  return;  // fast cutout for runtime
804  ResourceMark rm;
805  const int nargs = argument_count();
806  GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
807  for (int j = 0; j < nargs; j++) {
808    if (is_oop_argument(j)) {
809      args->push(argument_oop(j));
810    } else {
811      args->push(argument(j));
812    }
813  }
814  int argslen = args->length();
815  if (_deps != NULL && _deps->log() != NULL) {
816    if (ciEnv::current() != NULL) {
817      Dependencies::write_dependency_to(_deps->log(), type(), args, witness);
818    } else {
819      // Treat the CompileLog as an xmlstream instead
820      Dependencies::write_dependency_to((xmlStream*)_deps->log(), type(), args, witness);
821    }
822  } else {
823    Dependencies::write_dependency_to(xtty, type(), args, witness);
824  }
825  guarantee(argslen == args->length(), "args array cannot grow inside nested ResoureMark scope");
826}
827
828void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose, outputStream* st) {
829  ResourceMark rm;
830  int nargs = argument_count();
831  GrowableArray<DepArgument>* args = new GrowableArray<DepArgument>(nargs);
832  for (int j = 0; j < nargs; j++) {
833    if (is_oop_argument(j)) {
834      args->push(argument_oop(j));
835    } else {
836      args->push(argument(j));
837    }
838  }
839  int argslen = args->length();
840  Dependencies::print_dependency(type(), args, witness, st);
841  if (verbose) {
842    if (_code != NULL) {
843      st->print("  code: ");
844      _code->print_value_on(st);
845      st->cr();
846    }
847  }
848  guarantee(argslen == args->length(), "args array cannot grow inside nested ResoureMark scope");
849}
850
851
852/// Dependency stream support (decodes dependencies from an nmethod):
853
854#ifdef ASSERT
855void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
856  assert(must_be_in_vm(), "raw oops here");
857  _byte_limit = byte_limit;
858  _type       = (DepType)(end_marker-1);  // defeat "already at end" assert
859  assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
860}
861#endif //ASSERT
862
863bool Dependencies::DepStream::next() {
864  assert(_type != end_marker, "already at end");
865  if (_bytes.position() == 0 && _code != NULL
866      && _code->dependencies_size() == 0) {
867    // Method has no dependencies at all.
868    return false;
869  }
870  int code_byte = (_bytes.read_byte() & 0xFF);
871  if (code_byte == end_marker) {
872    DEBUG_ONLY(_type = end_marker);
873    return false;
874  } else {
875    int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
876    code_byte -= ctxk_bit;
877    DepType dept = (DepType)code_byte;
878    _type = dept;
879    Dependencies::check_valid_dependency_type(dept);
880    int stride = _dep_args[dept];
881    assert(stride == dep_args(dept), "sanity");
882    int skipj = -1;
883    if (ctxk_bit != 0) {
884      skipj = 0;  // currently the only context argument is at zero
885      assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
886    }
887    for (int j = 0; j < stride; j++) {
888      _xi[j] = (j == skipj)? 0: _bytes.read_int();
889    }
890    DEBUG_ONLY(_xi[stride] = -1);   // help detect overruns
891    return true;
892  }
893}
894
895inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
896  Metadata* o = NULL;
897  if (_code != NULL) {
898    o = _code->metadata_at(i);
899  } else {
900    o = _deps->oop_recorder()->metadata_at(i);
901  }
902  return o;
903}
904
905inline oop Dependencies::DepStream::recorded_oop_at(int i) {
906  return (_code != NULL)
907         ? _code->oop_at(i)
908    : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
909}
910
911Metadata* Dependencies::DepStream::argument(int i) {
912  Metadata* result = recorded_metadata_at(argument_index(i));
913
914  if (result == NULL) { // Explicit context argument can be compressed
915    int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
916    if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
917      result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
918    }
919  }
920
921  assert(result == NULL || result->is_klass() || result->is_method(), "must be");
922  return result;
923}
924
925/**
926 * Returns a unique identifier for each dependency argument.
927 */
928uintptr_t Dependencies::DepStream::get_identifier(int i) {
929  if (is_oop_argument(i)) {
930    return (uintptr_t)(oopDesc*)argument_oop(i);
931  } else {
932    return (uintptr_t)argument(i);
933  }
934}
935
936oop Dependencies::DepStream::argument_oop(int i) {
937  oop result = recorded_oop_at(argument_index(i));
938  assert(result == NULL || result->is_oop(), "must be");
939  return result;
940}
941
942Klass* Dependencies::DepStream::context_type() {
943  assert(must_be_in_vm(), "raw oops here");
944
945  // Most dependencies have an explicit context type argument.
946  {
947    int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
948    if (ctxkj >= 0) {
949      Metadata* k = argument(ctxkj);
950      assert(k != NULL && k->is_klass(), "type check");
951      return (Klass*)k;
952    }
953  }
954
955  // Some dependencies are using the klass of the first object
956  // argument as implicit context type.
957  {
958    int ctxkj = dep_implicit_context_arg(type());
959    if (ctxkj >= 0) {
960      Klass* k = argument_oop(ctxkj)->klass();
961      assert(k != NULL && k->is_klass(), "type check");
962      return (Klass*) k;
963    }
964  }
965
966  // And some dependencies don't have a context type at all,
967  // e.g. evol_method.
968  return NULL;
969}
970
971// ----------------- DependencySignature --------------------------------------
972bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) {
973  if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) {
974    return false;
975  }
976
977  for (int i = 0; i < s1.args_count(); i++) {
978    if (s1.arg(i) != s2.arg(i)) {
979      return false;
980    }
981  }
982  return true;
983}
984
985/// Checking dependencies:
986
987// This hierarchy walker inspects subtypes of a given type,
988// trying to find a "bad" class which breaks a dependency.
989// Such a class is called a "witness" to the broken dependency.
990// While searching around, we ignore "participants", which
991// are already known to the dependency.
992class ClassHierarchyWalker {
993 public:
994  enum { PARTICIPANT_LIMIT = 3 };
995
996 private:
997  // optional method descriptor to check for:
998  Symbol* _name;
999  Symbol* _signature;
1000
1001  // special classes which are not allowed to be witnesses:
1002  Klass*    _participants[PARTICIPANT_LIMIT+1];
1003  int       _num_participants;
1004
1005  // cache of method lookups
1006  Method* _found_methods[PARTICIPANT_LIMIT+1];
1007
1008  // if non-zero, tells how many witnesses to convert to participants
1009  int       _record_witnesses;
1010
1011  void initialize(Klass* participant) {
1012    _record_witnesses = 0;
1013    _participants[0]  = participant;
1014    _found_methods[0] = NULL;
1015    _num_participants = 0;
1016    if (participant != NULL) {
1017      // Terminating NULL.
1018      _participants[1] = NULL;
1019      _found_methods[1] = NULL;
1020      _num_participants = 1;
1021    }
1022  }
1023
1024  void initialize_from_method(Method* m) {
1025    assert(m != NULL && m->is_method(), "sanity");
1026    _name      = m->name();
1027    _signature = m->signature();
1028  }
1029
1030 public:
1031  // The walker is initialized to recognize certain methods and/or types
1032  // as friendly participants.
1033  ClassHierarchyWalker(Klass* participant, Method* m) {
1034    initialize_from_method(m);
1035    initialize(participant);
1036  }
1037  ClassHierarchyWalker(Method* m) {
1038    initialize_from_method(m);
1039    initialize(NULL);
1040  }
1041  ClassHierarchyWalker(Klass* participant = NULL) {
1042    _name      = NULL;
1043    _signature = NULL;
1044    initialize(participant);
1045  }
1046
1047  // This is common code for two searches:  One for concrete subtypes,
1048  // the other for concrete method implementations and overrides.
1049  bool doing_subtype_search() {
1050    return _name == NULL;
1051  }
1052
1053  int num_participants() { return _num_participants; }
1054  Klass* participant(int n) {
1055    assert((uint)n <= (uint)_num_participants, "oob");
1056    return _participants[n];
1057  }
1058
1059  // Note:  If n==num_participants, returns NULL.
1060  Method* found_method(int n) {
1061    assert((uint)n <= (uint)_num_participants, "oob");
1062    Method* fm = _found_methods[n];
1063    assert(n == _num_participants || fm != NULL, "proper usage");
1064    if (fm != NULL && fm->method_holder() != _participants[n]) {
1065      // Default methods from interfaces can be added to classes. In
1066      // that case the holder of the method is not the class but the
1067      // interface where it's defined.
1068      assert(fm->is_default_method(), "sanity");
1069      return NULL;
1070    }
1071    return fm;
1072  }
1073
1074#ifdef ASSERT
1075  // Assert that m is inherited into ctxk, without intervening overrides.
1076  // (May return true even if this is not true, in corner cases where we punt.)
1077  bool check_method_context(Klass* ctxk, Method* m) {
1078    if (m->method_holder() == ctxk)
1079      return true;  // Quick win.
1080    if (m->is_private())
1081      return false; // Quick lose.  Should not happen.
1082    if (!(m->is_public() || m->is_protected()))
1083      // The override story is complex when packages get involved.
1084      return true;  // Must punt the assertion to true.
1085    Method* lm = ctxk->lookup_method(m->name(), m->signature());
1086    if (lm == NULL && ctxk->is_instance_klass()) {
1087      // It might be an interface method
1088      lm = InstanceKlass::cast(ctxk)->lookup_method_in_ordered_interfaces(m->name(),
1089                                                                          m->signature());
1090    }
1091    if (lm == m)
1092      // Method m is inherited into ctxk.
1093      return true;
1094    if (lm != NULL) {
1095      if (!(lm->is_public() || lm->is_protected())) {
1096        // Method is [package-]private, so the override story is complex.
1097        return true;  // Must punt the assertion to true.
1098      }
1099      if (lm->is_static()) {
1100        // Static methods don't override non-static so punt
1101        return true;
1102      }
1103      if (!Dependencies::is_concrete_method(lm, ctxk) &&
1104          !Dependencies::is_concrete_method(m, ctxk)) {
1105        // They are both non-concrete
1106        if (lm->method_holder()->is_subtype_of(m->method_holder())) {
1107          // Method m is overridden by lm, but both are non-concrete.
1108          return true;
1109        }
1110        if (lm->method_holder()->is_interface() && m->method_holder()->is_interface() &&
1111            ctxk->is_subtype_of(m->method_holder()) && ctxk->is_subtype_of(lm->method_holder())) {
1112          // Interface method defined in multiple super interfaces
1113          return true;
1114        }
1115      }
1116    }
1117    ResourceMark rm;
1118    tty->print_cr("Dependency method not found in the associated context:");
1119    tty->print_cr("  context = %s", ctxk->external_name());
1120    tty->print(   "  method = "); m->print_short_name(tty); tty->cr();
1121    if (lm != NULL) {
1122      tty->print( "  found = "); lm->print_short_name(tty); tty->cr();
1123    }
1124    return false;
1125  }
1126#endif
1127
1128  void add_participant(Klass* participant) {
1129    assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
1130    int np = _num_participants++;
1131    _participants[np] = participant;
1132    _participants[np+1] = NULL;
1133    _found_methods[np+1] = NULL;
1134  }
1135
1136  void record_witnesses(int add) {
1137    if (add > PARTICIPANT_LIMIT)  add = PARTICIPANT_LIMIT;
1138    assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
1139    _record_witnesses = add;
1140  }
1141
1142  bool is_witness(Klass* k) {
1143    if (doing_subtype_search()) {
1144      return Dependencies::is_concrete_klass(k);
1145    } else if (!k->is_instance_klass()) {
1146      return false; // no methods to find in an array type
1147    } else {
1148      // Search class hierarchy first.
1149      Method* m = InstanceKlass::cast(k)->find_instance_method(_name, _signature);
1150      if (!Dependencies::is_concrete_method(m, k)) {
1151        // Check interface defaults also, if any exist.
1152        Array<Method*>* default_methods = InstanceKlass::cast(k)->default_methods();
1153        if (default_methods == NULL)
1154            return false;
1155        m = InstanceKlass::cast(k)->find_method(default_methods, _name, _signature);
1156        if (!Dependencies::is_concrete_method(m, NULL))
1157            return false;
1158      }
1159      _found_methods[_num_participants] = m;
1160      // Note:  If add_participant(k) is called,
1161      // the method m will already be memoized for it.
1162      return true;
1163    }
1164  }
1165
1166  bool is_participant(Klass* k) {
1167    if (k == _participants[0]) {
1168      return true;
1169    } else if (_num_participants <= 1) {
1170      return false;
1171    } else {
1172      return in_list(k, &_participants[1]);
1173    }
1174  }
1175  bool ignore_witness(Klass* witness) {
1176    if (_record_witnesses == 0) {
1177      return false;
1178    } else {
1179      --_record_witnesses;
1180      add_participant(witness);
1181      return true;
1182    }
1183  }
1184  static bool in_list(Klass* x, Klass** list) {
1185    for (int i = 0; ; i++) {
1186      Klass* y = list[i];
1187      if (y == NULL)  break;
1188      if (y == x)  return true;
1189    }
1190    return false;  // not in list
1191  }
1192
1193 private:
1194  // the actual search method:
1195  Klass* find_witness_anywhere(Klass* context_type,
1196                                 bool participants_hide_witnesses,
1197                                 bool top_level_call = true);
1198  // the spot-checking version:
1199  Klass* find_witness_in(KlassDepChange& changes,
1200                         Klass* context_type,
1201                           bool participants_hide_witnesses);
1202 public:
1203  Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
1204    assert(doing_subtype_search(), "must set up a subtype search");
1205    // When looking for unexpected concrete types,
1206    // do not look beneath expected ones.
1207    const bool participants_hide_witnesses = true;
1208    // CX > CC > C' is OK, even if C' is new.
1209    // CX > { CC,  C' } is not OK if C' is new, and C' is the witness.
1210    if (changes != NULL) {
1211      return find_witness_in(*changes, context_type, participants_hide_witnesses);
1212    } else {
1213      return find_witness_anywhere(context_type, participants_hide_witnesses);
1214    }
1215  }
1216  Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
1217    assert(!doing_subtype_search(), "must set up a method definer search");
1218    // When looking for unexpected concrete methods,
1219    // look beneath expected ones, to see if there are overrides.
1220    const bool participants_hide_witnesses = true;
1221    // CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
1222    if (changes != NULL) {
1223      return find_witness_in(*changes, context_type, !participants_hide_witnesses);
1224    } else {
1225      return find_witness_anywhere(context_type, !participants_hide_witnesses);
1226    }
1227  }
1228};
1229
1230#ifndef PRODUCT
1231static int deps_find_witness_calls = 0;
1232static int deps_find_witness_steps = 0;
1233static int deps_find_witness_recursions = 0;
1234static int deps_find_witness_singles = 0;
1235static int deps_find_witness_print = 0; // set to -1 to force a final print
1236static bool count_find_witness_calls() {
1237  if (TraceDependencies || LogCompilation) {
1238    int pcount = deps_find_witness_print + 1;
1239    bool final_stats      = (pcount == 0);
1240    bool initial_call     = (pcount == 1);
1241    bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
1242    if (pcount < 0)  pcount = 1; // crude overflow protection
1243    deps_find_witness_print = pcount;
1244    if (VerifyDependencies && initial_call) {
1245      tty->print_cr("Warning:  TraceDependencies results may be inflated by VerifyDependencies");
1246    }
1247    if (occasional_print || final_stats) {
1248      // Every now and then dump a little info about dependency searching.
1249      if (xtty != NULL) {
1250       ttyLocker ttyl;
1251       xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
1252                   deps_find_witness_calls,
1253                   deps_find_witness_steps,
1254                   deps_find_witness_recursions,
1255                   deps_find_witness_singles);
1256      }
1257      if (final_stats || (TraceDependencies && WizardMode)) {
1258        ttyLocker ttyl;
1259        tty->print_cr("Dependency check (find_witness) "
1260                      "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
1261                      deps_find_witness_calls,
1262                      deps_find_witness_steps,
1263                      (double)deps_find_witness_steps / deps_find_witness_calls,
1264                      deps_find_witness_recursions,
1265                      deps_find_witness_singles);
1266      }
1267    }
1268    return true;
1269  }
1270  return false;
1271}
1272#else
1273#define count_find_witness_calls() (0)
1274#endif //PRODUCT
1275
1276
1277Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
1278                                               Klass* context_type,
1279                                               bool participants_hide_witnesses) {
1280  assert(changes.involves_context(context_type), "irrelevant dependency");
1281  Klass* new_type = changes.new_type();
1282
1283  (void)count_find_witness_calls();
1284  NOT_PRODUCT(deps_find_witness_singles++);
1285
1286  // Current thread must be in VM (not native mode, as in CI):
1287  assert(must_be_in_vm(), "raw oops here");
1288  // Must not move the class hierarchy during this check:
1289  assert_locked_or_safepoint(Compile_lock);
1290
1291  int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1292  if (nof_impls > 1) {
1293    // Avoid this case: *I.m > { A.m, C }; B.m > C
1294    // %%% Until this is fixed more systematically, bail out.
1295    // See corresponding comment in find_witness_anywhere.
1296    return context_type;
1297  }
1298
1299  assert(!is_participant(new_type), "only old classes are participants");
1300  if (participants_hide_witnesses) {
1301    // If the new type is a subtype of a participant, we are done.
1302    for (int i = 0; i < num_participants(); i++) {
1303      Klass* part = participant(i);
1304      if (part == NULL)  continue;
1305      assert(changes.involves_context(part) == new_type->is_subtype_of(part),
1306             "correct marking of participants, b/c new_type is unique");
1307      if (changes.involves_context(part)) {
1308        // new guy is protected from this check by previous participant
1309        return NULL;
1310      }
1311    }
1312  }
1313
1314  if (is_witness(new_type) &&
1315      !ignore_witness(new_type)) {
1316    return new_type;
1317  }
1318
1319  return NULL;
1320}
1321
1322
1323// Walk hierarchy under a context type, looking for unexpected types.
1324// Do not report participant types, and recursively walk beneath
1325// them only if participants_hide_witnesses is false.
1326// If top_level_call is false, skip testing the context type,
1327// because the caller has already considered it.
1328Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type,
1329                                                     bool participants_hide_witnesses,
1330                                                     bool top_level_call) {
1331  // Current thread must be in VM (not native mode, as in CI):
1332  assert(must_be_in_vm(), "raw oops here");
1333  // Must not move the class hierarchy during this check:
1334  assert_locked_or_safepoint(Compile_lock);
1335
1336  bool do_counts = count_find_witness_calls();
1337
1338  // Check the root of the sub-hierarchy first.
1339  if (top_level_call) {
1340    if (do_counts) {
1341      NOT_PRODUCT(deps_find_witness_calls++);
1342      NOT_PRODUCT(deps_find_witness_steps++);
1343    }
1344    if (is_participant(context_type)) {
1345      if (participants_hide_witnesses)  return NULL;
1346      // else fall through to search loop...
1347    } else if (is_witness(context_type) && !ignore_witness(context_type)) {
1348      // The context is an abstract class or interface, to start with.
1349      return context_type;
1350    }
1351  }
1352
1353  // Now we must check each implementor and each subclass.
1354  // Use a short worklist to avoid blowing the stack.
1355  // Each worklist entry is a *chain* of subklass siblings to process.
1356  const int CHAINMAX = 100;  // >= 1 + InstanceKlass::implementors_limit
1357  Klass* chains[CHAINMAX];
1358  int    chaini = 0;  // index into worklist
1359  Klass* chain;       // scratch variable
1360#define ADD_SUBCLASS_CHAIN(k)                     {  \
1361    assert(chaini < CHAINMAX, "oob");                \
1362    chain = k->subklass();                           \
1363    if (chain != NULL)  chains[chaini++] = chain;    }
1364
1365  // Look for non-abstract subclasses.
1366  // (Note:  Interfaces do not have subclasses.)
1367  ADD_SUBCLASS_CHAIN(context_type);
1368
1369  // If it is an interface, search its direct implementors.
1370  // (Their subclasses are additional indirect implementors.
1371  // See InstanceKlass::add_implementor.)
1372  // (Note:  nof_implementors is always zero for non-interfaces.)
1373  if (top_level_call) {
1374    int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1375    if (nof_impls > 1) {
1376      // Avoid this case: *I.m > { A.m, C }; B.m > C
1377      // Here, I.m has 2 concrete implementations, but m appears unique
1378      // as A.m, because the search misses B.m when checking C.
1379      // The inherited method B.m was getting missed by the walker
1380      // when interface 'I' was the starting point.
1381      // %%% Until this is fixed more systematically, bail out.
1382      // (Old CHA had the same limitation.)
1383      return context_type;
1384    }
1385    if (nof_impls > 0) {
1386      Klass* impl = InstanceKlass::cast(context_type)->implementor();
1387      assert(impl != NULL, "just checking");
1388      // If impl is the same as the context_type, then more than one
1389      // implementor has seen. No exact info in this case.
1390      if (impl == context_type) {
1391        return context_type;  // report an inexact witness to this sad affair
1392      }
1393      if (do_counts)
1394        { NOT_PRODUCT(deps_find_witness_steps++); }
1395      if (is_participant(impl)) {
1396        if (!participants_hide_witnesses) {
1397          ADD_SUBCLASS_CHAIN(impl);
1398        }
1399      } else if (is_witness(impl) && !ignore_witness(impl)) {
1400        return impl;
1401      } else {
1402        ADD_SUBCLASS_CHAIN(impl);
1403      }
1404    }
1405  }
1406
1407  // Recursively process each non-trivial sibling chain.
1408  while (chaini > 0) {
1409    Klass* chain = chains[--chaini];
1410    for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) {
1411      if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
1412      if (is_participant(sub)) {
1413        if (participants_hide_witnesses)  continue;
1414        // else fall through to process this guy's subclasses
1415      } else if (is_witness(sub) && !ignore_witness(sub)) {
1416        return sub;
1417      }
1418      if (chaini < (VerifyDependencies? 2: CHAINMAX)) {
1419        // Fast path.  (Partially disabled if VerifyDependencies.)
1420        ADD_SUBCLASS_CHAIN(sub);
1421      } else {
1422        // Worklist overflow.  Do a recursive call.  Should be rare.
1423        // The recursive call will have its own worklist, of course.
1424        // (Note that sub has already been tested, so that there is
1425        // no need for the recursive call to re-test.  That's handy,
1426        // since the recursive call sees sub as the context_type.)
1427        if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); }
1428        Klass* witness = find_witness_anywhere(sub,
1429                                                 participants_hide_witnesses,
1430                                                 /*top_level_call=*/ false);
1431        if (witness != NULL)  return witness;
1432      }
1433    }
1434  }
1435
1436  // No witness found.  The dependency remains unbroken.
1437  return NULL;
1438#undef ADD_SUBCLASS_CHAIN
1439}
1440
1441
1442bool Dependencies::is_concrete_klass(Klass* k) {
1443  if (k->is_abstract())  return false;
1444  // %%% We could treat classes which are concrete but
1445  // have not yet been instantiated as virtually abstract.
1446  // This would require a deoptimization barrier on first instantiation.
1447  //if (k->is_not_instantiated())  return false;
1448  return true;
1449}
1450
1451bool Dependencies::is_concrete_method(Method* m, Klass * k) {
1452  // NULL is not a concrete method,
1453  // statics are irrelevant to virtual call sites,
1454  // abstract methods are not concrete,
1455  // overpass (error) methods are not concrete if k is abstract
1456  //
1457  // note "true" is conservative answer --
1458  //     overpass clause is false if k == NULL, implies return true if
1459  //     answer depends on overpass clause.
1460  return ! ( m == NULL || m -> is_static() || m -> is_abstract() ||
1461             m->is_overpass() && k != NULL && k -> is_abstract() );
1462}
1463
1464
1465Klass* Dependencies::find_finalizable_subclass(Klass* k) {
1466  if (k->is_interface())  return NULL;
1467  if (k->has_finalizer()) return k;
1468  k = k->subklass();
1469  while (k != NULL) {
1470    Klass* result = find_finalizable_subclass(k);
1471    if (result != NULL) return result;
1472    k = k->next_sibling();
1473  }
1474  return NULL;
1475}
1476
1477
1478bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
1479  if (k->is_abstract())  return false;
1480  // We could also return false if k does not yet appear to be
1481  // instantiated, if the VM version supports this distinction also.
1482  //if (k->is_not_instantiated())  return false;
1483  return true;
1484}
1485
1486bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
1487  return k->has_finalizable_subclass();
1488}
1489
1490
1491// Any use of the contents (bytecodes) of a method must be
1492// marked by an "evol_method" dependency, if those contents
1493// can change.  (Note: A method is always dependent on itself.)
1494Klass* Dependencies::check_evol_method(Method* m) {
1495  assert(must_be_in_vm(), "raw oops here");
1496  // Did somebody do a JVMTI RedefineClasses while our backs were turned?
1497  // Or is there a now a breakpoint?
1498  // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
1499  if (m->is_old()
1500      || m->number_of_breakpoints() > 0) {
1501    return m->method_holder();
1502  } else {
1503    return NULL;
1504  }
1505}
1506
1507// This is a strong assertion:  It is that the given type
1508// has no subtypes whatever.  It is most useful for
1509// optimizing checks on reflected types or on array types.
1510// (Checks on types which are derived from real instances
1511// can be optimized more strongly than this, because we
1512// know that the checked type comes from a concrete type,
1513// and therefore we can disregard abstract types.)
1514Klass* Dependencies::check_leaf_type(Klass* ctxk) {
1515  assert(must_be_in_vm(), "raw oops here");
1516  assert_locked_or_safepoint(Compile_lock);
1517  InstanceKlass* ctx = InstanceKlass::cast(ctxk);
1518  Klass* sub = ctx->subklass();
1519  if (sub != NULL) {
1520    return sub;
1521  } else if (ctx->nof_implementors() != 0) {
1522    // if it is an interface, it must be unimplemented
1523    // (if it is not an interface, nof_implementors is always zero)
1524    Klass* impl = ctx->implementor();
1525    assert(impl != NULL, "must be set");
1526    return impl;
1527  } else {
1528    return NULL;
1529  }
1530}
1531
1532// Test the assertion that conck is the only concrete subtype* of ctxk.
1533// The type conck itself is allowed to have have further concrete subtypes.
1534// This allows the compiler to narrow occurrences of ctxk by conck,
1535// when dealing with the types of actual instances.
1536Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk,
1537                                                                   Klass* conck,
1538                                                                   KlassDepChange* changes) {
1539  ClassHierarchyWalker wf(conck);
1540  return wf.find_witness_subtype(ctxk, changes);
1541}
1542
1543// If a non-concrete class has no concrete subtypes, it is not (yet)
1544// instantiatable.  This can allow the compiler to make some paths go
1545// dead, if they are gated by a test of the type.
1546Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk,
1547                                                               KlassDepChange* changes) {
1548  // Find any concrete subtype, with no participants:
1549  ClassHierarchyWalker wf;
1550  return wf.find_witness_subtype(ctxk, changes);
1551}
1552
1553
1554// If a concrete class has no concrete subtypes, it can always be
1555// exactly typed.  This allows the use of a cheaper type test.
1556Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk,
1557                                                               KlassDepChange* changes) {
1558  // Find any concrete subtype, with only the ctxk as participant:
1559  ClassHierarchyWalker wf(ctxk);
1560  return wf.find_witness_subtype(ctxk, changes);
1561}
1562
1563
1564// Find the unique concrete proper subtype of ctxk, or NULL if there
1565// is more than one concrete proper subtype.  If there are no concrete
1566// proper subtypes, return ctxk itself, whether it is concrete or not.
1567// The returned subtype is allowed to have have further concrete subtypes.
1568// That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
1569Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) {
1570  ClassHierarchyWalker wf(ctxk);   // Ignore ctxk when walking.
1571  wf.record_witnesses(1);          // Record one other witness when walking.
1572  Klass* wit = wf.find_witness_subtype(ctxk);
1573  if (wit != NULL)  return NULL;   // Too many witnesses.
1574  Klass* conck = wf.participant(0);
1575  if (conck == NULL) {
1576#ifndef PRODUCT
1577    // Make sure the dependency mechanism will pass this discovery:
1578    if (VerifyDependencies) {
1579      // Turn off dependency tracing while actually testing deps.
1580      FlagSetting fs(TraceDependencies, false);
1581      if (!Dependencies::is_concrete_klass(ctxk)) {
1582        guarantee(NULL ==
1583                  (void *)check_abstract_with_no_concrete_subtype(ctxk),
1584                  "verify dep.");
1585      } else {
1586        guarantee(NULL ==
1587                  (void *)check_concrete_with_no_concrete_subtype(ctxk),
1588                  "verify dep.");
1589      }
1590    }
1591#endif //PRODUCT
1592    return ctxk;                   // Return ctxk as a flag for "no subtypes".
1593  } else {
1594#ifndef PRODUCT
1595    // Make sure the dependency mechanism will pass this discovery:
1596    if (VerifyDependencies) {
1597      // Turn off dependency tracing while actually testing deps.
1598      FlagSetting fs(TraceDependencies, false);
1599      if (!Dependencies::is_concrete_klass(ctxk)) {
1600        guarantee(NULL == (void *)
1601                  check_abstract_with_unique_concrete_subtype(ctxk, conck),
1602                  "verify dep.");
1603      }
1604    }
1605#endif //PRODUCT
1606    return conck;
1607  }
1608}
1609
1610// Test the assertion that the k[12] are the only concrete subtypes of ctxk,
1611// except possibly for further subtypes of k[12] themselves.
1612// The context type must be abstract.  The types k1 and k2 are themselves
1613// allowed to have further concrete subtypes.
1614Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes(
1615                                                Klass* ctxk,
1616                                                Klass* k1,
1617                                                Klass* k2,
1618                                                KlassDepChange* changes) {
1619  ClassHierarchyWalker wf;
1620  wf.add_participant(k1);
1621  wf.add_participant(k2);
1622  return wf.find_witness_subtype(ctxk, changes);
1623}
1624
1625// Search ctxk for concrete implementations.  If there are klen or fewer,
1626// pack them into the given array and return the number.
1627// Otherwise, return -1, meaning the given array would overflow.
1628// (Note that a return of 0 means there are exactly no concrete subtypes.)
1629// In this search, if ctxk is concrete, it will be reported alone.
1630// For any type CC reported, no proper subtypes of CC will be reported.
1631int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk,
1632                                                   int klen,
1633                                                   Klass* karray[]) {
1634  ClassHierarchyWalker wf;
1635  wf.record_witnesses(klen);
1636  Klass* wit = wf.find_witness_subtype(ctxk);
1637  if (wit != NULL)  return -1;  // Too many witnesses.
1638  int num = wf.num_participants();
1639  assert(num <= klen, "oob");
1640  // Pack the result array with the good news.
1641  for (int i = 0; i < num; i++)
1642    karray[i] = wf.participant(i);
1643#ifndef PRODUCT
1644  // Make sure the dependency mechanism will pass this discovery:
1645  if (VerifyDependencies) {
1646    // Turn off dependency tracing while actually testing deps.
1647    FlagSetting fs(TraceDependencies, false);
1648    switch (Dependencies::is_concrete_klass(ctxk)? -1: num) {
1649    case -1: // ctxk was itself concrete
1650      guarantee(num == 1 && karray[0] == ctxk, "verify dep.");
1651      break;
1652    case 0:
1653      guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk),
1654                "verify dep.");
1655      break;
1656    case 1:
1657      guarantee(NULL == (void *)
1658                check_abstract_with_unique_concrete_subtype(ctxk, karray[0]),
1659                "verify dep.");
1660      break;
1661    case 2:
1662      guarantee(NULL == (void *)
1663                check_abstract_with_exclusive_concrete_subtypes(ctxk,
1664                                                                karray[0],
1665                                                                karray[1]),
1666                "verify dep.");
1667      break;
1668    default:
1669      ShouldNotReachHere();  // klen > 2 yet supported
1670    }
1671  }
1672#endif //PRODUCT
1673  return num;
1674}
1675
1676// If a class (or interface) has a unique concrete method uniqm, return NULL.
1677// Otherwise, return a class that contains an interfering method.
1678Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm,
1679                                                    KlassDepChange* changes) {
1680  // Here is a missing optimization:  If uniqm->is_final(),
1681  // we don't really need to search beneath it for overrides.
1682  // This is probably not important, since we don't use dependencies
1683  // to track final methods.  (They can't be "definalized".)
1684  ClassHierarchyWalker wf(uniqm->method_holder(), uniqm);
1685  return wf.find_witness_definer(ctxk, changes);
1686}
1687
1688// Find the set of all non-abstract methods under ctxk that match m.
1689// (The method m must be defined or inherited in ctxk.)
1690// Include m itself in the set, unless it is abstract.
1691// If this set has exactly one element, return that element.
1692Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) {
1693  // Return NULL if m is marked old; must have been a redefined method.
1694  if (m->is_old()) {
1695    return NULL;
1696  }
1697  ClassHierarchyWalker wf(m);
1698  assert(wf.check_method_context(ctxk, m), "proper context");
1699  wf.record_witnesses(1);
1700  Klass* wit = wf.find_witness_definer(ctxk);
1701  if (wit != NULL)  return NULL;  // Too many witnesses.
1702  Method* fm = wf.found_method(0);  // Will be NULL if num_parts == 0.
1703  if (Dependencies::is_concrete_method(m, ctxk)) {
1704    if (fm == NULL) {
1705      // It turns out that m was always the only implementation.
1706      fm = m;
1707    } else if (fm != m) {
1708      // Two conflicting implementations after all.
1709      // (This can happen if m is inherited into ctxk and fm overrides it.)
1710      return NULL;
1711    }
1712  }
1713#ifndef PRODUCT
1714  // Make sure the dependency mechanism will pass this discovery:
1715  if (VerifyDependencies && fm != NULL) {
1716    guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
1717              "verify dep.");
1718  }
1719#endif //PRODUCT
1720  return fm;
1721}
1722
1723Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk,
1724                                                        Method* m1,
1725                                                        Method* m2,
1726                                                        KlassDepChange* changes) {
1727  ClassHierarchyWalker wf(m1);
1728  wf.add_participant(m1->method_holder());
1729  wf.add_participant(m2->method_holder());
1730  return wf.find_witness_definer(ctxk, changes);
1731}
1732
1733Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) {
1734  Klass* search_at = ctxk;
1735  if (changes != NULL)
1736    search_at = changes->new_type(); // just look at the new bit
1737  return find_finalizable_subclass(search_at);
1738}
1739
1740Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
1741  assert(!oopDesc::is_null(call_site), "sanity");
1742  assert(!oopDesc::is_null(method_handle), "sanity");
1743  assert(call_site->is_a(SystemDictionary::CallSite_klass()),     "sanity");
1744
1745  if (changes == NULL) {
1746    // Validate all CallSites
1747    if (java_lang_invoke_CallSite::target(call_site) != method_handle)
1748      return call_site->klass();  // assertion failed
1749  } else {
1750    // Validate the given CallSite
1751    if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
1752      assert(method_handle != changes->method_handle(), "must be");
1753      return call_site->klass();  // assertion failed
1754    }
1755  }
1756  return NULL;  // assertion still valid
1757}
1758
1759void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
1760  if (witness != NULL) {
1761    if (TraceDependencies) {
1762      print_dependency(witness, /*verbose=*/ true);
1763    }
1764    // The following is a no-op unless logging is enabled:
1765    log_dependency(witness);
1766  }
1767}
1768
1769
1770Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
1771  assert_locked_or_safepoint(Compile_lock);
1772  Dependencies::check_valid_dependency_type(type());
1773
1774  Klass* witness = NULL;
1775  switch (type()) {
1776  case evol_method:
1777    witness = check_evol_method(method_argument(0));
1778    break;
1779  case leaf_type:
1780    witness = check_leaf_type(context_type());
1781    break;
1782  case abstract_with_unique_concrete_subtype:
1783    witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
1784    break;
1785  case abstract_with_no_concrete_subtype:
1786    witness = check_abstract_with_no_concrete_subtype(context_type(), changes);
1787    break;
1788  case concrete_with_no_concrete_subtype:
1789    witness = check_concrete_with_no_concrete_subtype(context_type(), changes);
1790    break;
1791  case unique_concrete_method:
1792    witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
1793    break;
1794  case abstract_with_exclusive_concrete_subtypes_2:
1795    witness = check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes);
1796    break;
1797  case exclusive_concrete_methods_2:
1798    witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes);
1799    break;
1800  case no_finalizable_subclasses:
1801    witness = check_has_no_finalizable_subclasses(context_type(), changes);
1802    break;
1803  default:
1804    witness = NULL;
1805    break;
1806  }
1807  trace_and_log_witness(witness);
1808  return witness;
1809}
1810
1811
1812Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
1813  assert_locked_or_safepoint(Compile_lock);
1814  Dependencies::check_valid_dependency_type(type());
1815
1816  Klass* witness = NULL;
1817  switch (type()) {
1818  case call_site_target_value:
1819    witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
1820    break;
1821  default:
1822    witness = NULL;
1823    break;
1824  }
1825  trace_and_log_witness(witness);
1826  return witness;
1827}
1828
1829
1830Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
1831  // Handle klass dependency
1832  if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
1833    return check_klass_dependency(changes.as_klass_change());
1834
1835  // Handle CallSite dependency
1836  if (changes.is_call_site_change())
1837    return check_call_site_dependency(changes.as_call_site_change());
1838
1839  // irrelevant dependency; skip it
1840  return NULL;
1841}
1842
1843
1844void DepChange::print() {
1845  int nsup = 0, nint = 0;
1846  for (ContextStream str(*this); str.next(); ) {
1847    Klass* k = str.klass();
1848    switch (str.change_type()) {
1849    case Change_new_type:
1850      tty->print_cr("  dependee = %s", k->external_name());
1851      break;
1852    case Change_new_sub:
1853      if (!WizardMode) {
1854        ++nsup;
1855      } else {
1856        tty->print_cr("  context super = %s", k->external_name());
1857      }
1858      break;
1859    case Change_new_impl:
1860      if (!WizardMode) {
1861        ++nint;
1862      } else {
1863        tty->print_cr("  context interface = %s", k->external_name());
1864      }
1865      break;
1866    }
1867  }
1868  if (nsup + nint != 0) {
1869    tty->print_cr("  context supers = %d, interfaces = %d", nsup, nint);
1870  }
1871}
1872
1873void DepChange::ContextStream::start() {
1874  Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL;
1875  _change_type = (new_type == NULL ? NO_CHANGE : Start_Klass);
1876  _klass = new_type;
1877  _ti_base = NULL;
1878  _ti_index = 0;
1879  _ti_limit = 0;
1880}
1881
1882bool DepChange::ContextStream::next() {
1883  switch (_change_type) {
1884  case Start_Klass:             // initial state; _klass is the new type
1885    _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
1886    _ti_index = 0;
1887    _change_type = Change_new_type;
1888    return true;
1889  case Change_new_type:
1890    // fall through:
1891    _change_type = Change_new_sub;
1892  case Change_new_sub:
1893    // 6598190: brackets workaround Sun Studio C++ compiler bug 6629277
1894    {
1895      _klass = _klass->super();
1896      if (_klass != NULL) {
1897        return true;
1898      }
1899    }
1900    // else set up _ti_limit and fall through:
1901    _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
1902    _change_type = Change_new_impl;
1903  case Change_new_impl:
1904    if (_ti_index < _ti_limit) {
1905      _klass = _ti_base->at(_ti_index++);
1906      return true;
1907    }
1908    // fall through:
1909    _change_type = NO_CHANGE;  // iterator is exhausted
1910  case NO_CHANGE:
1911    break;
1912  default:
1913    ShouldNotReachHere();
1914  }
1915  return false;
1916}
1917
1918void KlassDepChange::initialize() {
1919  // entire transaction must be under this lock:
1920  assert_lock_strong(Compile_lock);
1921
1922  // Mark all dependee and all its superclasses
1923  // Mark transitive interfaces
1924  for (ContextStream str(*this); str.next(); ) {
1925    Klass* d = str.klass();
1926    assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
1927    InstanceKlass::cast(d)->set_is_marked_dependent(true);
1928  }
1929}
1930
1931KlassDepChange::~KlassDepChange() {
1932  // Unmark all dependee and all its superclasses
1933  // Unmark transitive interfaces
1934  for (ContextStream str(*this); str.next(); ) {
1935    Klass* d = str.klass();
1936    InstanceKlass::cast(d)->set_is_marked_dependent(false);
1937  }
1938}
1939
1940bool KlassDepChange::involves_context(Klass* k) {
1941  if (k == NULL || !k->is_instance_klass()) {
1942    return false;
1943  }
1944  InstanceKlass* ik = InstanceKlass::cast(k);
1945  bool is_contained = ik->is_marked_dependent();
1946  assert(is_contained == new_type()->is_subtype_of(k),
1947         "correct marking of potential context types");
1948  return is_contained;
1949}
1950
1951#ifndef PRODUCT
1952void Dependencies::print_statistics() {
1953  if (deps_find_witness_print != 0) {
1954    // Call one final time, to flush out the data.
1955    deps_find_witness_print = -1;
1956    count_find_witness_calls();
1957  }
1958}
1959#endif
1960
1961CallSiteDepChange::CallSiteDepChange(Handle call_site, Handle method_handle) :
1962  _call_site(call_site),
1963  _method_handle(method_handle) {
1964  assert(_call_site()->is_a(SystemDictionary::CallSite_klass()), "must be");
1965  assert(_method_handle.is_null() || _method_handle()->is_a(SystemDictionary::MethodHandle_klass()), "must be");
1966}
1967