dependencies.cpp revision 6402:2377269bd73d
1/*
2 * Copyright (c) 2005, 2013, Oracle and/or its affiliates. All rights reserved.
3 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
4 *
5 * This code is free software; you can redistribute it and/or modify it
6 * under the terms of the GNU General Public License version 2 only, as
7 * published by the Free Software Foundation.
8 *
9 * This code is distributed in the hope that it will be useful, but WITHOUT
10 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
11 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
12 * version 2 for more details (a copy is included in the LICENSE file that
13 * accompanied this code).
14 *
15 * You should have received a copy of the GNU General Public License version
16 * 2 along with this work; if not, write to the Free Software Foundation,
17 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
18 *
19 * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
20 * or visit www.oracle.com if you need additional information or have any
21 * questions.
22 *
23 */
24
25#include "precompiled.hpp"
26#include "ci/ciArrayKlass.hpp"
27#include "ci/ciEnv.hpp"
28#include "ci/ciKlass.hpp"
29#include "ci/ciMethod.hpp"
30#include "code/dependencies.hpp"
31#include "compiler/compileLog.hpp"
32#include "oops/oop.inline.hpp"
33#include "runtime/handles.hpp"
34#include "runtime/handles.inline.hpp"
35#include "runtime/thread.inline.hpp"
36#include "utilities/copy.hpp"
37
38
39#ifdef ASSERT
40static bool must_be_in_vm() {
41  Thread* thread = Thread::current();
42  if (thread->is_Java_thread())
43    return ((JavaThread*)thread)->thread_state() == _thread_in_vm;
44  else
45    return true;  //something like this: thread->is_VM_thread();
46}
47#endif //ASSERT
48
49void Dependencies::initialize(ciEnv* env) {
50  Arena* arena = env->arena();
51  _oop_recorder = env->oop_recorder();
52  _log = env->log();
53  _dep_seen = new(arena) GrowableArray<int>(arena, 500, 0, 0);
54  DEBUG_ONLY(_deps[end_marker] = NULL);
55  for (int i = (int)FIRST_TYPE; i < (int)TYPE_LIMIT; i++) {
56    _deps[i] = new(arena) GrowableArray<ciBaseObject*>(arena, 10, 0, 0);
57  }
58  _content_bytes = NULL;
59  _size_in_bytes = (size_t)-1;
60
61  assert(TYPE_LIMIT <= (1<<LG2_TYPE_LIMIT), "sanity");
62}
63
64void Dependencies::assert_evol_method(ciMethod* m) {
65  assert_common_1(evol_method, m);
66}
67
68void Dependencies::assert_leaf_type(ciKlass* ctxk) {
69  if (ctxk->is_array_klass()) {
70    // As a special case, support this assertion on an array type,
71    // which reduces to an assertion on its element type.
72    // Note that this cannot be done with assertions that
73    // relate to concreteness or abstractness.
74    ciType* elemt = ctxk->as_array_klass()->base_element_type();
75    if (!elemt->is_instance_klass())  return;   // Ex:  int[][]
76    ctxk = elemt->as_instance_klass();
77    //if (ctxk->is_final())  return;            // Ex:  String[][]
78  }
79  check_ctxk(ctxk);
80  assert_common_1(leaf_type, ctxk);
81}
82
83void Dependencies::assert_abstract_with_unique_concrete_subtype(ciKlass* ctxk, ciKlass* conck) {
84  check_ctxk_abstract(ctxk);
85  assert_common_2(abstract_with_unique_concrete_subtype, ctxk, conck);
86}
87
88void Dependencies::assert_abstract_with_no_concrete_subtype(ciKlass* ctxk) {
89  check_ctxk_abstract(ctxk);
90  assert_common_1(abstract_with_no_concrete_subtype, ctxk);
91}
92
93void Dependencies::assert_concrete_with_no_concrete_subtype(ciKlass* ctxk) {
94  check_ctxk_concrete(ctxk);
95  assert_common_1(concrete_with_no_concrete_subtype, ctxk);
96}
97
98void Dependencies::assert_unique_concrete_method(ciKlass* ctxk, ciMethod* uniqm) {
99  check_ctxk(ctxk);
100  assert_common_2(unique_concrete_method, ctxk, uniqm);
101}
102
103void Dependencies::assert_abstract_with_exclusive_concrete_subtypes(ciKlass* ctxk, ciKlass* k1, ciKlass* k2) {
104  check_ctxk(ctxk);
105  assert_common_3(abstract_with_exclusive_concrete_subtypes_2, ctxk, k1, k2);
106}
107
108void Dependencies::assert_exclusive_concrete_methods(ciKlass* ctxk, ciMethod* m1, ciMethod* m2) {
109  check_ctxk(ctxk);
110  assert_common_3(exclusive_concrete_methods_2, ctxk, m1, m2);
111}
112
113void Dependencies::assert_has_no_finalizable_subclasses(ciKlass* ctxk) {
114  check_ctxk(ctxk);
115  assert_common_1(no_finalizable_subclasses, ctxk);
116}
117
118void Dependencies::assert_call_site_target_value(ciCallSite* call_site, ciMethodHandle* method_handle) {
119  check_ctxk(call_site->klass());
120  assert_common_2(call_site_target_value, call_site, method_handle);
121}
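
// Usage sketch (not code from this file): a compiler that speculates on the
// class hierarchy records its assumptions through the assert_* calls above,
// e.g. given a Dependencies* "deps" owned by the current compilation
// (variable names here are illustrative only):
//
//   deps->assert_leaf_type(receiver_klass);
//   deps->assert_unique_concrete_method(ctxk, target_method);
//
// The assertions are only buffered at this point; encode_content_bytes() and
// copy_to() below serialize them into the nmethod, and the checking code
// further down re-validates them whenever the class hierarchy changes.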
122
123// Helper function.  If we are adding a new dep. under ctxk2,
124// try to find an old dep. under a broader* ctxk1.  If there is one, the two
125// are merged (widening the recorded context to ctxk2 if needed) and no new entry is appended.
126bool Dependencies::maybe_merge_ctxk(GrowableArray<ciBaseObject*>* deps,
127                                    int ctxk_i, ciKlass* ctxk2) {
128  ciKlass* ctxk1 = deps->at(ctxk_i)->as_metadata()->as_klass();
129  if (ctxk2->is_subtype_of(ctxk1)) {
130    return true;  // success, and no need to change
131  } else if (ctxk1->is_subtype_of(ctxk2)) {
132    // new context class fully subsumes previous one
133    deps->at_put(ctxk_i, ctxk2);
134    return true;
135  } else {
136    return false;
137  }
138}
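
// Worked example (illustrative classes): if an entry was recorded with
// context java/util/AbstractList and the same subject is asserted again under
// java/util/ArrayList (a subtype), the existing broader entry already covers
// it and maybe_merge_ctxk returns true without changes.  If the old context
// were ArrayList and the new one AbstractList, the recorded context is
// widened to AbstractList.  Unrelated contexts are left alone and false is
// returned, so the caller appends a separate entry.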
139
140void Dependencies::assert_common_1(DepType dept, ciBaseObject* x) {
141  assert(dep_args(dept) == 1, "sanity");
142  log_dependency(dept, x);
143  GrowableArray<ciBaseObject*>* deps = _deps[dept];
144
145  // see if the same (or a similar) dep is already recorded
146  if (note_dep_seen(dept, x)) {
147    assert(deps->find(x) >= 0, "sanity");
148  } else {
149    deps->append(x);
150  }
151}
152
153void Dependencies::assert_common_2(DepType dept,
154                                   ciBaseObject* x0, ciBaseObject* x1) {
155  assert(dep_args(dept) == 2, "sanity");
156  log_dependency(dept, x0, x1);
157  GrowableArray<ciBaseObject*>* deps = _deps[dept];
158
159  // see if the same (or a similar) dep is already recorded
160  bool has_ctxk = has_explicit_context_arg(dept);
161  if (has_ctxk) {
162    assert(dep_context_arg(dept) == 0, "sanity");
163    if (note_dep_seen(dept, x1)) {
164      // look in this bucket for redundant assertions
165      const int stride = 2;
166      for (int i = deps->length(); (i -= stride) >= 0; ) {
167        ciBaseObject* y1 = deps->at(i+1);
168        if (x1 == y1) {  // same subject; check the context
169          if (maybe_merge_ctxk(deps, i+0, x0->as_metadata()->as_klass())) {
170            return;
171          }
172        }
173      }
174    }
175  } else {
176    assert(dep_implicit_context_arg(dept) == 0, "sanity");
177    if (note_dep_seen(dept, x0) && note_dep_seen(dept, x1)) {
178      // look in this bucket for redundant assertions
179      const int stride = 2;
180      for (int i = deps->length(); (i -= stride) >= 0; ) {
181        ciBaseObject* y0 = deps->at(i+0);
182        ciBaseObject* y1 = deps->at(i+1);
183        if (x0 == y0 && x1 == y1) {
184          return;
185        }
186      }
187    }
188  }
189
190  // append the assertion in the correct bucket:
191  deps->append(x0);
192  deps->append(x1);
193}
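
// Note on storage: each DepType has its own bucket (_deps[dept]) holding a
// flat sequence of dep_args(dept)-sized groups, so a two-argument assertion
// occupies two consecutive slots.  note_dep_seen() is a cheap per-argument
// filter over dependency types already recorded; the linear duplicate scan
// above only runs when it reports a possible repeat.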
194
195void Dependencies::assert_common_3(DepType dept,
196                                   ciKlass* ctxk, ciBaseObject* x, ciBaseObject* x2) {
197  assert(dep_context_arg(dept) == 0, "sanity");
198  assert(dep_args(dept) == 3, "sanity");
199  log_dependency(dept, ctxk, x, x2);
200  GrowableArray<ciBaseObject*>* deps = _deps[dept];
201
202  // try to normalize an unordered pair:
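  // (The two non-context arguments are interchangeable, so they are stored in
  // a canonical order by ident.  The argument that is ctxk itself, or whose
  // holder is ctxk, is kept first so the context compression performed in
  // encode_content_bytes() can still elide the context.)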
203  bool swap = false;
204  switch (dept) {
205  case abstract_with_exclusive_concrete_subtypes_2:
206    swap = (x->ident() > x2->ident() && x->as_metadata()->as_klass() != ctxk);
207    break;
208  case exclusive_concrete_methods_2:
209    swap = (x->ident() > x2->ident() && x->as_metadata()->as_method()->holder() != ctxk);
210    break;
211  }
212  if (swap) { ciBaseObject* t = x; x = x2; x2 = t; }
213
214  // see if the same (or a similar) dep is already recorded
215  if (note_dep_seen(dept, x) && note_dep_seen(dept, x2)) {
216    // look in this bucket for redundant assertions
217    const int stride = 3;
218    for (int i = deps->length(); (i -= stride) >= 0; ) {
219      ciBaseObject* y  = deps->at(i+1);
220      ciBaseObject* y2 = deps->at(i+2);
221      if (x == y && x2 == y2) {  // same subjects; check the context
222        if (maybe_merge_ctxk(deps, i+0, ctxk)) {
223          return;
224        }
225      }
226    }
227  }
228  // append the assertion in the correct bucket:
229  deps->append(ctxk);
230  deps->append(x);
231  deps->append(x2);
232}
233
234/// Support for encoding dependencies into an nmethod:
235
236void Dependencies::copy_to(nmethod* nm) {
237  address beg = nm->dependencies_begin();
238  address end = nm->dependencies_end();
239  guarantee(end - beg >= (ptrdiff_t) size_in_bytes(), "bad sizing");
240  Copy::disjoint_words((HeapWord*) content_bytes(),
241                       (HeapWord*) beg,
242                       size_in_bytes() / sizeof(HeapWord));
243  assert(size_in_bytes() % sizeof(HeapWord) == 0, "copy by words");
244}
245
246static int sort_dep(ciBaseObject** p1, ciBaseObject** p2, int narg) {
247  for (int i = 0; i < narg; i++) {
248    int diff = p1[i]->ident() - p2[i]->ident();
249    if (diff != 0)  return diff;
250  }
251  return 0;
252}
253static int sort_dep_arg_1(ciBaseObject** p1, ciBaseObject** p2)
254{ return sort_dep(p1, p2, 1); }
255static int sort_dep_arg_2(ciBaseObject** p1, ciBaseObject** p2)
256{ return sort_dep(p1, p2, 2); }
257static int sort_dep_arg_3(ciBaseObject** p1, ciBaseObject** p2)
258{ return sort_dep(p1, p2, 3); }
259
260void Dependencies::sort_all_deps() {
261  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
262    DepType dept = (DepType)deptv;
263    GrowableArray<ciBaseObject*>* deps = _deps[dept];
264    if (deps->length() <= 1)  continue;
265    switch (dep_args(dept)) {
266    case 1: deps->sort(sort_dep_arg_1, 1); break;
267    case 2: deps->sort(sort_dep_arg_2, 2); break;
268    case 3: deps->sort(sort_dep_arg_3, 3); break;
269    default: ShouldNotReachHere();
270    }
271  }
272}
273
274size_t Dependencies::estimate_size_in_bytes() {
275  size_t est_size = 100;
276  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
277    DepType dept = (DepType)deptv;
278    GrowableArray<ciBaseObject*>* deps = _deps[dept];
279    est_size += deps->length()*2;  // tags and argument(s)
280  }
281  return est_size;
282}
283
284ciKlass* Dependencies::ctxk_encoded_as_null(DepType dept, ciBaseObject* x) {
285  switch (dept) {
286  case abstract_with_exclusive_concrete_subtypes_2:
287    return x->as_metadata()->as_klass();
288  case unique_concrete_method:
289  case exclusive_concrete_methods_2:
290    return x->as_metadata()->as_method()->holder();
291  }
292  return NULL;  // let NULL be NULL
293}
294
295Klass* Dependencies::ctxk_encoded_as_null(DepType dept, Metadata* x) {
296  assert(must_be_in_vm(), "raw oops here");
297  switch (dept) {
298  case abstract_with_exclusive_concrete_subtypes_2:
299    assert(x->is_klass(), "sanity");
300    return (Klass*) x;
301  case unique_concrete_method:
302  case exclusive_concrete_methods_2:
303    assert(x->is_method(), "sanity");
304    return ((Method*)x)->method_holder();
305  }
306  return NULL;  // let NULL be NULL
307}
308
309void Dependencies::encode_content_bytes() {
310  sort_all_deps();
311
312  // cast is safe, no deps can overflow INT_MAX
313  CompressedWriteStream bytes((int)estimate_size_in_bytes());
314
315  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
316    DepType dept = (DepType)deptv;
317    GrowableArray<ciBaseObject*>* deps = _deps[dept];
318    if (deps->length() == 0)  continue;
319    int stride = dep_args(dept);
320    int ctxkj  = dep_context_arg(dept);  // -1 if no context arg
321    assert(stride > 0, "sanity");
322    for (int i = 0; i < deps->length(); i += stride) {
323      jbyte code_byte = (jbyte)dept;
324      int skipj = -1;
325      if (ctxkj >= 0 && ctxkj+1 < stride) {
326        ciKlass*  ctxk = deps->at(i+ctxkj+0)->as_metadata()->as_klass();
327        ciBaseObject* x     = deps->at(i+ctxkj+1);  // following argument
328        if (ctxk == ctxk_encoded_as_null(dept, x)) {
329          skipj = ctxkj;  // we win:  maybe one less oop to keep track of
330          code_byte |= default_context_type_bit;
331        }
332      }
333      bytes.write_byte(code_byte);
334      for (int j = 0; j < stride; j++) {
335        if (j == skipj)  continue;
336        ciBaseObject* v = deps->at(i+j);
337        int idx;
338        if (v->is_object()) {
339          idx = _oop_recorder->find_index(v->as_object()->constant_encoding());
340        } else {
341          ciMetadata* meta = v->as_metadata();
342          idx = _oop_recorder->find_index(meta->constant_encoding());
343        }
344        bytes.write_int(idx);
345      }
346    }
347  }
348
349  // write a sentinel byte to mark the end
350  bytes.write_byte(end_marker);
351
352  // round it out to a word boundary
353  while (bytes.position() % sizeof(HeapWord) != 0) {
354    bytes.write_byte(end_marker);
355  }
356
357  // check whether the dept byte encoding really works
358  assert((jbyte)default_context_type_bit != 0, "byte overflow");
359
360  _content_bytes = bytes.buffer();
361  _size_in_bytes = bytes.position();
362}
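
// Resulting layout: each assertion is written as one tag byte holding its
// DepType, OR'ed with default_context_type_bit when the context klass can be
// recomputed from the argument that follows it (and is therefore omitted),
// followed by one compressed int per remaining argument, each an index into
// the OopRecorder.  A single end_marker byte terminates the stream, and more
// end_marker bytes pad it to a HeapWord boundary so copy_to() can use
// word-at-a-time copying.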
363
364
365const char* Dependencies::_dep_name[TYPE_LIMIT] = {
366  "end_marker",
367  "evol_method",
368  "leaf_type",
369  "abstract_with_unique_concrete_subtype",
370  "abstract_with_no_concrete_subtype",
371  "concrete_with_no_concrete_subtype",
372  "unique_concrete_method",
373  "abstract_with_exclusive_concrete_subtypes_2",
374  "exclusive_concrete_methods_2",
375  "no_finalizable_subclasses",
376  "call_site_target_value"
377};
378
379int Dependencies::_dep_args[TYPE_LIMIT] = {
380  -1,// end_marker
381  1, // evol_method m
382  1, // leaf_type ctxk
383  2, // abstract_with_unique_concrete_subtype ctxk, k
384  1, // abstract_with_no_concrete_subtype ctxk
385  1, // concrete_with_no_concrete_subtype ctxk
386  2, // unique_concrete_method ctxk, m
387  3, // abstract_with_exclusive_concrete_subtypes_2 ctxk, k1, k2
388  3, // exclusive_concrete_methods_2 ctxk, m1, m2
389  1, // no_finalizable_subclasses ctxk
390  2  // call_site_target_value call_site, method_handle
391};
392
393const char* Dependencies::dep_name(Dependencies::DepType dept) {
394  if (!dept_in_mask(dept, all_types))  return "?bad-dep?";
395  return _dep_name[dept];
396}
397
398int Dependencies::dep_args(Dependencies::DepType dept) {
399  if (!dept_in_mask(dept, all_types))  return -1;
400  return _dep_args[dept];
401}
402
403void Dependencies::check_valid_dependency_type(DepType dept) {
404  guarantee(FIRST_TYPE <= dept && dept < TYPE_LIMIT, err_msg("invalid dependency type: %d", (int) dept));
405}
406
407// for the sake of the compiler log, print out current dependencies:
408void Dependencies::log_all_dependencies() {
409  if (log() == NULL)  return;
410  ciBaseObject* args[max_arg_count];
411  for (int deptv = (int)FIRST_TYPE; deptv < (int)TYPE_LIMIT; deptv++) {
412    DepType dept = (DepType)deptv;
413    GrowableArray<ciBaseObject*>* deps = _deps[dept];
414    if (deps->length() == 0)  continue;
415    int stride = dep_args(dept);
416    for (int i = 0; i < deps->length(); i += stride) {
417      for (int j = 0; j < stride; j++) {
418        // flush out the identities before printing
419        args[j] = deps->at(i+j);
420      }
421      write_dependency_to(log(), dept, stride, args);
422    }
423  }
424}
425
426void Dependencies::write_dependency_to(CompileLog* log,
427                                       DepType dept,
428                                       int nargs, DepArgument args[],
429                                       Klass* witness) {
430  if (log == NULL) {
431    return;
432  }
433  ciEnv* env = ciEnv::current();
434  ciBaseObject* ciargs[max_arg_count];
435  assert(nargs <= max_arg_count, "oob");
436  for (int j = 0; j < nargs; j++) {
437    if (args[j].is_oop()) {
438      ciargs[j] = env->get_object(args[j].oop_value());
439    } else {
440      ciargs[j] = env->get_metadata(args[j].metadata_value());
441    }
442  }
443  Dependencies::write_dependency_to(log, dept, nargs, ciargs, witness);
444}
445
446void Dependencies::write_dependency_to(CompileLog* log,
447                                       DepType dept,
448                                       int nargs, ciBaseObject* args[],
449                                       Klass* witness) {
450  if (log == NULL)  return;
451  assert(nargs <= max_arg_count, "oob");
452  int argids[max_arg_count];
453  int ctxkj = dep_context_arg(dept);  // -1 if no context arg
454  int j;
455  for (j = 0; j < nargs; j++) {
456    if (args[j]->is_object()) {
457      argids[j] = log->identify(args[j]->as_object());
458    } else {
459      argids[j] = log->identify(args[j]->as_metadata());
460    }
461  }
462  if (witness != NULL) {
463    log->begin_elem("dependency_failed");
464  } else {
465    log->begin_elem("dependency");
466  }
467  log->print(" type='%s'", dep_name(dept));
468  if (ctxkj >= 0) {
469    log->print(" ctxk='%d'", argids[ctxkj]);
470  }
471  // write remaining arguments, if any.
472  for (j = 0; j < nargs; j++) {
473    if (j == ctxkj)  continue;  // already logged
474    if (j == 1) {
475      log->print(  " x='%d'",    argids[j]);
476    } else {
477      log->print(" x%d='%d'", j, argids[j]);
478    }
479  }
480  if (witness != NULL) {
481    log->object("witness", witness);
482    log->stamp();
483  }
484  log->end_elem();
485}
486
487void Dependencies::write_dependency_to(xmlStream* xtty,
488                                       DepType dept,
489                                       int nargs, DepArgument args[],
490                                       Klass* witness) {
491  if (xtty == NULL)  return;
492  ttyLocker ttyl;
493  int ctxkj = dep_context_arg(dept);  // -1 if no context arg
494  if (witness != NULL) {
495    xtty->begin_elem("dependency_failed");
496  } else {
497    xtty->begin_elem("dependency");
498  }
499  xtty->print(" type='%s'", dep_name(dept));
500  if (ctxkj >= 0) {
501    xtty->object("ctxk", args[ctxkj].metadata_value());
502  }
503  // write remaining arguments, if any.
504  for (int j = 0; j < nargs; j++) {
505    if (j == ctxkj)  continue;  // already logged
506    if (j == 1) {
507      if (args[j].is_oop()) {
508        xtty->object("x", args[j].oop_value());
509      } else {
510        xtty->object("x", args[j].metadata_value());
511      }
512    } else {
513      char xn[10]; sprintf(xn, "x%d", j);
514      if (args[j].is_oop()) {
515        xtty->object(xn, args[j].oop_value());
516      } else {
517        xtty->object(xn, args[j].metadata_value());
518      }
519    }
520  }
521  if (witness != NULL) {
522    xtty->object("witness", witness);
523    xtty->stamp();
524  }
525  xtty->end_elem();
526}
527
528void Dependencies::print_dependency(DepType dept, int nargs, DepArgument args[],
529                                    Klass* witness) {
530  ResourceMark rm;
531  ttyLocker ttyl;   // keep the following output all in one block
532  tty->print_cr("%s of type %s",
533                (witness == NULL)? "Dependency": "Failed dependency",
534                dep_name(dept));
535  // print arguments
536  int ctxkj = dep_context_arg(dept);  // -1 if no context arg
537  for (int j = 0; j < nargs; j++) {
538    DepArgument arg = args[j];
539    bool put_star = false;
540    if (arg.is_null())  continue;
541    const char* what;
542    if (j == ctxkj) {
543      assert(arg.is_metadata(), "must be");
544      what = "context";
545      put_star = !Dependencies::is_concrete_klass((Klass*)arg.metadata_value());
546    } else if (arg.is_method()) {
547      what = "method ";
548      put_star = !Dependencies::is_concrete_method((Method*)arg.metadata_value());
549    } else if (arg.is_klass()) {
550      what = "class  ";
551    } else {
552      what = "object ";
553    }
554    tty->print("  %s = %s", what, (put_star? "*": ""));
555    if (arg.is_klass())
556      tty->print("%s", ((Klass*)arg.metadata_value())->external_name());
557    else if (arg.is_method())
558      ((Method*)arg.metadata_value())->print_value();
559    else
560      ShouldNotReachHere(); // Provide impl for this type.
561    tty->cr();
562  }
563  if (witness != NULL) {
564    bool put_star = !Dependencies::is_concrete_klass(witness);
565    tty->print_cr("  witness = %s%s",
566                  (put_star? "*": ""),
567                  witness->external_name());
568  }
569}
570
571void Dependencies::DepStream::log_dependency(Klass* witness) {
572  if (_deps == NULL && xtty == NULL)  return;  // fast cutout for runtime
573  ResourceMark rm;
574  int nargs = argument_count();
575  DepArgument args[max_arg_count];
576  for (int j = 0; j < nargs; j++) {
577    if (type() == call_site_target_value) {
578      args[j] = argument_oop(j);
579    } else {
580      args[j] = argument(j);
581    }
582  }
583  if (_deps != NULL && _deps->log() != NULL) {
584    Dependencies::write_dependency_to(_deps->log(),
585                                      type(), nargs, args, witness);
586  } else {
587    Dependencies::write_dependency_to(xtty,
588                                      type(), nargs, args, witness);
589  }
590}
591
592void Dependencies::DepStream::print_dependency(Klass* witness, bool verbose) {
593  int nargs = argument_count();
594  DepArgument args[max_arg_count];
595  for (int j = 0; j < nargs; j++) {
596    args[j] = argument(j);
597  }
598  Dependencies::print_dependency(type(), nargs, args, witness);
599  if (verbose) {
600    if (_code != NULL) {
601      tty->print("  code: ");
602      _code->print_value_on(tty);
603      tty->cr();
604    }
605  }
606}
607
608
609/// Dependency stream support (decodes dependencies from an nmethod):
610
611#ifdef ASSERT
612void Dependencies::DepStream::initial_asserts(size_t byte_limit) {
613  assert(must_be_in_vm(), "raw oops here");
614  _byte_limit = byte_limit;
615  _type       = (DepType)(end_marker-1);  // defeat "already at end" assert
616  assert((_code!=NULL) + (_deps!=NULL) == 1, "one or t'other");
617}
618#endif //ASSERT
619
620bool Dependencies::DepStream::next() {
621  assert(_type != end_marker, "already at end");
622  if (_bytes.position() == 0 && _code != NULL
623      && _code->dependencies_size() == 0) {
624    // Method has no dependencies at all.
625    return false;
626  }
627  int code_byte = (_bytes.read_byte() & 0xFF);
628  if (code_byte == end_marker) {
629    DEBUG_ONLY(_type = end_marker);
630    return false;
631  } else {
632    int ctxk_bit = (code_byte & Dependencies::default_context_type_bit);
633    code_byte -= ctxk_bit;
634    DepType dept = (DepType)code_byte;
635    _type = dept;
636    Dependencies::check_valid_dependency_type(dept);
637    int stride = _dep_args[dept];
638    assert(stride == dep_args(dept), "sanity");
639    int skipj = -1;
640    if (ctxk_bit != 0) {
641      skipj = 0;  // currently the only context argument is at zero
642      assert(skipj == dep_context_arg(dept), "zero arg always ctxk");
643    }
644    for (int j = 0; j < stride; j++) {
645      _xi[j] = (j == skipj)? 0: _bytes.read_int();
646    }
647    DEBUG_ONLY(_xi[stride] = -1);   // help detect overruns
648    return true;
649  }
650}
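
// Typical decode loop (sketch of intended use, not code from this file):
//
//   for (Dependencies::DepStream deps(nm); deps.next(); ) {
//     Klass* witness = deps.check_dependency();
//     if (witness != NULL)  { /* nm's assumption is broken; invalidate */ }
//   }
//
// where check_dependency() dispatches to the checking routines below.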
651
652inline Metadata* Dependencies::DepStream::recorded_metadata_at(int i) {
653  Metadata* o = NULL;
654  if (_code != NULL) {
655    o = _code->metadata_at(i);
656  } else {
657    o = _deps->oop_recorder()->metadata_at(i);
658  }
659  return o;
660}
661
662inline oop Dependencies::DepStream::recorded_oop_at(int i) {
663  return (_code != NULL)
664         ? _code->oop_at(i)
665    : JNIHandles::resolve(_deps->oop_recorder()->oop_at(i));
666}
667
668Metadata* Dependencies::DepStream::argument(int i) {
669  Metadata* result = recorded_metadata_at(argument_index(i));
670
671  if (result == NULL) { // Explicit context argument can be compressed
672    int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
673    if (ctxkj >= 0 && i == ctxkj && ctxkj+1 < argument_count()) {
674      result = ctxk_encoded_as_null(type(), argument(ctxkj+1));
675    }
676  }
677
678  assert(result == NULL || result->is_klass() || result->is_method(), "must be");
679  return result;
680}
681
682/**
683 * Returns a unique identifier for each dependency argument.
684 */
685uintptr_t Dependencies::DepStream::get_identifier(int i) {
686  if (has_oop_argument()) {
687    return (uintptr_t)(oopDesc*)argument_oop(i);
688  } else {
689    return (uintptr_t)argument(i);
690  }
691}
692
693oop Dependencies::DepStream::argument_oop(int i) {
694  oop result = recorded_oop_at(argument_index(i));
695  assert(result == NULL || result->is_oop(), "must be");
696  return result;
697}
698
699Klass* Dependencies::DepStream::context_type() {
700  assert(must_be_in_vm(), "raw oops here");
701
702  // Most dependencies have an explicit context type argument.
703  {
704    int ctxkj = dep_context_arg(type());  // -1 if no explicit context arg
705    if (ctxkj >= 0) {
706      Metadata* k = argument(ctxkj);
707      assert(k != NULL && k->is_klass(), "type check");
708      return (Klass*)k;
709    }
710  }
711
712  // Some dependencies are using the klass of the first object
713  // argument as implicit context type (e.g. call_site_target_value).
714  {
715    int ctxkj = dep_implicit_context_arg(type());
716    if (ctxkj >= 0) {
717      Klass* k = argument_oop(ctxkj)->klass();
718      assert(k != NULL && k->is_klass(), "type check");
719      return (Klass*) k;
720    }
721  }
722
723  // And some dependencies don't have a context type at all,
724  // e.g. evol_method.
725  return NULL;
726}
727
728// ----------------- DependencySignature --------------------------------------
729bool DependencySignature::equals(DependencySignature const& s1, DependencySignature const& s2) {
730  if ((s1.type() != s2.type()) || (s1.args_count() != s2.args_count())) {
731    return false;
732  }
733
734  for (int i = 0; i < s1.args_count(); i++) {
735    if (s1.arg(i) != s2.arg(i)) {
736      return false;
737    }
738  }
739  return true;
740}
741
742/// Checking dependencies:
743
744// This hierarchy walker inspects subtypes of a given type,
745// trying to find a "bad" class which breaks a dependency.
746// Such a class is called a "witness" to the broken dependency.
747// While searching around, we ignore "participants", which
748// are already known to the dependency.
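//
// Example (illustrative types): for
// check_abstract_with_unique_concrete_subtype(AbstractList, ArrayList), the
// walker is created with ArrayList as its participant and searches the
// subtypes of AbstractList.  Any other concrete subtype that is not beneath
// ArrayList breaks the assumption and is returned as the witness.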
749class ClassHierarchyWalker {
750 public:
751  enum { PARTICIPANT_LIMIT = 3 };
752
753 private:
754  // optional method descriptor to check for:
755  Symbol* _name;
756  Symbol* _signature;
757
758  // special classes which are not allowed to be witnesses:
759  Klass*    _participants[PARTICIPANT_LIMIT+1];
760  int       _num_participants;
761
762  // cache of method lookups
763  Method* _found_methods[PARTICIPANT_LIMIT+1];
764
765  // if non-zero, tells how many witnesses to convert to participants
766  int       _record_witnesses;
767
768  void initialize(Klass* participant) {
769    _record_witnesses = 0;
770    _participants[0]  = participant;
771    _found_methods[0] = NULL;
772    _num_participants = 0;
773    if (participant != NULL) {
774      // Terminating NULL.
775      _participants[1] = NULL;
776      _found_methods[1] = NULL;
777      _num_participants = 1;
778    }
779  }
780
781  void initialize_from_method(Method* m) {
782    assert(m != NULL && m->is_method(), "sanity");
783    _name      = m->name();
784    _signature = m->signature();
785  }
786
787 public:
788  // The walker is initialized to recognize certain methods and/or types
789  // as friendly participants.
790  ClassHierarchyWalker(Klass* participant, Method* m) {
791    initialize_from_method(m);
792    initialize(participant);
793  }
794  ClassHierarchyWalker(Method* m) {
795    initialize_from_method(m);
796    initialize(NULL);
797  }
798  ClassHierarchyWalker(Klass* participant = NULL) {
799    _name      = NULL;
800    _signature = NULL;
801    initialize(participant);
802  }
803
804  // This is common code for two searches:  One for concrete subtypes,
805  // the other for concrete method implementations and overrides.
806  bool doing_subtype_search() {
807    return _name == NULL;
808  }
809
810  int num_participants() { return _num_participants; }
811  Klass* participant(int n) {
812    assert((uint)n <= (uint)_num_participants, "oob");
813    return _participants[n];
814  }
815
816  // Note:  If n==num_participants, returns NULL.
817  Method* found_method(int n) {
818    assert((uint)n <= (uint)_num_participants, "oob");
819    Method* fm = _found_methods[n];
820    assert(n == _num_participants || fm != NULL, "proper usage");
821    assert(fm == NULL || fm->method_holder() == _participants[n], "sanity");
822    return fm;
823  }
824
825#ifdef ASSERT
826  // Assert that m is inherited into ctxk, without intervening overrides.
827  // (May return true even if this is not true, in corner cases where we punt.)
828  bool check_method_context(Klass* ctxk, Method* m) {
829    if (m->method_holder() == ctxk)
830      return true;  // Quick win.
831    if (m->is_private())
832      return false; // Quick lose.  Should not happen.
833    if (!(m->is_public() || m->is_protected()))
834      // The override story is complex when packages get involved.
835      return true;  // Must punt the assertion to true.
836    Klass* k = ctxk;
837    Method* lm = k->lookup_method(m->name(), m->signature());
838    if (lm == NULL && k->oop_is_instance()) {
839      // It might be an interface method
840      lm = ((InstanceKlass*)k)->lookup_method_in_ordered_interfaces(m->name(),
841                                                                     m->signature());
842    }
843    if (lm == m)
844      // Method m is inherited into ctxk.
845      return true;
846    if (lm != NULL) {
847      if (!(lm->is_public() || lm->is_protected())) {
848        // Method is [package-]private, so the override story is complex.
849        return true;  // Must punt the assertion to true.
850      }
851      if (lm->is_static()) {
852        // Static methods don't override non-static so punt
853        return true;
854      }
855      if (   !Dependencies::is_concrete_method(lm)
856          && !Dependencies::is_concrete_method(m)
857          && lm->method_holder()->is_subtype_of(m->method_holder()))
858        // Method m is overridden by lm, but both are non-concrete.
859        return true;
860    }
861    ResourceMark rm;
862    tty->print_cr("Dependency method not found in the associated context:");
863    tty->print_cr("  context = %s", ctxk->external_name());
864    tty->print(   "  method = "); m->print_short_name(tty); tty->cr();
865    if (lm != NULL) {
866      tty->print( "  found = "); lm->print_short_name(tty); tty->cr();
867    }
868    return false;
869  }
870#endif
871
872  void add_participant(Klass* participant) {
873    assert(_num_participants + _record_witnesses < PARTICIPANT_LIMIT, "oob");
874    int np = _num_participants++;
875    _participants[np] = participant;
876    _participants[np+1] = NULL;
877    _found_methods[np+1] = NULL;
878  }
879
880  void record_witnesses(int add) {
881    if (add > PARTICIPANT_LIMIT)  add = PARTICIPANT_LIMIT;
882    assert(_num_participants + add < PARTICIPANT_LIMIT, "oob");
883    _record_witnesses = add;
884  }
885
886  bool is_witness(Klass* k) {
887    if (doing_subtype_search()) {
888      return Dependencies::is_concrete_klass(k);
889    } else {
890      Method* m = InstanceKlass::cast(k)->find_method(_name, _signature);
891      if (m == NULL || !Dependencies::is_concrete_method(m))  return false;
892      _found_methods[_num_participants] = m;
893      // Note:  If add_participant(k) is called,
894      // the method m will already be memoized for it.
895      return true;
896    }
897  }
898
899  bool is_participant(Klass* k) {
900    if (k == _participants[0]) {
901      return true;
902    } else if (_num_participants <= 1) {
903      return false;
904    } else {
905      return in_list(k, &_participants[1]);
906    }
907  }
908  bool ignore_witness(Klass* witness) {
909    if (_record_witnesses == 0) {
910      return false;
911    } else {
912      --_record_witnesses;
913      add_participant(witness);
914      return true;
915    }
916  }
917  static bool in_list(Klass* x, Klass** list) {
918    for (int i = 0; ; i++) {
919      Klass* y = list[i];
920      if (y == NULL)  break;
921      if (y == x)  return true;
922    }
923    return false;  // not in list
924  }
925
926 private:
927  // the actual search method:
928  Klass* find_witness_anywhere(Klass* context_type,
929                                 bool participants_hide_witnesses,
930                                 bool top_level_call = true);
931  // the spot-checking version:
932  Klass* find_witness_in(KlassDepChange& changes,
933                         Klass* context_type,
934                           bool participants_hide_witnesses);
935 public:
936  Klass* find_witness_subtype(Klass* context_type, KlassDepChange* changes = NULL) {
937    assert(doing_subtype_search(), "must set up a subtype search");
938    // When looking for unexpected concrete types,
939    // do not look beneath expected ones.
940    const bool participants_hide_witnesses = true;
941    // CX > CC > C' is OK, even if C' is new.
942    // CX > { CC,  C' } is not OK if C' is new, and C' is the witness.
943    if (changes != NULL) {
944      return find_witness_in(*changes, context_type, participants_hide_witnesses);
945    } else {
946      return find_witness_anywhere(context_type, participants_hide_witnesses);
947    }
948  }
949  Klass* find_witness_definer(Klass* context_type, KlassDepChange* changes = NULL) {
950    assert(!doing_subtype_search(), "must set up a method definer search");
951    // When looking for unexpected concrete methods,
952    // look beneath expected ones, to see if there are overrides.
953    const bool participants_hide_witnesses = true;
954    // CX.m > CC.m > C'.m is not OK, if C'.m is new, and C' is the witness.
955    if (changes != NULL) {
956      return find_witness_in(*changes, context_type, !participants_hide_witnesses);
957    } else {
958      return find_witness_anywhere(context_type, !participants_hide_witnesses);
959    }
960  }
961};
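
// The checkers below use the walker in two modes.  With participants only
// (add_participant), find_witness_* answers a yes/no question: any concrete
// subtype or method definer outside the participant set is a witness.  With
// record_witnesses(n), the first n witnesses found are silently converted
// into participants, which is how find_unique_concrete_subtype() and
// find_exclusive_concrete_subtypes() (and their method analogues) collect
// their candidate sets.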
962
963#ifndef PRODUCT
964static int deps_find_witness_calls = 0;
965static int deps_find_witness_steps = 0;
966static int deps_find_witness_recursions = 0;
967static int deps_find_witness_singles = 0;
968static int deps_find_witness_print = 0; // set to -1 to force a final print
969static bool count_find_witness_calls() {
970  if (TraceDependencies || LogCompilation) {
971    int pcount = deps_find_witness_print + 1;
972    bool final_stats      = (pcount == 0);
973    bool initial_call     = (pcount == 1);
974    bool occasional_print = ((pcount & ((1<<10) - 1)) == 0);
975    if (pcount < 0)  pcount = 1; // crude overflow protection
976    deps_find_witness_print = pcount;
977    if (VerifyDependencies && initial_call) {
978      tty->print_cr("Warning:  TraceDependencies results may be inflated by VerifyDependencies");
979    }
980    if (occasional_print || final_stats) {
981      // Every now and then dump a little info about dependency searching.
982      if (xtty != NULL) {
983       ttyLocker ttyl;
984       xtty->elem("deps_find_witness calls='%d' steps='%d' recursions='%d' singles='%d'",
985                   deps_find_witness_calls,
986                   deps_find_witness_steps,
987                   deps_find_witness_recursions,
988                   deps_find_witness_singles);
989      }
990      if (final_stats || (TraceDependencies && WizardMode)) {
991        ttyLocker ttyl;
992        tty->print_cr("Dependency check (find_witness) "
993                      "calls=%d, steps=%d (avg=%.1f), recursions=%d, singles=%d",
994                      deps_find_witness_calls,
995                      deps_find_witness_steps,
996                      (double)deps_find_witness_steps / deps_find_witness_calls,
997                      deps_find_witness_recursions,
998                      deps_find_witness_singles);
999      }
1000    }
1001    return true;
1002  }
1003  return false;
1004}
1005#else
1006#define count_find_witness_calls() (0)
1007#endif //PRODUCT
1008
1009
1010Klass* ClassHierarchyWalker::find_witness_in(KlassDepChange& changes,
1011                                               Klass* context_type,
1012                                               bool participants_hide_witnesses) {
1013  assert(changes.involves_context(context_type), "irrelevant dependency");
1014  Klass* new_type = changes.new_type();
1015
1016  (void)count_find_witness_calls();
1017  NOT_PRODUCT(deps_find_witness_singles++);
1018
1019  // Current thread must be in VM (not native mode, as in CI):
1020  assert(must_be_in_vm(), "raw oops here");
1021  // Must not move the class hierarchy during this check:
1022  assert_locked_or_safepoint(Compile_lock);
1023
1024  int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1025  if (nof_impls > 1) {
1026    // Avoid this case: *I.m > { A.m, C }; B.m > C
1027    // %%% Until this is fixed more systematically, bail out.
1028    // See corresponding comment in find_witness_anywhere.
1029    return context_type;
1030  }
1031
1032  assert(!is_participant(new_type), "only old classes are participants");
1033  if (participants_hide_witnesses) {
1034    // If the new type is a subtype of a participant, we are done.
1035    for (int i = 0; i < num_participants(); i++) {
1036      Klass* part = participant(i);
1037      if (part == NULL)  continue;
1038      assert(changes.involves_context(part) == new_type->is_subtype_of(part),
1039             "correct marking of participants, b/c new_type is unique");
1040      if (changes.involves_context(part)) {
1041        // new guy is protected from this check by previous participant
1042        return NULL;
1043      }
1044    }
1045  }
1046
1047  if (is_witness(new_type) &&
1048      !ignore_witness(new_type)) {
1049    return new_type;
1050  }
1051
1052  return NULL;
1053}
1054
1055
1056// Walk hierarchy under a context type, looking for unexpected types.
1057// Do not report participant types, and recursively walk beneath
1058// them only if participants_hide_witnesses is false.
1059// If top_level_call is false, skip testing the context type,
1060// because the caller has already considered it.
1061Klass* ClassHierarchyWalker::find_witness_anywhere(Klass* context_type,
1062                                                     bool participants_hide_witnesses,
1063                                                     bool top_level_call) {
1064  // Current thread must be in VM (not native mode, as in CI):
1065  assert(must_be_in_vm(), "raw oops here");
1066  // Must not move the class hierarchy during this check:
1067  assert_locked_or_safepoint(Compile_lock);
1068
1069  bool do_counts = count_find_witness_calls();
1070
1071  // Check the root of the sub-hierarchy first.
1072  if (top_level_call) {
1073    if (do_counts) {
1074      NOT_PRODUCT(deps_find_witness_calls++);
1075      NOT_PRODUCT(deps_find_witness_steps++);
1076    }
1077    if (is_participant(context_type)) {
1078      if (participants_hide_witnesses)  return NULL;
1079      // else fall through to search loop...
1080    } else if (is_witness(context_type) && !ignore_witness(context_type)) {
1081      // The context is an abstract class or interface, to start with.
1082      return context_type;
1083    }
1084  }
1085
1086  // Now we must check each implementor and each subclass.
1087  // Use a short worklist to avoid blowing the stack.
1088  // Each worklist entry is a *chain* of subklass siblings to process.
1089  const int CHAINMAX = 100;  // >= 1 + InstanceKlass::implementors_limit
1090  Klass* chains[CHAINMAX];
1091  int    chaini = 0;  // index into worklist
1092  Klass* chain;       // scratch variable
1093#define ADD_SUBCLASS_CHAIN(k)                     {  \
1094    assert(chaini < CHAINMAX, "oob");                \
1095    chain = InstanceKlass::cast(k)->subklass();      \
1096    if (chain != NULL)  chains[chaini++] = chain;    }
1097
1098  // Look for non-abstract subclasses.
1099  // (Note:  Interfaces do not have subclasses.)
1100  ADD_SUBCLASS_CHAIN(context_type);
1101
1102  // If it is an interface, search its direct implementors.
1103  // (Their subclasses are additional indirect implementors.
1104  // See InstanceKlass::add_implementor.)
1105  // (Note:  nof_implementors is always zero for non-interfaces.)
1106  int nof_impls = InstanceKlass::cast(context_type)->nof_implementors();
1107  if (nof_impls > 1) {
1108    // Avoid this case: *I.m > { A.m, C }; B.m > C
1109    // Here, I.m has 2 concrete implementations, but m appears unique
1110    // as A.m, because the search misses B.m when checking C.
1111    // The inherited method B.m was getting missed by the walker
1112    // when interface 'I' was the starting point.
1113    // %%% Until this is fixed more systematically, bail out.
1114    // (Old CHA had the same limitation.)
1115    return context_type;
1116  }
1117  if (nof_impls > 0) {
1118    Klass* impl = InstanceKlass::cast(context_type)->implementor();
1119    assert(impl != NULL, "just checking");
1120    // If impl is the same as the context_type, then more than one
1121    // implementor has been seen.  No exact info in this case.
1122    if (impl == context_type) {
1123      return context_type;  // report an inexact witness to this sad affair
1124    }
1125    if (do_counts)
1126      { NOT_PRODUCT(deps_find_witness_steps++); }
1127    if (is_participant(impl)) {
1128      if (!participants_hide_witnesses) {
1129        ADD_SUBCLASS_CHAIN(impl);
1130      }
1131    } else if (is_witness(impl) && !ignore_witness(impl)) {
1132      return impl;
1133    } else {
1134      ADD_SUBCLASS_CHAIN(impl);
1135    }
1136  }
1137
1138  // Recursively process each non-trivial sibling chain.
1139  while (chaini > 0) {
1140    Klass* chain = chains[--chaini];
1141    for (Klass* sub = chain; sub != NULL; sub = sub->next_sibling()) {
1142      if (do_counts) { NOT_PRODUCT(deps_find_witness_steps++); }
1143      if (is_participant(sub)) {
1144        if (participants_hide_witnesses)  continue;
1145        // else fall through to process this guy's subclasses
1146      } else if (is_witness(sub) && !ignore_witness(sub)) {
1147        return sub;
1148      }
1149      if (chaini < (VerifyDependencies? 2: CHAINMAX)) {
1150        // Fast path.  (Partially disabled if VerifyDependencies.)
1151        ADD_SUBCLASS_CHAIN(sub);
1152      } else {
1153        // Worklist overflow.  Do a recursive call.  Should be rare.
1154        // The recursive call will have its own worklist, of course.
1155        // (Note that sub has already been tested, so that there is
1156        // no need for the recursive call to re-test.  That's handy,
1157        // since the recursive call sees sub as the context_type.)
1158        if (do_counts) { NOT_PRODUCT(deps_find_witness_recursions++); }
1159        Klass* witness = find_witness_anywhere(sub,
1160                                                 participants_hide_witnesses,
1161                                                 /*top_level_call=*/ false);
1162        if (witness != NULL)  return witness;
1163      }
1164    }
1165  }
1166
1167  // No witness found.  The dependency remains unbroken.
1168  return NULL;
1169#undef ADD_SUBCLASS_CHAIN
1170}
1171
1172
1173bool Dependencies::is_concrete_klass(Klass* k) {
1174  if (k->is_abstract())  return false;
1175  // %%% We could treat classes which are concrete but
1176  // have not yet been instantiated as virtually abstract.
1177  // This would require a deoptimization barrier on first instantiation.
1178  //if (k->is_not_instantiated())  return false;
1179  return true;
1180}
1181
1182bool Dependencies::is_concrete_method(Method* m) {
1183  // Statics are irrelevant to virtual call sites.
1184  if (m->is_static())  return false;
1185
1186  // We could also return false if m does not yet appear to be
1187  // executed, if the VM version supports this distinction also.
1188  // Default methods are considered "concrete" as well.
1189  return !m->is_abstract() &&
1190         !m->is_overpass(); // error functions aren't concrete
1191}
1192
1193
1194Klass* Dependencies::find_finalizable_subclass(Klass* k) {
1195  if (k->is_interface())  return NULL;
1196  if (k->has_finalizer()) return k;
1197  k = k->subklass();
1198  while (k != NULL) {
1199    Klass* result = find_finalizable_subclass(k);
1200    if (result != NULL) return result;
1201    k = k->next_sibling();
1202  }
1203  return NULL;
1204}
1205
1206
1207bool Dependencies::is_concrete_klass(ciInstanceKlass* k) {
1208  if (k->is_abstract())  return false;
1209  // We could also return false if k does not yet appear to be
1210  // instantiated, if the VM version supports this distinction also.
1211  //if (k->is_not_instantiated())  return false;
1212  return true;
1213}
1214
1215bool Dependencies::is_concrete_method(ciMethod* m) {
1216  // Statics are irrelevant to virtual call sites.
1217  if (m->is_static())  return false;
1218
1219  // We could also return false if m does not yet appear to be
1220  // executed, if the VM version supports this distinction also.
1221  return !m->is_abstract();
1222}
1223
1224
1225bool Dependencies::has_finalizable_subclass(ciInstanceKlass* k) {
1226  return k->has_finalizable_subclass();
1227}
1228
1229
1230// Any use of the contents (bytecodes) of a method must be
1231// marked by an "evol_method" dependency, if those contents
1232// can change.  (Note: A method is always dependent on itself.)
1233Klass* Dependencies::check_evol_method(Method* m) {
1234  assert(must_be_in_vm(), "raw oops here");
1235  // Did somebody do a JVMTI RedefineClasses while our backs were turned?
1236  // Or is there now a breakpoint?
1237  // (Assumes compiled code cannot handle bkpts; change if UseFastBreakpoints.)
1238  if (m->is_old()
1239      || m->number_of_breakpoints() > 0) {
1240    return m->method_holder();
1241  } else {
1242    return NULL;
1243  }
1244}
1245
1246// This is a strong assertion:  It is that the given type
1247// has no subtypes whatever.  It is most useful for
1248// optimizing checks on reflected types or on array types.
1249// (Checks on types which are derived from real instances
1250// can be optimized more strongly than this, because we
1251// know that the checked type comes from a concrete type,
1252// and therefore we can disregard abstract types.)
1253Klass* Dependencies::check_leaf_type(Klass* ctxk) {
1254  assert(must_be_in_vm(), "raw oops here");
1255  assert_locked_or_safepoint(Compile_lock);
1256  InstanceKlass* ctx = InstanceKlass::cast(ctxk);
1257  Klass* sub = ctx->subklass();
1258  if (sub != NULL) {
1259    return sub;
1260  } else if (ctx->nof_implementors() != 0) {
1261    // if it is an interface, it must be unimplemented
1262    // (if it is not an interface, nof_implementors is always zero)
1263    Klass* impl = ctx->implementor();
1264    assert(impl != NULL, "must be set");
1265    return impl;
1266  } else {
1267    return NULL;
1268  }
1269}
1270
1271// Test the assertion that conck is the only concrete subtype* of ctxk.
1272// The type conck itself is allowed to have further concrete subtypes.
1273// This allows the compiler to narrow occurrences of ctxk by conck,
1274// when dealing with the types of actual instances.
1275Klass* Dependencies::check_abstract_with_unique_concrete_subtype(Klass* ctxk,
1276                                                                   Klass* conck,
1277                                                                   KlassDepChange* changes) {
1278  ClassHierarchyWalker wf(conck);
1279  return wf.find_witness_subtype(ctxk, changes);
1280}
1281
1282// If a non-concrete class has no concrete subtypes, it is not (yet)
1283// instantiatable.  This can allow the compiler to make some paths go
1284// dead, if they are gated by a test of the type.
1285Klass* Dependencies::check_abstract_with_no_concrete_subtype(Klass* ctxk,
1286                                                               KlassDepChange* changes) {
1287  // Find any concrete subtype, with no participants:
1288  ClassHierarchyWalker wf;
1289  return wf.find_witness_subtype(ctxk, changes);
1290}
1291
1292
1293// If a concrete class has no concrete subtypes, it can always be
1294// exactly typed.  This allows the use of a cheaper type test.
1295Klass* Dependencies::check_concrete_with_no_concrete_subtype(Klass* ctxk,
1296                                                               KlassDepChange* changes) {
1297  // Find any concrete subtype, with only the ctxk as participant:
1298  ClassHierarchyWalker wf(ctxk);
1299  return wf.find_witness_subtype(ctxk, changes);
1300}
1301
1302
1303// Find the unique concrete proper subtype of ctxk, or NULL if there
1304// is more than one concrete proper subtype.  If there are no concrete
1305// proper subtypes, return ctxk itself, whether it is concrete or not.
1306// The returned subtype is allowed to have further concrete subtypes.
1307// That is, return CC1 for CX > CC1 > CC2, but NULL for CX > { CC1, CC2 }.
1308Klass* Dependencies::find_unique_concrete_subtype(Klass* ctxk) {
1309  ClassHierarchyWalker wf(ctxk);   // Ignore ctxk when walking.
1310  wf.record_witnesses(1);          // Record one other witness when walking.
1311  Klass* wit = wf.find_witness_subtype(ctxk);
1312  if (wit != NULL)  return NULL;   // Too many witnesses.
1313  Klass* conck = wf.participant(0);
1314  if (conck == NULL) {
1315#ifndef PRODUCT
1316    // Make sure the dependency mechanism will pass this discovery:
1317    if (VerifyDependencies) {
1318      // Turn off dependency tracing while actually testing deps.
1319      FlagSetting fs(TraceDependencies, false);
1320      if (!Dependencies::is_concrete_klass(ctxk)) {
1321        guarantee(NULL ==
1322                  (void *)check_abstract_with_no_concrete_subtype(ctxk),
1323                  "verify dep.");
1324      } else {
1325        guarantee(NULL ==
1326                  (void *)check_concrete_with_no_concrete_subtype(ctxk),
1327                  "verify dep.");
1328      }
1329    }
1330#endif //PRODUCT
1331    return ctxk;                   // Return ctxk as a flag for "no subtypes".
1332  } else {
1333#ifndef PRODUCT
1334    // Make sure the dependency mechanism will pass this discovery:
1335    if (VerifyDependencies) {
1336      // Turn off dependency tracing while actually testing deps.
1337      FlagSetting fs(TraceDependencies, false);
1338      if (!Dependencies::is_concrete_klass(ctxk)) {
1339        guarantee(NULL == (void *)
1340                  check_abstract_with_unique_concrete_subtype(ctxk, conck),
1341                  "verify dep.");
1342      }
1343    }
1344#endif //PRODUCT
1345    return conck;
1346  }
1347}
1348
1349// Test the assertion that the k[12] are the only concrete subtypes of ctxk,
1350// except possibly for further subtypes of k[12] themselves.
1351// The context type must be abstract.  The types k1 and k2 are themselves
1352// allowed to have further concrete subtypes.
1353Klass* Dependencies::check_abstract_with_exclusive_concrete_subtypes(
1354                                                Klass* ctxk,
1355                                                Klass* k1,
1356                                                Klass* k2,
1357                                                KlassDepChange* changes) {
1358  ClassHierarchyWalker wf;
1359  wf.add_participant(k1);
1360  wf.add_participant(k2);
1361  return wf.find_witness_subtype(ctxk, changes);
1362}
1363
1364// Search ctxk for concrete implementations.  If there are klen or fewer,
1365// pack them into the given array and return the number.
1366// Otherwise, return -1, meaning the given array would overflow.
1367// (Note that a return of 0 means there are exactly no concrete subtypes.)
1368// In this search, if ctxk is concrete, it will be reported alone.
1369// For any type CC reported, no proper subtypes of CC will be reported.
1370int Dependencies::find_exclusive_concrete_subtypes(Klass* ctxk,
1371                                                   int klen,
1372                                                   Klass* karray[]) {
1373  ClassHierarchyWalker wf;
1374  wf.record_witnesses(klen);
1375  Klass* wit = wf.find_witness_subtype(ctxk);
1376  if (wit != NULL)  return -1;  // Too many witnesses.
1377  int num = wf.num_participants();
1378  assert(num <= klen, "oob");
1379  // Pack the result array with the good news.
1380  for (int i = 0; i < num; i++)
1381    karray[i] = wf.participant(i);
1382#ifndef PRODUCT
1383  // Make sure the dependency mechanism will pass this discovery:
1384  if (VerifyDependencies) {
1385    // Turn off dependency tracing while actually testing deps.
1386    FlagSetting fs(TraceDependencies, false);
1387    switch (Dependencies::is_concrete_klass(ctxk)? -1: num) {
1388    case -1: // ctxk was itself concrete
1389      guarantee(num == 1 && karray[0] == ctxk, "verify dep.");
1390      break;
1391    case 0:
1392      guarantee(NULL == (void *)check_abstract_with_no_concrete_subtype(ctxk),
1393                "verify dep.");
1394      break;
1395    case 1:
1396      guarantee(NULL == (void *)
1397                check_abstract_with_unique_concrete_subtype(ctxk, karray[0]),
1398                "verify dep.");
1399      break;
1400    case 2:
1401      guarantee(NULL == (void *)
1402                check_abstract_with_exclusive_concrete_subtypes(ctxk,
1403                                                                karray[0],
1404                                                                karray[1]),
1405                "verify dep.");
1406      break;
1407    default:
1408      ShouldNotReachHere();  // klen > 2 not yet supported
1409    }
1410  }
1411#endif //PRODUCT
1412  return num;
1413}
1414
1415// If a class (or interface) has a unique concrete method uniqm, return NULL.
1416// Otherwise, return a class that contains an interfering method.
1417Klass* Dependencies::check_unique_concrete_method(Klass* ctxk, Method* uniqm,
1418                                                    KlassDepChange* changes) {
1419  // Here is a missing optimization:  If uniqm->is_final(),
1420  // we don't really need to search beneath it for overrides.
1421  // This is probably not important, since we don't use dependencies
1422  // to track final methods.  (They can't be "definalized".)
1423  ClassHierarchyWalker wf(uniqm->method_holder(), uniqm);
1424  return wf.find_witness_definer(ctxk, changes);
1425}
1426
1427// Find the set of all non-abstract methods under ctxk that match m.
1428// (The method m must be defined or inherited in ctxk.)
1429// Include m itself in the set, unless it is abstract.
1430// If this set has exactly one element, return that element; otherwise return NULL.
1431Method* Dependencies::find_unique_concrete_method(Klass* ctxk, Method* m) {
1432  ClassHierarchyWalker wf(m);
1433  assert(wf.check_method_context(ctxk, m), "proper context");
1434  wf.record_witnesses(1);
1435  Klass* wit = wf.find_witness_definer(ctxk);
1436  if (wit != NULL)  return NULL;  // Too many witnesses.
1437  Method* fm = wf.found_method(0);  // Will be NULL if num_parts == 0.
1438  if (Dependencies::is_concrete_method(m)) {
1439    if (fm == NULL) {
1440      // It turns out that m was always the only implementation.
1441      fm = m;
1442    } else if (fm != m) {
1443      // Two conflicting implementations after all.
1444      // (This can happen if m is inherited into ctxk and fm overrides it.)
1445      return NULL;
1446    }
1447  }
1448#ifndef PRODUCT
1449  // Make sure the dependency mechanism will pass this discovery:
1450  if (VerifyDependencies && fm != NULL) {
1451    guarantee(NULL == (void *)check_unique_concrete_method(ctxk, fm),
1452              "verify dep.");
1453  }
1454#endif //PRODUCT
1455  return fm;
1456}
1457
1458Klass* Dependencies::check_exclusive_concrete_methods(Klass* ctxk,
1459                                                        Method* m1,
1460                                                        Method* m2,
1461                                                        KlassDepChange* changes) {
1462  ClassHierarchyWalker wf(m1);
1463  wf.add_participant(m1->method_holder());
1464  wf.add_participant(m2->method_holder());
1465  return wf.find_witness_definer(ctxk, changes);
1466}
1467
1468// Find the set of all non-abstract methods under ctxk that match marray[0].
1469// (The method marray[0] must be defined or inherited in ctxk.)
1470// Include marray[0] itself in the set, unless it is abstract.
1471// Fill the given array marray[0..(mlen-1)] with this set, and return the length.
1472// (The length may be zero if no concrete methods are found anywhere.)
1473// If there are too many concrete methods to fit in marray, return -1.
1474int Dependencies::find_exclusive_concrete_methods(Klass* ctxk,
1475                                                  int mlen,
1476                                                  Method* marray[]) {
1477  Method* m0 = marray[0];
1478  ClassHierarchyWalker wf(m0);
1479  assert(wf.check_method_context(ctxk, m0), "proper context");
1480  wf.record_witnesses(mlen);
1481  bool participants_hide_witnesses = true;
1482  Klass* wit = wf.find_witness_definer(ctxk);
1483  if (wit != NULL)  return -1;  // Too many witnesses.
1484  int num = wf.num_participants();
1485  assert(num <= mlen, "oob");
1486  // Keep track of whether m0 is also part of the result set.
1487  int mfill = 0;
1488  assert(marray[mfill] == m0, "sanity");
1489  if (Dependencies::is_concrete_method(m0))
1490    mfill++;  // keep m0 as marray[0], the first result
1491  for (int i = 0; i < num; i++) {
1492    Method* fm = wf.found_method(i);
1493    if (fm == m0)  continue;  // Already put this guy in the list.
1494    if (mfill == mlen) {
1495      return -1;              // Oops.  Too many methods after all!
1496    }
1497    marray[mfill++] = fm;
1498  }
1499#ifndef PRODUCT
1500  // Make sure the dependency mechanism will pass this discovery:
1501  if (VerifyDependencies) {
1502    // Turn off dependency tracing while actually testing deps.
1503    FlagSetting fs(TraceDependencies, false);
1504    switch (mfill) {
1505    case 1:
1506      guarantee(NULL == (void *)check_unique_concrete_method(ctxk, marray[0]),
1507                "verify dep.");
1508      break;
1509    case 2:
1510      guarantee(NULL == (void *)
1511                check_exclusive_concrete_methods(ctxk, marray[0], marray[1]),
1512                "verify dep.");
1513      break;
1514    default:
1515      ShouldNotReachHere();  // mlen > 2 not yet supported
1516    }
1517  }
1518#endif //PRODUCT
1519  return mfill;
1520}
1521
1522
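// If ctxk has no finalizable subclasses, return NULL.  Otherwise,
// return a finalizable subclass as the witness.  When a KlassDepChange
// is supplied, only the newly loaded type needs to be examined.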
1523Klass* Dependencies::check_has_no_finalizable_subclasses(Klass* ctxk, KlassDepChange* changes) {
1524  Klass* search_at = ctxk;
1525  if (changes != NULL)
1526    search_at = changes->new_type(); // just look at the new bit
1527  return find_finalizable_subclass(search_at);
1528}
1529
1530
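// If the given CallSite still targets the given MethodHandle, return
// NULL.  Otherwise, return the CallSite's klass as the witness that
// the dependency has been invalidated.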
1531Klass* Dependencies::check_call_site_target_value(oop call_site, oop method_handle, CallSiteDepChange* changes) {
1532  assert(call_site    ->is_a(SystemDictionary::CallSite_klass()),     "sanity");
1533  assert(method_handle->is_a(SystemDictionary::MethodHandle_klass()), "sanity");
1534  if (changes == NULL) {
1535    // Validate all CallSites
1536    if (java_lang_invoke_CallSite::target(call_site) != method_handle)
1537      return call_site->klass();  // assertion failed
1538  } else {
1539    // Validate the given CallSite
1540    if (call_site == changes->call_site() && java_lang_invoke_CallSite::target(call_site) != changes->method_handle()) {
1541      assert(method_handle != changes->method_handle(), "must be");
1542      return call_site->klass();  // assertion failed
1543    }
1544  }
1545  return NULL;  // assertion still valid
1546}
1547
1548
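// Report a witness that invalidates a dependency: print it under
// TraceDependencies and record it via log_dependency().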
1549void Dependencies::DepStream::trace_and_log_witness(Klass* witness) {
1550  if (witness != NULL) {
1551    if (TraceDependencies) {
1552      print_dependency(witness, /*verbose=*/ true);
1553    }
1554    // The following is a no-op unless logging is enabled:
1555    log_dependency(witness);
1556  }
1557}
1558
1559
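// Re-check a klass-based dependency from this stream, either against
// the whole class hierarchy or against the single new type in 'changes'.
// Return NULL if the dependency still holds, else a witness klass.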
1560Klass* Dependencies::DepStream::check_klass_dependency(KlassDepChange* changes) {
1561  assert_locked_or_safepoint(Compile_lock);
1562  Dependencies::check_valid_dependency_type(type());
1563
1564  Klass* witness = NULL;
1565  switch (type()) {
1566  case evol_method:
1567    witness = check_evol_method(method_argument(0));
1568    break;
1569  case leaf_type:
1570    witness = check_leaf_type(context_type());
1571    break;
1572  case abstract_with_unique_concrete_subtype:
1573    witness = check_abstract_with_unique_concrete_subtype(context_type(), type_argument(1), changes);
1574    break;
1575  case abstract_with_no_concrete_subtype:
1576    witness = check_abstract_with_no_concrete_subtype(context_type(), changes);
1577    break;
1578  case concrete_with_no_concrete_subtype:
1579    witness = check_concrete_with_no_concrete_subtype(context_type(), changes);
1580    break;
1581  case unique_concrete_method:
1582    witness = check_unique_concrete_method(context_type(), method_argument(1), changes);
1583    break;
1584  case abstract_with_exclusive_concrete_subtypes_2:
1585    witness = check_abstract_with_exclusive_concrete_subtypes(context_type(), type_argument(1), type_argument(2), changes);
1586    break;
1587  case exclusive_concrete_methods_2:
1588    witness = check_exclusive_concrete_methods(context_type(), method_argument(1), method_argument(2), changes);
1589    break;
1590  case no_finalizable_subclasses:
1591    witness = check_has_no_finalizable_subclasses(context_type(), changes);
1592    break;
1593  default:
1594    witness = NULL;
1595    break;
1596  }
1597  trace_and_log_witness(witness);
1598  return witness;
1599}
1600
1601
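// Re-check a call_site_target_value dependency from this stream.
// Return NULL if the dependency still holds, else a witness klass.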
1602Klass* Dependencies::DepStream::check_call_site_dependency(CallSiteDepChange* changes) {
1603  assert_locked_or_safepoint(Compile_lock);
1604  Dependencies::check_valid_dependency_type(type());
1605
1606  Klass* witness = NULL;
1607  switch (type()) {
1608  case call_site_target_value:
1609    witness = check_call_site_target_value(argument_oop(0), argument_oop(1), changes);
1610    break;
1611  default:
1612    witness = NULL;
1613    break;
1614  }
1615  trace_and_log_witness(witness);
1616  return witness;
1617}
1618
1619
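// Re-check only dependencies that the given change could possibly
// invalidate: klass dependencies whose context type is involved in a
// klass change, and call-site dependencies for a CallSite change.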
1620Klass* Dependencies::DepStream::spot_check_dependency_at(DepChange& changes) {
1621  // Handle klass dependency
1622  if (changes.is_klass_change() && changes.as_klass_change()->involves_context(context_type()))
1623    return check_klass_dependency(changes.as_klass_change());
1624
1625  // Handle CallSite dependency
1626  if (changes.is_call_site_change())
1627    return check_call_site_dependency(changes.as_call_site_change());
1628
1629  // irrelevant dependency; skip it
1630  return NULL;
1631}
1632
1633
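// Print the types affected by this change: the dependee itself, plus
// counts of its superclasses and transitive interfaces (each listed
// individually only in WizardMode).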
1634void DepChange::print() {
1635  int nsup = 0, nint = 0;
1636  for (ContextStream str(*this); str.next(); ) {
1637    Klass* k = str.klass();
1638    switch (str.change_type()) {
1639    case Change_new_type:
1640      tty->print_cr("  dependee = %s", InstanceKlass::cast(k)->external_name());
1641      break;
1642    case Change_new_sub:
1643      if (!WizardMode) {
1644        ++nsup;
1645      } else {
1646        tty->print_cr("  context super = %s", InstanceKlass::cast(k)->external_name());
1647      }
1648      break;
1649    case Change_new_impl:
1650      if (!WizardMode) {
1651        ++nint;
1652      } else {
1653        tty->print_cr("  context interface = %s", InstanceKlass::cast(k)->external_name());
1654      }
1655      break;
1656    }
1657  }
1658  if (nsup + nint != 0) {
1659    tty->print_cr("  context supers = %d, interfaces = %d", nsup, nint);
1660  }
1661}
1662
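// Reset the stream to just before the first affected context type,
// which is the newly loaded type itself (if this is a klass change).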
1663void DepChange::ContextStream::start() {
1664  Klass* new_type = _changes.is_klass_change() ? _changes.as_klass_change()->new_type() : (Klass*) NULL;
1665  _change_type = (new_type == NULL ? NO_CHANGE : Start_Klass);
1666  _klass = new_type;
1667  _ti_base = NULL;
1668  _ti_index = 0;
1669  _ti_limit = 0;
1670}
1671
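// Advance to the next affected context type: first the new type, then
// each of its superclasses, then each of its transitive interfaces.
// Return false once the iterator is exhausted.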
1672bool DepChange::ContextStream::next() {
1673  switch (_change_type) {
1674  case Start_Klass:             // initial state; _klass is the new type
1675    _ti_base = InstanceKlass::cast(_klass)->transitive_interfaces();
1676    _ti_index = 0;
1677    _change_type = Change_new_type;
1678    return true;
1679  case Change_new_type:
1680    // fall through:
1681    _change_type = Change_new_sub;
1682  case Change_new_sub:
1683    // 6598190: the extra braces work around Sun Studio C++ compiler bug 6629277
1684    {
1685      _klass = InstanceKlass::cast(_klass)->super();
1686      if (_klass != NULL) {
1687        return true;
1688      }
1689    }
1690    // else set up _ti_limit and fall through:
1691    _ti_limit = (_ti_base == NULL) ? 0 : _ti_base->length();
1692    _change_type = Change_new_impl;
1693  case Change_new_impl:
1694    if (_ti_index < _ti_limit) {
1695      _klass = _ti_base->at(_ti_index++);
1696      return true;
1697    }
1698    // fall through:
1699    _change_type = NO_CHANGE;  // iterator is exhausted
1700  case NO_CHANGE:
1701    break;
1702  default:
1703    ShouldNotReachHere();
1704  }
1705  return false;
1706}
1707
1708void KlassDepChange::initialize() {
1709  // entire transaction must be under this lock:
1710  assert_lock_strong(Compile_lock);
1711
1712  // Mark the dependee and all of its superclasses,
1713  // as well as its transitive interfaces.
1714  for (ContextStream str(*this); str.next(); ) {
1715    Klass* d = str.klass();
1716    assert(!InstanceKlass::cast(d)->is_marked_dependent(), "checking");
1717    InstanceKlass::cast(d)->set_is_marked_dependent(true);
1718  }
1719}
1720
1721KlassDepChange::~KlassDepChange() {
1722  // Unmark the dependee and all of its superclasses,
1723  // as well as its transitive interfaces.
1724  for (ContextStream str(*this); str.next(); ) {
1725    Klass* d = str.klass();
1726    InstanceKlass::cast(d)->set_is_marked_dependent(false);
1727  }
1728}
1729
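// Answer whether k is among the context types marked by this change,
// which is equivalent to the new type being a subtype of k.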
1730bool KlassDepChange::involves_context(Klass* k) {
1731  if (k == NULL || !k->oop_is_instance()) {
1732    return false;
1733  }
1734  InstanceKlass* ik = InstanceKlass::cast(k);
1735  bool is_contained = ik->is_marked_dependent();
1736  assert(is_contained == new_type()->is_subtype_of(k),
1737         "correct marking of potential context types");
1738  return is_contained;
1739}
1740
1741#ifndef PRODUCT
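// Flush out and print any accumulated dependency statistics.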
1742void Dependencies::print_statistics() {
1743  if (deps_find_witness_print != 0) {
1744    // Call one final time, to flush out the data.
1745    deps_find_witness_print = -1;
1746    count_find_witness_calls();
1747  }
1748}
1749#endif
1750