vframe.hpp revision 844:bd02caa94611
/*
 * Copyright 1997-2009 Sun Microsystems, Inc.  All Rights Reserved.
 * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
 *
 * This code is free software; you can redistribute it and/or modify it
 * under the terms of the GNU General Public License version 2 only, as
 * published by the Free Software Foundation.
 *
 * This code is distributed in the hope that it will be useful, but WITHOUT
 * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
 * FITNESS FOR A PARTICULAR PURPOSE.  See the GNU General Public License
 * version 2 for more details (a copy is included in the LICENSE file that
 * accompanied this code).
 *
 * You should have received a copy of the GNU General Public License version
 * 2 along with this work; if not, write to the Free Software Foundation,
 * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
 *
 * Please contact Sun Microsystems, Inc., 4150 Network Circle, Santa Clara,
 * CA 95054 USA or visit www.sun.com if you need additional information or
 * have any questions.
 *
 */

// vframes are virtual stack frames representing source level activations.
// A single frame may hold several source level activations in the case of
// optimized code. The debugging information stored with the optimized code
// enables us to unfold a frame as a stack of vframes.
// An externalVFrame represents an activation of a non-Java method.

// The vframe inheritance hierarchy:
// - vframe
//   - javaVFrame
//     - interpretedVFrame
//     - compiledVFrame     ; (used for both compiled Java methods and native stubs)
//   - externalVFrame
//     - entryVFrame        ; special frame created when calling Java from C

// - BasicLock

class vframe: public ResourceObj {
 protected:
  frame        _fr;      // Raw frame behind the virtual frame.
  RegisterMap  _reg_map; // Register map for the raw frame (used to handle callee-saved registers).
  JavaThread*  _thread;  // The thread owning the raw frame.

  vframe(const frame* fr, const RegisterMap* reg_map, JavaThread* thread);
  vframe(const frame* fr, JavaThread* thread);
 public:
  // Factory method for creating vframes
  static vframe* new_vframe(const frame* f, const RegisterMap* reg_map, JavaThread* thread);

  // Accessors
  frame              fr()           const { return _fr;      }
  CodeBlob*          cb()           const { return _fr.cb(); }
  nmethod*           nm()           const {
    assert(cb() != NULL && cb()->is_nmethod(), "usage");
    return (nmethod*) cb();
  }

  // ???? Does this need to be a copy?
  frame*             frame_pointer()      { return &_fr;      }
  const RegisterMap* register_map() const { return &_reg_map; }
  JavaThread*        thread()       const { return _thread;   }

  // Returns the sender vframe
  virtual vframe* sender() const;

  // Returns the next javaVFrame on the stack (skipping all other kinds of frames)
  javaVFrame* java_sender() const;

  // Answers whether this is the top vframe in the frame, i.e., whether the
  // sender vframe is in the caller frame
  virtual bool is_top() const { return true; }

  // Returns top vframe within same frame (see is_top())
  virtual vframe* top() const;

  // Type testing operations
  virtual bool is_entry_frame()       const { return false; }
  virtual bool is_java_frame()        const { return false; }
  virtual bool is_interpreted_frame() const { return false; }
  virtual bool is_compiled_frame()    const { return false; }

#ifndef PRODUCT
  // printing operations
  virtual void print_value() const;
  virtual void print();
#endif
};
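
// Illustrative usage sketch (not part of this header): walking the Java-level
// activations of a thread with the factory and sender methods above. The
// helper name print_java_activations is hypothetical.
//
//   void print_java_activations(JavaThread* thread) {
//     ResourceMark rm;                        // vframes are resource allocated
//     RegisterMap reg_map(thread);
//     frame f = thread->last_frame();
//     vframe* vf = vframe::new_vframe(&f, &reg_map, thread);
//     javaVFrame* jvf = vf->is_java_frame() ? javaVFrame::cast(vf)
//                                           : vf->java_sender();
//     for (; jvf != NULL; jvf = jvf->java_sender()) {
//       jvf->method()->print_short_name(tty); // one line per source activation
//       tty->print_cr(" @ bci %d", jvf->bci());
//     }
//   }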


class javaVFrame: public vframe {
 public:
  // JVM state
  virtual methodOop                    method()         const = 0;
  virtual int                          bci()            const = 0;
  virtual StackValueCollection*        locals()         const = 0;
  virtual StackValueCollection*        expressions()    const = 0;
  // the order returned by monitors() is from oldest -> youngest (see 4418568)
  virtual GrowableArray<MonitorInfo*>* monitors()       const = 0;

  // Debugging support via JVMTI.
  // NOTE that this is not guaranteed to give correct results for compiled vframes.
  // Deoptimize first if necessary.
  virtual void set_locals(StackValueCollection* values) const = 0;

  // Test operation
  bool is_java_frame() const { return true; }

 protected:
  javaVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread) : vframe(fr, reg_map, thread) {}
  javaVFrame(const frame* fr, JavaThread* thread) : vframe(fr, thread) {}

 public:
  // casting
  static javaVFrame* cast(vframe* vf) {
    assert(vf == NULL || vf->is_java_frame(), "must be java frame");
    return (javaVFrame*) vf;
  }

  // Return an array of monitors locked by this frame in youngest-to-oldest order
  GrowableArray<MonitorInfo*>* locked_monitors();

  // printing used during stack dumps
  void print_lock_info_on(outputStream* st, int frame_count);
  void print_lock_info(int frame_count) { print_lock_info_on(tty, frame_count); }

#ifndef PRODUCT
 public:
  // printing operations
  void print();
  void print_value() const;
  void print_activation(int index) const;

  // verify operations
  virtual void verify() const;

  // Structural compare
  bool structural_compare(javaVFrame* other);
#endif
  friend class vframe;
};
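
// Illustrative sketch (not part of this header): reading the JVM state exposed
// by a javaVFrame. locals() returns a StackValueCollection whose entries are
// either oops or raw integer slots. The helper name print_locals is
// hypothetical; for compiled vframes the values come from debug info, so, as
// noted above, deoptimize first if exact, writable state is required.
//
//   void print_locals(javaVFrame* jvf) {
//     ResourceMark rm;
//     StackValueCollection* locals = jvf->locals();
//     for (int i = 0; i < locals->size(); i++) {
//       StackValue* sv = locals->at(i);
//       if (sv->type() == T_OBJECT) {
//         sv->get_obj()->print_value_on(tty);           // oop slot
//         tty->cr();
//       } else {
//         tty->print_cr(INTPTR_FORMAT, sv->get_int());  // raw slot
//       }
//     }
//   }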

class interpretedVFrame: public javaVFrame {
 public:
  // JVM state
  methodOop                    method()         const;
  int                          bci()            const;
  StackValueCollection*        locals()         const;
  StackValueCollection*        expressions()    const;
  GrowableArray<MonitorInfo*>* monitors()       const;

  void set_locals(StackValueCollection* values) const;

  // Test operation
  bool is_interpreted_frame() const { return true; }

 protected:
  interpretedVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread) : javaVFrame(fr, reg_map, thread) {}

 public:
  // Accessors for Byte Code Pointer
  u_char* bcp() const;
  void set_bcp(u_char* bcp);

  // casting
  static interpretedVFrame* cast(vframe* vf) {
    assert(vf == NULL || vf->is_interpreted_frame(), "must be interpreted frame");
    return (interpretedVFrame*) vf;
  }

 private:
  static const int bcp_offset;
  intptr_t* locals_addr_at(int offset) const;

  // returns where the parameters start relative to the frame pointer
  int start_of_parameters() const;

#ifndef PRODUCT
 public:
  // verify operations
  void verify() const;
#endif
  friend class vframe;
};


class externalVFrame: public vframe {
 protected:
  externalVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread) : vframe(fr, reg_map, thread) {}

#ifndef PRODUCT
 public:
  // printing operations
  void print_value() const;
  void print();
#endif
  friend class vframe;
};

class entryVFrame: public externalVFrame {
 public:
  bool is_entry_frame() const { return true; }

 protected:
  entryVFrame(const frame* fr, const RegisterMap* reg_map, JavaThread* thread);

 public:
  // casting
  static entryVFrame* cast(vframe* vf) {
    assert(vf == NULL || vf->is_entry_frame(), "must be entry frame");
    return (entryVFrame*) vf;
  }

#ifndef PRODUCT
 public:
  // printing
  void print_value() const;
  void print();
#endif
  friend class vframe;
};


// A MonitorInfo is a ResourceObject that describes the pair:
// 1) the owner of the monitor
// 2) the monitor lock
class MonitorInfo : public ResourceObj {
 private:
  oop        _owner; // the object owning the monitor
  BasicLock* _lock;
  oop        _owner_klass; // klass of the owner if the owner was scalar replaced
  bool       _eliminated;
  bool       _owner_is_scalar_replaced;
 public:
  // Constructor
  MonitorInfo(oop owner, BasicLock* lock, bool eliminated, bool owner_is_scalar_replaced) {
    if (!owner_is_scalar_replaced) {
      _owner = owner;
      _owner_klass = NULL;
    } else {
      assert(eliminated, "monitor should be eliminated for scalar replaced object");
      _owner = NULL;
      _owner_klass = owner;
    }
    _lock  = lock;
    _eliminated = eliminated;
    _owner_is_scalar_replaced = owner_is_scalar_replaced;
  }
  // Accessors
  oop        owner() const {
    assert(!_owner_is_scalar_replaced, "should not be called for scalar replaced object");
    return _owner;
  }
  klassOop   owner_klass() const {
    assert(_owner_is_scalar_replaced, "should only be called for scalar replaced object");
    return (klassOop)_owner_klass;
  }
  BasicLock* lock()  const { return _lock;  }
  bool eliminated()  const { return _eliminated; }
  bool owner_is_scalar_replaced()  const { return _owner_is_scalar_replaced; }
};
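
// Illustrative sketch (not part of this header): consuming MonitorInfo entries
// returned by javaVFrame::monitors(). The scalar-replacement and elimination
// flags must be checked before calling owner(), as the asserts above enforce.
// The helper name print_monitors is hypothetical.
//
//   void print_monitors(javaVFrame* jvf) {
//     ResourceMark rm;
//     GrowableArray<MonitorInfo*>* mons = jvf->monitors();
//     for (int i = 0; i < mons->length(); i++) {
//       MonitorInfo* mi = mons->at(i);
//       if (mi->owner_is_scalar_replaced()) {
//         tty->print_cr("eliminated lock on scalar replaced %s",
//                       Klass::cast(mi->owner_klass())->external_name());
//       } else if (mi->eliminated()) {
//         tty->print_cr("eliminated lock");
//       } else if (mi->owner() != NULL) {
//         mi->owner()->print_value_on(tty);
//         tty->cr();
//       }
//     }
//   }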

class vframeStreamCommon : StackObj {
 protected:
  // common
  frame        _frame;
  JavaThread*  _thread;
  RegisterMap  _reg_map;
  enum { interpreted_mode, compiled_mode, at_end_mode } _mode;

  int _sender_decode_offset;

  // Cached information
  methodOop _method;
  int       _bci;

  // Should VM activations be ignored or not
  bool _stop_at_java_call_stub;

  bool fill_in_compiled_inlined_sender();
  void fill_from_compiled_frame(int decode_offset);
  void fill_from_compiled_native_frame();

  void found_bad_method_frame();

  void fill_from_interpreter_frame();
  bool fill_from_frame();

  // Helper routine for security_get_caller_frame
  void skip_prefixed_method_and_wrappers();

 public:
  // Constructor
  vframeStreamCommon(JavaThread* thread) : _reg_map(thread, false) {
    _thread = thread;
  }

  // Accessors
  methodOop method() const { return _method; }
  int bci() const { return _bci; }
  intptr_t* frame_id() const { return _frame.id(); }
  address frame_pc() const { return _frame.pc(); }

  CodeBlob* cb() const { return _frame.cb(); }
  nmethod*  nm() const {
    assert(cb() != NULL && cb()->is_nmethod(), "usage");
    return (nmethod*) cb();
  }

  // Frame type
  bool is_interpreted_frame() const { return _frame.is_interpreted_frame(); }
  bool is_entry_frame() const       { return _frame.is_entry_frame(); }

  // Iteration
  void next() {
    // handle frames with inlining
    if (_mode == compiled_mode && fill_in_compiled_inlined_sender()) return;

    // handle general case
    do {
      _frame = _frame.sender(&_reg_map);
    } while (!fill_from_frame());
  }

  bool at_end() const { return _mode == at_end_mode; }

  // Implements security traversal. Skips 'depth' frames, including
  // special security frames and prefixed native methods.
  void security_get_caller_frame(int depth);

  // Helper routine for JVM_LatestUserDefinedLoader -- needed for 1.4
  // reflection implementation
  void skip_reflection_related_frames();
};

class vframeStream : public vframeStreamCommon {
 public:
  // Constructors
  vframeStream(JavaThread* thread, bool stop_at_java_call_stub = false)
    : vframeStreamCommon(thread) {
    _stop_at_java_call_stub = stop_at_java_call_stub;

    if (!thread->has_last_Java_frame()) {
      _mode = at_end_mode;
      return;
    }

    _frame = _thread->last_frame();
    while (!fill_from_frame()) {
      _frame = _frame.sender(&_reg_map);
    }
  }

  // top_frame may not be at a safepoint, so start with its sender
  vframeStream(JavaThread* thread, frame top_frame, bool stop_at_java_call_stub = false);
};
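
// Illustrative sketch (not part of this header): the typical iteration pattern
// for a vframeStream, e.g. when building a stack trace. The loop body here is
// only an example.
//
//   vframeStream vfst(thread);
//   while (!vfst.at_end()) {
//     methodOop m   = vfst.method();
//     int       bci = vfst.bci();
//     m->print_short_name(tty);
//     tty->print_cr(" @ bci %d", bci);
//     vfst.next();
//   }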


inline bool vframeStreamCommon::fill_in_compiled_inlined_sender() {
  if (_sender_decode_offset == DebugInformationRecorder::serialized_null) {
    return false;
  }
  fill_from_compiled_frame(_sender_decode_offset);
  return true;
}


inline void vframeStreamCommon::fill_from_compiled_frame(int decode_offset) {
  _mode = compiled_mode;

  // Range check to detect ridiculous offsets.
  if (decode_offset == DebugInformationRecorder::serialized_null ||
      decode_offset < 0 ||
      decode_offset >= nm()->scopes_data_size()) {
    // 6379830 AsyncGetCallTrace sometimes feeds us wild frames.
    // If we attempt to read nmethod::scopes_data at serialized_null (== 0),
    // or if we read data at some other crazy offset,
    // we will decode garbage and make wild references into the heap,
    // leading to crashes in product mode.
    // (This isn't airtight, of course, since there are internal
    // offsets which are also crazy.)
#ifdef ASSERT
    if (WizardMode) {
      tty->print_cr("Error in fill_from_frame: pc_desc for "
                    INTPTR_FORMAT " not found or invalid at %d",
                    _frame.pc(), decode_offset);
      nm()->print();
      nm()->method()->print_codes();
      nm()->print_code();
      nm()->print_pcs();
    }
#endif
    // Provide a cheap fallback in product mode.  (See comment above.)
    found_bad_method_frame();
    fill_from_compiled_native_frame();
    return;
  }

  // Decode the first part of the scopeDesc
  DebugInfoReadStream buffer(nm(), decode_offset);
  _sender_decode_offset = buffer.read_int();
  _method               = methodOop(buffer.read_oop());
  _bci                  = buffer.read_bci();

  assert(_method->is_method(), "checking type of decoded method");
}

// The native frames are handled specially. We do not rely on ScopeDesc info
// since the pc might not be exact due to the _last_native_pc trick.
inline void vframeStreamCommon::fill_from_compiled_native_frame() {
  _mode = compiled_mode;
  _sender_decode_offset = DebugInformationRecorder::serialized_null;
  _method = nm()->method();
  _bci = 0;
}

inline bool vframeStreamCommon::fill_from_frame() {
  // Interpreted frame
  if (_frame.is_interpreted_frame()) {
    fill_from_interpreter_frame();
    return true;
  }

  // Compiled frame

  if (cb() != NULL && cb()->is_nmethod()) {
    if (nm()->is_native_method()) {
      // Do not rely on scopeDesc since the pc might be imprecise due to the _last_native_pc trick.
      fill_from_compiled_native_frame();
    } else {
      PcDesc* pc_desc = nm()->pc_desc_at(_frame.pc());
      int decode_offset;
      if (pc_desc == NULL) {
        // Should not happen, but let fill_from_compiled_frame handle it.

        // If we are trying to walk the stack of a thread that is not
        // at a safepoint (like AsyncGetCallTrace would do) then this is an
        // acceptable result. [ This is assuming that safe_for_sender
        // is so bullet proof that we can trust the frames it produced. ]
        //
        // So if we see that the thread is not safepoint safe
        // then simply produce the method and a bci of zero
        // and skip the possibility of decoding any inlining that
        // may be present. That is far better than simply stopping (or
        // asserting). If however the thread is safepoint safe, this
        // is the sign of a compiler bug and we'll let
        // fill_from_compiled_frame handle it.


        JavaThreadState state = _thread->thread_state();

        // in_Java should be good enough to test safepoint safety.
        // If the state were, say, in_Java_trans, then we'd expect that
        // the pc would have already been slightly adjusted to
        // one that would produce a pcDesc, since the trans state
        // would be one that might in fact anticipate a safepoint.

        if (state == _thread_in_Java) {
          // This will get a method, a zero bci and no inlining.
          // Might be nice to have a unique bci to signify this
          // particular case but for now zero will do.

          fill_from_compiled_native_frame();

          // There is something to be said for setting the mode to
          // at_end_mode to prevent trying to walk further up the
          // stack. There is evidence that if we walk any further
          // we could produce a bad stack chain. However, until
          // we see evidence that allowing this causes us to find
          // frames bad enough to cause segvs or assertion failures,
          // we don't do it: while we may get a bad call chain, the
          // probability is much higher (by several orders of magnitude)
          // that we get good data.

          return true;
        }
        decode_offset = DebugInformationRecorder::serialized_null;
      } else {
        decode_offset = pc_desc->scope_decode_offset();
      }
      fill_from_compiled_frame(decode_offset);
    }
    return true;
  }

  // End of stack?
  if (_frame.is_first_frame() || (_stop_at_java_call_stub && _frame.is_entry_frame())) {
    _mode = at_end_mode;
    return true;
  }

  return false;
}


inline void vframeStreamCommon::fill_from_interpreter_frame() {
  methodOop method = _frame.interpreter_frame_method();
  intptr_t  bcx    = _frame.interpreter_frame_bcx();
  int       bci    = method->validate_bci_from_bcx(bcx);
  // 6379830 AsyncGetCallTrace sometimes feeds us wild frames.
  if (bci < 0) {
    found_bad_method_frame();
    bci = 0;  // pretend it's on the point of entering
  }
  _mode   = interpreted_mode;
  _method = method;
  _bci    = bci;
}