Lines Matching refs:th (uses of the rb_thread_t *th thread pointer); a simplified sketch of the recurring call/frame pattern follows the listing.

15 static inline VALUE vm_yield_with_cref(rb_thread_t *th, int argc, const VALUE *argv, const NODE *cref);
16 static inline VALUE vm_yield(rb_thread_t *th, int argc, const VALUE *argv);
17 static NODE *vm_cref_push(rb_thread_t *th, VALUE klass, int noex, rb_block_t *blockptr);
18 static VALUE vm_exec(rb_thread_t *th);
19 static void vm_set_eval_stack(rb_thread_t * th, VALUE iseqval, const NODE *cref, rb_block_t *base_block);
20 static int vm_collect_local_variables_in_heap(rb_thread_t *th, VALUE *dfp, VALUE ary);
23 VALUE vm_backtrace_str_ary(rb_thread_t *th, int lev, int n);
34 static VALUE vm_call0_body(rb_thread_t* th, rb_call_info_t *ci, const VALUE *argv);
37 vm_call0(rb_thread_t* th, VALUE recv, ID id, int argc, const VALUE *argv,
49 return vm_call0_body(th, ci, argv);
54 vm_call0_cfunc(rb_thread_t* th, rb_call_info_t *ci, const VALUE *argv)
58 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(th, ci->defined_class, ci->mid);
59 EXEC_EVENT_HOOK(th, RUBY_EVENT_C_CALL, ci->recv, ci->mid, ci->defined_class, Qnil);
61 rb_control_frame_t *reg_cfp = th->cfp;
68 th->passed_ci = ci;
73 if (reg_cfp == th->cfp) {
74 if (UNLIKELY(th->passed_ci != ci)) {
75 rb_bug("vm_call0_cfunc: passed_ci error (ci: %p, passed_ci: %p)", ci, th->passed_ci);
77 th->passed_ci = 0;
80 if (reg_cfp != th->cfp + 1) {
84 vm_pop_frame(th);
87 EXEC_EVENT_HOOK(th, RUBY_EVENT_C_RETURN, ci->recv, ci->mid, ci->defined_class, val);
88 RUBY_DTRACE_CMETHOD_RETURN_HOOK(th, ci->defined_class, ci->mid);
94 vm_call0_cfunc_with_frame(rb_thread_t* th, rb_call_info_t *ci, const VALUE *argv)
106 RUBY_DTRACE_CMETHOD_ENTRY_HOOK(th, defined_class, mid);
107 EXEC_EVENT_HOOK(th, RUBY_EVENT_C_CALL, recv, mid, defined_class, Qnil);
109 rb_control_frame_t *reg_cfp = th->cfp;
111 vm_push_frame(th, 0, VM_FRAME_MAGIC_CFUNC, recv, defined_class,
119 if (UNLIKELY(reg_cfp != th->cfp + 1)) {
123 vm_pop_frame(th);
125 EXEC_EVENT_HOOK(th, RUBY_EVENT_C_RETURN, recv, mid, defined_class, val);
126 RUBY_DTRACE_CMETHOD_RETURN_HOOK(th, defined_class, mid);
132 vm_call0_cfunc(rb_thread_t* th, rb_call_info_t *ci, const VALUE *argv)
134 return vm_call0_cfunc_with_frame(th, ci, argv);
140 vm_call0_body(rb_thread_t* th, rb_call_info_t *ci, const VALUE *argv)
146 if (th->passed_block) {
147 ci->blockptr = (rb_block_t *)th->passed_block;
148 th->passed_block = 0;
158 rb_control_frame_t *reg_cfp = th->cfp;
168 vm_call_iseq_setup(th, reg_cfp, ci);
169 th->cfp->flag |= VM_FRAME_FLAG_FINISH;
170 return vm_exec(th); /* CHECK_INTS in this function */
174 ret = vm_call0_cfunc(th, ci, argv);
185 ret = vm_call_bmethod_body(th, ci, argv);
202 RUBY_VM_CHECK_INTS(th);
212 th->passed_block = ci->blockptr;
224 ret = rb_vm_invoke_proc(th, proc, ci->argc, argv, ci->blockptr);
238 RUBY_VM_CHECK_INTS(th);
243 rb_vm_call(rb_thread_t *th, VALUE recv, VALUE id, int argc, const VALUE *argv,
246 return vm_call0(th, recv, id, argc, argv, me, defined_class);
250 vm_call_super(rb_thread_t *th, int argc, const VALUE *argv)
252 VALUE recv = th->cfp->self;
256 rb_control_frame_t *cfp = th->cfp;
269 return vm_call0(th, recv, id, argc, argv, me, klass);
282 rb_thread_t *th = GET_THREAD();
284 if (!rb_thread_raised_p(th, RAISED_STACKOVERFLOW) && ruby_stack_check()) {
285 rb_thread_raised_set(th, RAISED_STACKOVERFLOW);
292 static inline int rb_method_call_status(rb_thread_t *th, const rb_method_entry_t *me, call_type scope, VALUE self);
317 rb_thread_t *th = GET_THREAD();
318 int call_status = rb_method_call_status(th, me, scope, self);
324 return vm_call0(th, recv, mid, argc, argv, me, defined_class);
355 check_funcall_respond_to(rb_thread_t *th, VALUE klass, VALUE recv, ID mid)
361 const rb_block_t *passed_block = th->passed_block;
372 result = vm_call0(th, recv, idRespond_to, arity, args, me, defined_class);
373 th->passed_block = passed_block;
382 check_funcall_callable(rb_thread_t *th, const rb_method_entry_t *me)
384 return rb_method_call_status(th, me, CALL_FCALL, th->cfp->self) == NOEX_OK;
388 check_funcall_missing(rb_thread_t *th, VALUE klass, VALUE recv, ID mid, int argc, VALUE *argv)
396 th->method_missing_reason = 0;
412 rb_thread_t *th = GET_THREAD();
415 if (!check_funcall_respond_to(th, klass, recv, mid))
419 if (check_funcall_callable(th, me) != NOEX_OK) {
420 return check_funcall_missing(th, klass, recv, mid, argc, argv);
423 return vm_call0(th, recv, mid, argc, argv, me, defined_class);
432 rb_thread_t *th = GET_THREAD();
435 if (!check_funcall_respond_to(th, klass, recv, mid))
439 if (check_funcall_callable(th, me) != NOEX_OK) {
441 return check_funcall_missing(th, klass, recv, mid, argc, argv);
445 return vm_call0(th, recv, mid, argc, argv, me, defined_class);
526 rb_method_call_status(rb_thread_t *th, const rb_method_entry_t *me, call_type scope, VALUE self)
559 if (NOEX_SAFE(noex) > th->safe_level) {
583 rb_thread_t *th = GET_THREAD();
584 return rb_call0(recv, mid, argc, argv, scope, th->cfp->self);
587 NORETURN(static void raise_method_missing(rb_thread_t *th, int argc, const VALUE *argv,
626 rb_thread_t *th = GET_THREAD();
627 raise_method_missing(th, argc, argv, obj, th->method_missing_reason);
658 raise_method_missing(rb_thread_t *th, int argc, const VALUE *argv, VALUE obj,
687 th->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(th->cfp);
697 rb_thread_t *th = GET_THREAD();
698 const rb_block_t *blockptr = th->passed_block;
700 th->method_missing_reason = call_status;
701 th->passed_block = 0;
704 raise_method_missing(th, argc, argv, obj, call_status | NOEX_MISSING);
719 raise_method_missing(th, argc+1, nargv, obj, call_status | NOEX_MISSING);
721 th->passed_block = blockptr;
728 rb_raise_method_missing(rb_thread_t *th, int argc, VALUE *argv,
731 th->passed_block = 0;
732 raise_method_missing(th, argc, argv, obj, call_status | NOEX_MISSING);
836 rb_thread_t *th = GET_THREAD();
843 th->passed_block = block;
855 rb_thread_t *th = GET_THREAD();
861 self = RUBY_VM_PREVIOUS_CONTROL_FRAME(th->cfp)->self;
879 PASS_PASSED_BLOCK_TH(th);
1037 rb_thread_t *th = GET_THREAD();
1038 rb_control_frame_t *volatile cfp = th->cfp;
1041 TH_PUSH_TAG(th);
1048 blockptr = RUBY_VM_GET_BLOCK_PTR_IN_CFP(th->cfp);
1053 blockptr = VM_CF_BLOCK_PTR(th->cfp);
1055 th->passed_block = blockptr;
1060 VALUE err = th->errinfo;
1067 th->state = 0;
1068 th->errinfo = Qnil;
1072 while (th->cfp != cfp) {
1074 printf("skipped frame: %s\n", vm_frametype_name(th->cfp));
1076 if (UNLIKELY(VM_FRAME_TYPE(th->cfp) == VM_FRAME_MAGIC_CFUNC)) {
1077 const rb_method_entry_t *me = th->cfp->me;
1078 EXEC_EVENT_HOOK(th, RUBY_EVENT_C_RETURN, th->cfp->self, me->called_id, me->klass, Qnil);
1079 RUBY_DTRACE_CMETHOD_RETURN_HOOK(th, me->klass, me->called_id);
1082 th->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(th->cfp);
1095 th->state = 0;
1096 th->errinfo = Qnil;
1097 th->cfp = cfp;
1108 TH_JUMP_TAG(th, state);
1176 rb_thread_t *th = GET_THREAD();
1187 parse_in_eval = th->parse_in_eval;
1188 mild_compile_error = th->mild_compile_error;
1189 TH_PUSH_TAG(th);
1220 rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(th, th->cfp);
1238 th->parse_in_eval++;
1239 th->mild_compile_error++;
1241 th->mild_compile_error--;
1242 th->parse_in_eval--;
1244 vm_set_eval_stack(th, iseqval, cref, base_block);
1254 bind->env = rb_vm_make_env_object(th, th->cfp);
1258 CHECK_VM_STACK_OVERFLOW(th->cfp, iseq->stack_max);
1259 result = vm_exec(th);
1262 th->mild_compile_error = mild_compile_error;
1263 th->parse_in_eval = parse_in_eval;
1267 VALUE errinfo = th->errinfo;
1276 (bt2 = vm_backtrace_str_ary(th, 0, 0), RARRAY_LEN(bt2) > 0)) {
1429 rb_thread_t *th = GET_THREAD();
1430 VALUE self = th->top_self;
1431 VALUE wrapper = th->top_wrapper;
1434 th->top_wrapper = rb_module_new();
1435 th->top_self = rb_obj_clone(rb_vm_top_self());
1436 rb_extend_object(th->top_self, th->top_wrapper);
1440 th->top_self = self;
1441 th->top_wrapper = wrapper;
1495 rb_thread_t *th = GET_THREAD();
1499 if ((blockptr = VM_CF_BLOCK_PTR(th->cfp)) != 0) {
1502 VM_CF_LEP(th->cfp)[0] = VM_ENVVAL_BLOCK_PTR(&block);
1504 cref = vm_cref_push(th, under, NOEX_PUBLIC, blockptr);
1508 return vm_yield_with_cref(th, 1, &self, cref);
1511 return vm_yield_with_cref(th, RARRAY_LENINT(values), RARRAY_PTR(values), cref);
1518 rb_thread_t *th = GET_THREAD();
1522 if ((blockptr = VM_CF_BLOCK_PTR(th->cfp)) != 0) {
1525 VM_CF_LEP(th->cfp)[0] = VM_ENVVAL_BLOCK_PTR(&block);
1527 cref = vm_cref_push(th, refinement, NOEX_PUBLIC, blockptr);
1531 return vm_yield_with_cref(th, 0, NULL, cref);
1732 rb_thread_t *th = GET_THREAD();
1733 struct rb_vm_tag *tt = th->tag;
1747 th->errinfo = NEW_THROW_OBJECT(tag, 0, TAG_THROW);
1827 rb_thread_t *th = GET_THREAD();
1828 rb_control_frame_t *saved_cfp = th->cfp;
1830 TH_PUSH_TAG(th);
1832 th->tag->tag = tag;
1838 else if (state == TAG_THROW && RNODE(th->errinfo)->u1.value == tag) {
1839 th->cfp = saved_cfp;
1840 val = th->tag->retval;
1841 th->errinfo = Qnil;
1868 rb_thread_t *th = GET_THREAD();
1870 vm_get_ruby_level_caller_cfp(th, RUBY_VM_PREVIOUS_CONTROL_FRAME(th->cfp));
1890 if (vm_collect_local_variables_in_heap(th, ep, ary)) {
1931 rb_thread_t *th = GET_THREAD();
1932 rb_control_frame_t *cfp = th->cfp;
1933 cfp = vm_get_ruby_level_caller_cfp(th, RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
1946 rb_thread_t *th = GET_THREAD();
1947 rb_control_frame_t *cfp = th->cfp;
1948 cfp = vm_get_ruby_level_caller_cfp(th, RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
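The listing above repeatedly shows the same shape: each entry point takes an explicit rb_thread_t *th (or fetches it with GET_THREAD()), caches th->cfp, pushes a control frame, runs the target, verifies that the frame pointer is back where it should be, pops the frame, and fires C_CALL/C_RETURN event hooks around the call (see the vm_call0_cfunc_with_frame lines). The fragment below is a minimal, self-contained sketch of that pattern only; it is not MRI code, and every name in it (toy_thread, toy_frame, call_cfunc_with_frame, and so on) is a hypothetical stand-in rather than a real Ruby VM API.

/* Minimal sketch of the explicit-thread-pointer pattern seen above.
 * All types and functions here are hypothetical stand-ins, not MRI APIs. */
#include <assert.h>
#include <stdio.h>

typedef struct toy_frame { const char *name; } toy_frame;

typedef struct toy_thread {
    toy_frame frames[16]; /* control frame stack; grows toward index 0 */
    int cfp;              /* index of the current frame, like th->cfp */
} toy_thread;

static void push_frame(toy_thread *th, const char *name)
{
    th->frames[--th->cfp] = (toy_frame){ name };
}

static void pop_frame(toy_thread *th)
{
    th->cfp++;
}

/* Rough analogue of the vm_call0_cfunc_with_frame lines: cache the frame
 * position, fire a "call" hook, push a frame, run the C function, check
 * frame balance (cf. the reg_cfp != th->cfp + 1 test), pop, fire "return". */
static int call_cfunc_with_frame(toy_thread *th, int (*cfunc)(void), const char *name)
{
    int reg_cfp = th->cfp;              /* like reg_cfp = th->cfp */
    printf("C_CALL hook: %s\n", name);  /* stand-in for EXEC_EVENT_HOOK(...C_CALL...) */
    push_frame(th, name);
    int val = cfunc();
    assert(reg_cfp == th->cfp + 1);     /* frame stack must be balanced */
    pop_frame(th);
    printf("C_RETURN hook: %s\n", name);
    return val;
}

static int forty_two(void) { return 42; }

int main(void)
{
    toy_thread th = { .cfp = 16 };
    printf("result: %d\n", call_cfunc_with_frame(&th, forty_two, "forty_two"));
    return 0;
}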