#include "internal/variable.h"
#include "insns_info.inc"

NORETURN(static void vm_stackoverflow(void));

    ec_stack_overflow(GET_EC(), TRUE);

    rb_bug("system stack overflow during GC. Faulty native extension?");

    ec_stack_overflow(ec, TRUE);
    ec_stack_overflow(ec, FALSE);

#if VM_CHECK_MODE >= 2
    switch (RB_BUILTIN_TYPE(klass)) {
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref,
                      VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)

    if (RB_TYPE_P(cref_or_me, T_IMEMO)) {

        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);

        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);

        rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);

    if (req_cref && cref_or_me_type != imemo_cref) {
        rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);

        rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);

        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);

              RUBY_VM_NORMAL_ISEQ_P(iseq));

    VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    vm_check_frame_detail(type, req_block, req_me, req_cref, \
                          specval, cref_or_me, is_cframe, iseq); \

    switch (given_magic) {

        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
static VALUE vm_stack_canary;
static bool vm_stack_canary_was_born = false;

    if (!LIKELY(vm_stack_canary_was_born)) {
    else if (LIKELY(sp[0] != vm_stack_canary)) {

    const ptrdiff_t pos = GET_PC() - encoded;
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);

        "We are killing the stack canary set by %s, "
        "watch out the C stack trace.\n"
        name, stri, pos, strd);

#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)

#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)

    if (RUBY_VM_END_CONTROL_FRAME(ec) != prev_cfp) {
        const bool curr = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev = VM_FRAME_RUBYFRAME_P(prev_cfp);

#define vm_push_frame_debug_counter_inc(ec, cfp, t)
    for (int i=0; i < local_size; i++) {

    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);

rb_arity_error_new(int argc, int min, int max)

        err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d)", argc, min);

        err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d+)", argc, min);

        err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d..%d)", argc, min, max);
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)

    VM_FORCE_WRITE(&ep[index], v);

vm_env_write(const VALUE *ep, int index, VALUE v)

        VM_STACK_ENV_WRITE(ep, index, v);

        vm_env_write_slowpath(ep, index, v);
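/* A minimal, self-contained sketch (not from vm_insnhelper.c) of the fast/slow
 * split used by vm_env_write() above: while the environment still carries its
 * "write barrier required" flag, a write has to go through a slow path that
 * tells the GC about the new reference; otherwise a plain store is enough.
 * All names here (env_t, env_write, gc_remember) are hypothetical. */
#include <stdbool.h>
#include <stdint.h>
#include <stdio.h>

typedef uintptr_t value_t;

typedef struct {
    bool wb_required;          /* analogous to VM_ENV_FLAG_WB_REQUIRED */
    value_t slots[8];
} env_t;

static void gc_remember(env_t *env, value_t v)
{
    /* stand-in for the GC write barrier taken on the slow path */
    printf("write barrier: env=%p value=%lx\n", (void *)env, (unsigned long)v);
}

static void env_write_slowpath(env_t *env, int index, value_t v)
{
    gc_remember(env, v);
    env->slots[index] = v;
}

static inline void env_write(env_t *env, int index, value_t v)
{
    if (!env->wb_required) {
        env->slots[index] = v;          /* fast path: plain store */
    }
    else {
        env_write_slowpath(env, index, v);
    }
}

int main(void)
{
    env_t env = { .wb_required = true };
    env_write(&env, 0, 0x2a);           /* goes through the barrier */
    env.wb_required = false;
    env_write(&env, 1, 0x2b);           /* plain store */
    return 0;
}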
    switch (vm_block_handler_type(block_handler)) {

        return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);

        return VM_BH_TO_PROC(block_handler);

vm_svar_valid_p(VALUE svar)

    const struct vm_svar *svar = lep_svar(ec, lep);

        return svar->lastline;
        return svar->backref;
        const VALUE ary = svar->others;

    struct vm_svar *svar = lep_svar(ec, lep);

        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));

    VALUE ary = svar->others;

    val = lep_svar_get(ec, lep, key);

        rb_bug("unexpected back-ref");
check_method_entry(VALUE obj, int can_be_svar)

        rb_bug("check_method_entry: svar should not be there:");

    while (!VM_ENV_LOCAL_P(ep)) {
        ep = VM_ENV_PREV_EP(ep);

    switch (me->def->type) {

    switch (me->def->type) {

#if VM_CHECK_MODE == 0

check_cref(VALUE obj, int can_be_svar)

        rb_bug("check_method_entry: svar should not be there:");

vm_env_cref(const VALUE *ep)

    while (!VM_ENV_LOCAL_P(ep)) {
        ep = VM_ENV_PREV_EP(ep);

is_cref(const VALUE v, int can_be_svar)

vm_env_cref_by_cref(const VALUE *ep)

    while (!VM_ENV_LOCAL_P(ep)) {
        ep = VM_ENV_PREV_EP(ep);
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)

    const VALUE v = *vptr;

            new_cref = vm_cref_dup(cref);

            VM_FORCE_WRITE(vptr, (VALUE)new_cref);

        rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");

vm_cref_replace_with_duplicated_cref(const VALUE *ep)

    if (vm_env_cref_by_cref(ep)) {

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;

            ep = VM_ENV_PREV_EP(ep);

        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;

        rb_bug("vm_cref_dup: unreachable");
vm_get_cref(const VALUE *ep)

        rb_bug("vm_get_cref: unreachable");

    return vm_get_cref(cfp->ep);

vm_get_const_key_cref(const VALUE *ep)

        cref = CREF_NEXT(cref);

        if (CREF_CLASS(cref) == old_klass) {
            *new_cref_ptr = new_cref;

        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;

    *new_cref_ptr = NULL;

        prev_cref = vm_env_cref(ep);

            prev_cref = vm_env_cref(cfp->ep);

vm_get_cbase(const VALUE *ep)

        if ((klass = CREF_CLASS(cref)) != 0) {

        cref = CREF_NEXT(cref);

vm_get_const_base(const VALUE *ep)

        if (!CREF_PUSHED_BY_EVAL(cref) &&
            (klass = CREF_CLASS(cref)) != 0) {

        cref = CREF_NEXT(cref);

vm_check_if_namespace(VALUE klass)

vm_ensure_not_refinement_module(VALUE self)

        rb_warn("not defined at the refinement, but at the outer class/module");
    if (orig_klass == Qnil && allow_nil) {

        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);

        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {

                klass = CREF_CLASS(cref);

            cref = CREF_NEXT(cref);

                    if (am == klass) break;

                    if (is_defined) return 1;

                        goto search_continue;

                        if (UNLIKELY(!rb_ractor_main_p())) {
                            if (!rb_ractor_shareable_p(val)) {

        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));

        vm_check_if_namespace(orig_klass);

        rb_bug("vm_get_cvar_base: no cref");

    while (CREF_NEXT(cref) &&
           CREF_PUSHED_BY_EVAL(cref))) {
        cref = CREF_NEXT(cref);

    if (top_level_raise && !CREF_NEXT(cref)) {

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));
vm_search_const_defined_class(const VALUE cbase, ID id)

    if (iv_index_tbl == NULL) return false;

        return found ? true : false;

        vm_cc_attr_index_set(cc, (int)ent->index + 1);

    else if (LIKELY(is_attr ?

             LIKELY(index < ROBJECT_NUMIV(obj))) {
            val = ROBJECT_IVPTR(obj)[index];

            VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);

        if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl, id, &ent)) {
            fill_ivar_cache(iseq, ic, cc, is_attr, ent);

            if (ent->index < ROBJECT_NUMIV(obj)) {
                val = ROBJECT_IVPTR(obj)[ent->index];

                VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);

        if (iv_index_tbl && iv_index_tbl_lookup(iv_index_tbl, id, &ent)) {
            fill_ivar_cache(iseq, ic, cc, is_attr, ent);

        if (iv_index_tbl_lookup(iv_index_tbl, id, &ent)) {

        else if (ent->index >= INT_MAX) {

            vm_cc_attr_index_set(cc, (int)(ent->index + 1));

    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);

    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);

        LIKELY(!RB_OBJ_FROZEN_RAW(obj))) {

        return vm_setivar_slowpath_attr(obj, id, val, cc);

        return vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);

    return vm_getivar(obj, id, iseq, ic, NULL, FALSE);

    vm_setivar(obj, id, val, iseq, ic, 0, 0);
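/* Sketch (hypothetical, not the CRuby implementation) of the inline-cache idea
 * behind vm_getivar()/vm_setivar() above: the first access resolves the ivar
 * name to a slot index through a table and remembers that index; later hits
 * skip the table and read the slot directly. */
#include <stdio.h>
#include <string.h>

#define NUM_SLOTS 4

struct object {
    const char *ivar_names[NUM_SLOTS];   /* stand-in for the iv_index_tbl */
    long        ivar_values[NUM_SLOTS];
    int         num_ivars;
};

struct ivar_cache {                      /* analogous to an inline cache entry */
    int valid;
    int index;
};

static long getivar_cached(struct object *obj, const char *name, struct ivar_cache *ic)
{
    if (ic->valid) {                     /* fast path: cached slot index */
        return obj->ivar_values[ic->index];
    }
    for (int i = 0; i < obj->num_ivars; i++) {   /* slow path: table lookup */
        if (strcmp(obj->ivar_names[i], name) == 0) {
            ic->valid = 1;
            ic->index = i;               /* fill the cache for next time */
            return obj->ivar_values[i];
        }
    }
    return 0;                            /* missing ivar reads as nil (0 here) */
}

int main(void)
{
    struct object obj = { { "@x", "@y" }, { 10, 20 }, 2 };
    struct ivar_cache ic = { 0, 0 };
    printf("%ld\n", getivar_cached(&obj, "@y", &ic));  /* slow, fills the cache */
    printf("%ld\n", getivar_cached(&obj, "@y", &ic));  /* fast, cache hit */
    return 0;
}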
    const int flag, const VALUE throwobj)

        escape_cfp = reg_cfp;

            while (base_iseq->body->type != ISEQ_TYPE_BLOCK) {

                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;

            ep = VM_ENV_PREV_EP(ep);

            escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);

            if (VM_FRAME_LAMBDA_P(escape_cfp)) {

                ep = VM_ENV_PREV_EP(ep);

                while (escape_cfp < eocfp) {
                    if (escape_cfp->ep == ep) {

                        for (i=0; i < ct->size; i++) {

                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) {

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);

        const VALUE *target_lep = VM_EP_LEP(current_ep);
        int in_class_frame = 0;

        escape_cfp = reg_cfp;

        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {

                    if (in_class_frame) {

                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {

                            tep = VM_ENV_PREV_EP(tep);

            else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {

                  case ISEQ_TYPE_MAIN:
                      if (in_class_frame) goto unexpected_return;

                  case ISEQ_TYPE_EVAL:
                  case ISEQ_TYPE_CLASS:

            if (escape_cfp->ep == target_lep && escape_cfp->iseq->body->type == ISEQ_TYPE_METHOD) {

        rb_bug("isns(throw): unsupported throw type");

    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);

        return vm_throw_start(ec, reg_cfp, state, flag, throwobj);

        return vm_throw_continue(ec, throwobj);
    int is_splat = flag & 0x01;

    const VALUE obj = ary;

    if (space_size == 0) {

    else if (flag & 0x02) {

        for (i=0; i<num-len; i++) {

        for (j=0; i<num; i++, j++) {

        VALUE *bptr = &base[space_size - 1];

        for (i=0; i<num; i++) {

        for (; i<num; i++) {
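/* Sketch of the expandarray behaviour handled above: copy up to `num` leading
 * elements of an array into fixed slots, pad with nil when the array is
 * short, and optionally report how much would go into a trailing splat.
 * Simplified and hypothetical, using plain C arrays in place of the VM stack. */
#include <stdio.h>

#define NIL 0L

/* copy the first `num` elements of ary into out[], padding with NIL;
 * if is_splat, return how many trailing elements the splat would collect */
static int expand_array(const long *ary, int len, long *out, int num, int is_splat)
{
    int i;
    for (i = 0; i < num && i < len; i++) out[i] = ary[i];
    for (; i < num; i++) out[i] = NIL;               /* pad short arrays */
    return (is_splat && len > num) ? len - num : 0;  /* size of the rest */
}

int main(void)
{
    long ary[] = { 1, 2, 3, 4, 5 };
    long out[3];
    int rest = expand_array(ary, 5, out, 3, 1);
    printf("%ld %ld %ld rest=%d\n", out[0], out[1], out[2], rest);  /* 1 2 3 rest=2 */
    return 0;
}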
#if VM_CHECK_MODE > 0
    ccs->debug_sig = ~(VALUE)ccs;

    if (! vm_cc_markable(cc)) {
    else if (! vm_ci_markable(ci)) {

    if (ccs->capa == 0) {

    const int pos = ccs->len++;

#if VM_CHECK_MODE > 0
    fprintf(stderr, "ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {

    for (int i=0; i<ccs->len; i++) {

    const ID mid = vm_ci_mid(ci);

    const int ccs_len = ccs->len;
    VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

    for (int i=0; i<ccs_len; i++) {
        const struct rb_callinfo *ccs_ci = ccs->entries[i].ci;
        const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

        VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);

        return &vm_empty_cc;

#if VM_CHECK_MODE > 0

        ccs = vm_ccs_create(klass, cme);

    vm_ccs_push(klass, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

        cc = vm_search_cc(klass, ci);

#if USE_DEBUG_COUNTER
#if OPT_INLINE_METHOD_CACHE
#if USE_DEBUG_COUNTER
    if (old_cc == &empty_cc) {
    else if (old_cc == cc) {
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {

              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

#if OPT_INLINE_METHOD_CACHE
    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    return vm_search_method_slowpath0(cd_owner, cd, klass);

    return vm_search_method_fastpath(cd_owner, cd, klass);

    return check_cfunc(vm_cc_cme(cc), func);
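/* Hypothetical sketch of the per-class call-cache idea behind vm_search_cc()
 * and vm_search_method_fastpath() above: each class keeps a small list of
 * (method id -> resolved target) entries, so repeated calls of the same
 * method on the same class skip the expensive lookup. All names are invented
 * for the sketch and simplify the real rb_class_cc_entries layout heavily. */
#include <stdio.h>
#include <string.h>

#define CC_CAPA 8

struct method_entry {
    const char *mid;            /* method name, standing in for an ID */
    long (*func)(long recv);    /* resolved implementation */
};

struct klass {
    struct method_entry methods[CC_CAPA];   /* the "method table" */
    int nmethods;
    struct method_entry cache[CC_CAPA];     /* cached resolutions */
    int ncached;
};

static long slow_lookup_calls;

static const struct method_entry *search_cc(struct klass *k, const char *mid)
{
    for (int i = 0; i < k->ncached; i++) {          /* cache hit: cheap */
        if (strcmp(k->cache[i].mid, mid) == 0) return &k->cache[i];
    }
    slow_lookup_calls++;                            /* cache miss: full lookup */
    for (int i = 0; i < k->nmethods; i++) {
        if (strcmp(k->methods[i].mid, mid) == 0) {
            if (k->ncached < CC_CAPA) k->cache[k->ncached++] = k->methods[i];
            return &k->methods[i];
        }
    }
    return NULL;                                    /* method missing */
}

static long m_double(long recv) { return recv * 2; }

int main(void)
{
    struct klass k = { .methods = { { "double", m_double } }, .nmethods = 1 };
    for (int i = 0; i < 3; i++) {
        const struct method_entry *me = search_cc(&k, "double");
        printf("call -> %ld\n", me->func(21));
    }
    printf("slow lookups: %ld\n", slow_lookup_calls);   /* 1, not 3 */
    return 0;
}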
#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)

opt_equality_specialized(VALUE recv, VALUE obj)

        goto compare_by_identity;
        goto compare_by_identity;
        goto compare_by_identity;

#if MSC_VERSION_BEFORE(1300)
        else if (isnan(b)) {

    else if (RB_TYPE_P(obj, T_STRING)) {
        return rb_str_eql_internal(obj, recv);

  compare_by_identity:

    VALUE val = opt_equality_specialized(recv, obj);
    if (val != Qundef) return val;

    if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {

#undef EQ_UNREDEFINED_P

opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)

    const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);

        VALUE val = opt_equality_specialized(recv, obj);

    return opt_equality_by_mid_slowpath(recv, obj, mid);

    return opt_equality_by_mid(obj1, obj2, idEq);

    return opt_equality_by_mid(obj1, obj2, idEqlP);
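/* Sketch of the EQ_UNREDEFINED_P / BASIC_OP_UNREDEFINED_P guard used by
 * opt_equality_specialized() above: as long as nobody has redefined == on the
 * relevant core class, the VM may compare raw values directly; once a
 * redefinition is recorded, every comparison has to go through normal method
 * dispatch. Hypothetical, self-contained names. */
#include <stdbool.h>
#include <stdio.h>

static bool integer_eq_redefined = false;   /* like the BOP_EQ redefinition flag */

static bool generic_dispatch_eq(long a, long b)
{
    /* stand-in for a full method call on a user-defined == */
    printf("generic dispatch for ==\n");
    return a == b;
}

static bool opt_eq(long a, long b)
{
    if (!integer_eq_redefined) {
        return a == b;                      /* fast path: raw comparison */
    }
    return generic_dispatch_eq(a, b);       /* slow path: real call */
}

int main(void)
{
    printf("%d\n", opt_eq(3, 3));           /* fast path */
    integer_eq_redefined = true;            /* pretend Integer#== was redefined */
    printf("%d\n", opt_eq(3, 4));           /* now takes the generic path */
    return 0;
}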
        rb_bug("check_match: unreachable");

#if MSC_VERSION_BEFORE(1300)
#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
#define CHECK_CMP_NAN(a, b)

double_cmp_lt(double a, double b)

double_cmp_le(double a, double b)

double_cmp_gt(double a, double b)

double_cmp_ge(double a, double b)

static inline VALUE *

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {

#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            fprintf(stderr, "bp_check: %ld, bp: %ld\n",
                    (long)(cfp->bp_check - GET_EC()->vm_stack),
                    (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);

rb_iseq_only_optparam_p(const rb_iseq_t *iseq)

rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)

        vm_caller_setup_arg_splat(cfp, calling);

            calling->argc > 0 &&
            RB_TYPE_P((final_hash = *(cfp->sp - 1)), T_HASH) &&

            calling->kw_splat = 1;

        vm_caller_setup_arg_kw(cfp, calling, ci);

        VALUE keyword_hash = cfp->sp[-1];
        if (!RB_TYPE_P(keyword_hash, T_HASH)) {

        calling->kw_splat = 0;

#define USE_OPT_HIST 0

#define OPT_HIST_MAX 64
static int opt_hist[OPT_HIST_MAX+1];

opt_hist_show_results_at_exit(void)

    for (int i=0; i<OPT_HIST_MAX; i++) {
        fprintf(stderr, "opt_hist\t%d\t%d\n", i, opt_hist[i]);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);

    const int opt = calling->argc - lead_num;

    const int delta = opt_num - opt;

    if (opt_pc < OPT_HIST_MAX) {

        opt_hist[OPT_HIST_MAX]++;

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);

    const int opt = calling->argc - lead_num;

    if (opt_pc < OPT_HIST_MAX) {

        opt_hist[OPT_HIST_MAX]++;

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);

                                     VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
                                     VALUE *const locals);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);

    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;

    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);

    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;

    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];

    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
    bool cacheable_ci = vm_ci_markable(ci);

        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), cacheable_ci && vm_call_iseq_optimizable_p(ci, cc));

    else if (rb_iseq_only_optparam_p(iseq)) {

        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        const int opt = argc - lead_num;

        if (opt < 0 || opt > opt_num) {
            argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);

            CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,

            CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,

        for (int i=argc; i<lead_num + opt_num; i++) {

    else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {

            VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
            args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

            CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,

        else if (argc == lead_num) {

            VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
            args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);

            if (klocals[kw_param->num] == INT2FIX(0)) {

                CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);

    const int opt_pc = vm_callee_setup_arg(ec, calling, def_iseq_ptr(vm_cc_cme(cc)->def), cfp->sp - calling->argc, param_size, local_size);
    return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);

                      int opt_pc, int param_size, int local_size)

        return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);

        return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);

                           int opt_pc, int param_size, int local_size)

                  local_size - param_size,
    VALUE *sp_orig, *sp;

        calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);

        calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);

    vm_pop_frame(ec, cfp, cfp->ep);

    sp_orig = sp = cfp->sp;

    sp[0] = calling->recv;

        *sp++ = src_argv[i];
ractor_unsafe_check(void)

    if (!rb_ractor_main_p()) {

    ractor_unsafe_check();

    ractor_unsafe_check();

    ractor_unsafe_check();

    ractor_unsafe_check();
    return (*f)(recv, argv[0]);

    ractor_unsafe_check();

    ractor_unsafe_check();

    ractor_unsafe_check();

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

    ractor_unsafe_check();
    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
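/* Sketch of the arity-specific C-function invokers above: the method table
 * stores the function as a generic pointer plus an arity, and the caller
 * casts it back to the exact signature before calling. Names below are
 * hypothetical; only the cast-and-call pattern is the point. */
#include <stdio.h>

typedef long value_t;
typedef value_t (*anyargs_fn)(void);   /* generic storage type for the pointer */

static value_t call_cfunc_1(anyargs_fn func, value_t recv, const value_t *argv)
{
    value_t (*f)(value_t, value_t) = (value_t (*)(value_t, value_t))func;
    return (*f)(recv, argv[0]);
}

static value_t call_cfunc_2(anyargs_fn func, value_t recv, const value_t *argv)
{
    value_t (*f)(value_t, value_t, value_t) = (value_t (*)(value_t, value_t, value_t))func;
    return (*f)(recv, argv[0], argv[1]);
}

static value_t add2(value_t self, value_t a, value_t b) { return self + a + b; }

int main(void)
{
    value_t argv[] = { 2, 3 };
    /* cast to the generic type for storage, cast back at the call site */
    printf("%ld\n", call_cfunc_2((anyargs_fn)add2, 1, argv));  /* 6 */
    return 0;
}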
    return (*f)(recv, argv[0]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

    VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
    return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
#define CHECK_CFP_CONSISTENCY(func) \
    (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
     rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))

#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ATTRSET);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);

        rb_bug("wrong method type: %d", me->def->type);

    int orig_argc = argc;

    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    reg_cfp->sp -= orig_argc + 1;

    CALLER_SETUP_ARG(reg_cfp, calling, ci);
    CALLER_REMOVE_EMPTY_KW_SPLAT(reg_cfp, calling, ci);

    return vm_call_cfunc_with_frame(ec, reg_cfp, calling);

    return vm_setivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, val, NULL, NULL, cc, 1);

    CALLER_SETUP_ARG(cfp, calling, ci);

    return vm_call_bmethod_body(ec, calling, argv);

    VALUE klass = current_class;

    while (RTEST(klass)) {
        if (owner == target_owner) {

    return current_class;

    VM_ASSERT(callable_method_entry_p(cme));

              aliased_callable_method_entry(vm_cc_cme(calling->cc)));

    return vm_call_method_each_type(ec, cfp, calling);

    mid = idMethodMissing;
    missing_reason = ci_missing_reason(ci);
    ec->method_missing_reason = missing_reason;

        { .method_missing_reason = missing_reason },

    return vm_call_method(ec, reg_cfp, calling);

    CALLER_SETUP_ARG(reg_cfp, calling, calling->ci);

    i = calling->argc - 1;

    if (calling->argc == 0) {

    return vm_call_symbol(ec, reg_cfp, calling, calling->ci, sym);

    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);

    return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));

    VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));

    return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);

    return vm_call_general(ec, reg_cfp, calling);

    CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);

    ec->method_missing_reason = reason;

    return vm_call_method(ec, reg_cfp, calling);

    return vm_call_method_missing_body(ec, reg_cfp, calling, calling->ci, vm_cc_cmethod_missing_reason(calling->cc));

        return vm_call_method_nome(ec, cfp, calling);

    cme = refined_method_callable_without_refinement(cme);

    return vm_call_method_each_type(ec, cfp, calling);

find_refinement(VALUE refinements, VALUE klass)

    if (NIL_P(refinements)) {

        if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {

    } while (cfp->iseq != local_iseq);

    VM_ASSERT(callable_method_entry_p(cme));

    ID mid = vm_ci_mid(calling->ci);

    for (; cref; cref = CREF_NEXT(cref)) {
        const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;

            if (vm_cc_call(cc) == vm_call_super_method) {

                cme->def != ref_me->def) {

    if (vm_cc_cme(cc)->def->body.refined.orig_me) {
        return refined_method_callable_without_refinement(vm_cc_cme(cc));

                   search_refined_method(ec, cfp, calling));

    if (vm_cc_cme(ref_cc)) {
        calling->cc = ref_cc;
        return vm_call_method(ec, cfp, calling);

        return vm_call_method_nome(ec, cfp, calling);

    switch (vm_cc_cme(cc)->def->type) {

        CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
        return vm_call_iseq_setup(ec, cfp, calling);

        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling);

        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        vm_cc_attr_index_set(cc, 0);
        return vm_call_attrset(ec, cfp, calling);

        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        vm_cc_attr_index_set(cc, 0);
        return vm_call_ivar(ec, cfp, calling);

        vm_cc_method_missing_reason_set(cc, 0);
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling);

        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling);

        CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
        return vm_call_alias(ec, cfp, calling);

        switch (vm_cc_cme(cc)->def->body.optimize_type) {

            CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
            return vm_call_opt_send(ec, cfp, calling);

            CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
            return vm_call_opt_call(ec, cfp, calling);

            CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
            return vm_call_opt_block_call(ec, cfp, calling);

            rb_bug("vm_call_method: unsupported optimized method type (%d)",
                   vm_cc_cme(cc)->def->body.optimize_type);

        return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

        return vm_call_refined(ec, cfp, calling);

    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);

    const int stat = ci_missing_reason(ci);

    if (vm_ci_mid(ci) == idMethodMissing) {

        return vm_call_method_missing_body(ec, cfp, calling, ci, stat);

    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {

            return vm_call_method_each_type(ec, cfp, calling);

                vm_cc_method_missing_reason_set(cc, stat);
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling);

            return vm_call_method_each_type(ec, cfp, calling);

            return vm_call_method_missing(ec, cfp, calling);

            calling->cc = &cc_on_stack;
            return vm_call_method_each_type(ec, cfp, calling);

            return vm_call_method_each_type(ec, cfp, calling);

        return vm_call_method_nome(ec, cfp, calling);

    return vm_call_method(ec, reg_cfp, calling);

    if (vm_cc_call(cc) != vm_call_super_method) rb_bug("bug");
    return vm_call_method(ec, reg_cfp, calling);
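/* Sketch of the dispatch performed by vm_call_method_each_type() above: the
 * resolved method entry carries a type tag and the VM switches on it to pick
 * a specialized handler (bytecode body, C function, attr reader, missing...).
 * Everything below is illustrative only, not the real rb_method_entry layout. */
#include <stdio.h>

enum method_type { METHOD_ISEQ, METHOD_CFUNC, METHOD_IVAR, METHOD_MISSING };

struct method_entry {
    enum method_type type;
    union {
        const char *iseq_name;          /* bytecode method: name of the iseq */
        long (*cfunc)(long recv);       /* C method: direct function pointer */
        int ivar_index;                 /* attr_reader: slot to load */
    } body;
};

static long call_method(const struct method_entry *me, long recv, const long *ivars)
{
    switch (me->type) {
      case METHOD_ISEQ:
        printf("would push a Ruby frame for %s\n", me->body.iseq_name);
        return 0;
      case METHOD_CFUNC:
        return me->body.cfunc(recv);    /* no Ruby frame body needed */
      case METHOD_IVAR:
        return ivars[me->body.ivar_index];
      case METHOD_MISSING:
      default:
        printf("method_missing path\n");
        return 0;
    }
}

static long c_identity(long recv) { return recv; }

int main(void)
{
    long ivars[] = { 7, 8 };
    struct method_entry cfunc  = { METHOD_CFUNC, { .cfunc = c_identity } };
    struct method_entry reader = { METHOD_IVAR,  { .ivar_index = 1 } };
    printf("%ld %ld\n", call_method(&cfunc, 42, ivars), call_method(&reader, 42, ivars));
    return 0;
}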
NORETURN(static void vm_super_outside(void));

vm_super_outside(void)

    VALUE current_defined_class;

        reg_cfp->iseq != method_entry_iseqptr(me) &&

                 "self has wrong type to call super in this context: "

                 "implicit argument passing of super from method defined"
                 " by define_method() is not supported."
                 " Specify all arguments explicitly.");

                                vm_ci_kwarg(cd->ci));

        cc = vm_cc_new(klass, NULL, vm_call_method_missing);

    if (cached_cme == NULL) {

        if (empty_cc_for_super == NULL) {
            empty_cc_for_super = vm_cc_new(0, NULL, vm_call_super_method);

    else if (cached_cme->called_id != mid) {

        cc = vm_cc_new(klass, cme, vm_call_super_method);

        switch (cached_cme->def->type) {

            vm_cc_call_set(cc, vm_call_super_method);

block_proc_is_lambda(const VALUE procval)

    int is_lambda = FALSE;
    VALUE val, arg, blockarg;

    else if (argc == 0) {

                  0, ec->cfp->sp, 0, 0);

vm_callee_setup_block_arg_arg0_check(VALUE *argv)

        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        calling->argc == 1 &&

            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {

            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);

            for (i=calling->argc; i<iseq->body->param.lead_num; i++) argv[i] = Qnil;
    calling = &calling_entry;

    return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);

                bool is_lambda, VALUE block_handler)

    vm_push_frame(ec, iseq,

    if (calling->argc < 1) {

        VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
        CALLER_SETUP_ARG(reg_cfp, calling, ci);
        return vm_call_symbol(ec, reg_cfp, calling, ci, symbol);

    CALLER_SETUP_ARG(ec->cfp, calling, ci);
    CALLER_REMOVE_EMPTY_KW_SPLAT(ec->cfp, calling, ci);

vm_proc_to_block_handler(VALUE procval)

    const struct rb_block *block = vm_proc_block(procval);

    switch (vm_block_type(block)) {
        return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
        return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
        return VM_BH_FROM_SYMBOL(block->as.symbol);
        return VM_BH_FROM_PROC(block->as.proc);

                     bool is_lambda, VALUE block_handler)

    VALUE proc = VM_BH_TO_PROC(block_handler);
    is_lambda = block_proc_is_lambda(proc);
    block_handler = vm_proc_to_block_handler(proc);

    return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);

                bool is_lambda, VALUE block_handler)

                           bool is_lambda, VALUE block_handler);

    switch (vm_block_handler_type(block_handler)) {
      default:
        rb_bug("vm_invoke_block: unreachable");

    return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);

vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)

        rb_bug("vm_make_proc_with_iseq: unreachable");

    captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);

    return rb_vm_make_proc(ec, captured, rb_cProc);
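/* Sketch of the block-handler dispatch in vm_invoke_block() and
 * vm_proc_to_block_handler() above: a block may arrive as a compiled block
 * (iseq), an internal C block (ifunc), a Symbol, or a full Proc, and the
 * invoker is picked from the handler's type tag. The layout is hypothetical. */
#include <stdio.h>

enum block_handler_type { BH_ISEQ, BH_IFUNC, BH_SYMBOL, BH_PROC };

struct block_handler {
    enum block_handler_type type;
    const char *payload;           /* name of the iseq/symbol/etc., for display */
};

static void invoke_block(const struct block_handler *bh)
{
    switch (bh->type) {
      case BH_ISEQ:   printf("push block frame for iseq %s\n", bh->payload); break;
      case BH_IFUNC:  printf("call internal C block %s\n", bh->payload); break;
      case BH_SYMBOL: printf("Symbol#to_proc style call of %s\n", bh->payload); break;
      case BH_PROC:   printf("unwrap proc %s and re-dispatch\n", bh->payload); break;
    }
}

int main(void)
{
    struct block_handler h1 = { BH_ISEQ, "block in main" };
    struct block_handler h2 = { BH_SYMBOL, ":to_s" };
    invoke_block(&h1);
    invoke_block(&h2);
    return 0;
}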
vm_once_clear(VALUE data)

    args[0] = obj; args[1] = Qfalse;

        klass = vm_get_cbase(GET_EP());

        klass = vm_get_cvar_base(cref, GET_CFP(), 0);

        if (vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true)) {

        expr_type = check_respond_to_missing(obj, v);

        rb_bug("unimplemented defined? type (VM)");

    if (expr_type != 0) {

    const VALUE *ep = reg_ep;
    for (i = 0; i < lv; i++) {

vm_get_special_object(const VALUE *const reg_ep,

        return vm_get_cbase(reg_ep);

        return vm_get_const_base(reg_ep);

        rb_bug("putspecialobject insn: unknown value_type %d", type);

    const VALUE ary2 = ary2st;

    else if (RTEST(flag)) {

    for (i = 0; i < n; i++) {

    if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
        RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {

    if ((ns = vm_search_const_defined_class(cbase, id)) == 0) {

    if (!RB_TYPE_P(klass, T_CLASS)) {

                 "superclass mismatch for class %"PRIsVALUE"",

vm_declare_module(ID id, VALUE cbase)

    if (!NIL_P(location)) {

                        " previous definition of %"PRIsVALUE" was here",

                 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",

    vm_check_if_namespace(cbase);

    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_class(id, flags, super, klass))
            unmatched_redefinition("class", cbase, id, klass);

        return vm_declare_class(id, flags, cbase, super);

    vm_check_if_namespace(cbase);
    if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
        if (!vm_check_if_module(id, mod))
            unmatched_redefinition("module", cbase, id, mod);

        return vm_declare_module(id, cbase);

vm_find_or_create_class_by_id(ID id,

        return vm_define_class(id, flags, cbase, super);

        return vm_define_module(id, flags, cbase);

        rb_bug("unknown defineclass type: %d", (int)type);
    if (!vm_env_cref_by_cref(cfp->ep)) {

    return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;

    if (!vm_env_cref_by_cref(cfp->ep)) {

    return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;

    if (!is_singleton) {
        klass = CREF_CLASS(cref);
        visi = vm_scope_visibility_get(ec);

    if (!is_singleton && vm_scope_module_func_check(ec)) {

    return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);

    return vm_search_method((VALUE)reg_cfp->iseq, cd, recv);

    .call_ = vm_invokeblock_i,

# define mexp_search_method vm_search_method_wrap
# define mexp_search_super vm_search_super_method
# define mexp_search_invokeblock vm_search_invokeblock

    VALUE block_handler,

    int argc = vm_ci_argc(ci);

        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);

    switch (method_explorer) {

        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);

        calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
        calling.ci = cd->ci;
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);

        val = vm_invokeblock_i(ec, GET_CFP(), &calling);

    if (GET_ISEQ()->body->catch_except_p) {

    else if ((val = mjit_exec(ec)) == Qundef) {

    return mjit_exec(ec);
#define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0

    return (ic_cref == NULL ||
            ic_cref == vm_get_cref(reg_ep));

    ice->ic_cref = vm_get_const_key_cref(reg_ep);

    return vm_once_exec((VALUE)iseq);

    if (RB_FLOAT_TYPE_P(key)) {

        if (!isinf(kval) && modf(kval, &kval) == 0.0) {

    static const char stack_consistency_error[] =
#if defined RUBY_DEVEL
    rb_bug(stack_consistency_error, nsp, nbp);

    if (FIXNUM_2_P(recv, obj) &&
        return rb_fix_plus_fix(recv, obj);
    else if (FLONUM_2_P(recv, obj) &&

    if (FIXNUM_2_P(recv, obj) &&
        return rb_fix_minus_fix(recv, obj);
    else if (FLONUM_2_P(recv, obj) &&

    if (FIXNUM_2_P(recv, obj) &&
        return rb_fix_mul_fix(recv, obj);
    else if (FLONUM_2_P(recv, obj) &&

    if (FIXNUM_2_P(recv, obj) &&
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
    else if (FLONUM_2_P(recv, obj) &&

    if (FIXNUM_2_P(recv, obj) &&
        return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
    else if (FLONUM_2_P(recv, obj) &&

    VALUE val = opt_equality(iseq, recv, obj, cd_eq);

    if (FIXNUM_2_P(recv, obj) &&
    else if (FLONUM_2_P(recv, obj) &&

    if (FIXNUM_2_P(recv, obj) &&
    else if (FLONUM_2_P(recv, obj) &&

    if (FIXNUM_2_P(recv, obj) &&
    else if (FLONUM_2_P(recv, obj) &&

    if (FIXNUM_2_P(recv, obj) &&
    else if (FLONUM_2_P(recv, obj) &&

    if (FIXNUM_2_P(recv, obj) &&
        return (recv & obj) | 1;

    if (FIXNUM_2_P(recv, obj) &&

    if (FIXNUM_2_P(recv, obj) &&

        return rb_ary_entry_internal(recv, FIX2LONG(obj));
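/* Sketch of the Fixnum fast path that vm_opt_plus() above leans on: small
 * integers stay tagged inside a machine word, so a + b is a couple of
 * arithmetic instructions plus an overflow check, returning a "take the slow
 * path" sentinel otherwise. The tagging below only mirrors the idea, it is
 * not CRuby's exact encoding, and __builtin_add_overflow assumes GCC/Clang. */
#include <stdio.h>
#include <stdbool.h>
#include <limits.h>

typedef long value_t;
#define SLOWPATH ((value_t)-2)   /* stand-in for Qundef: caller falls back to dispatch */

static value_t fix_encode(long n)    { return (value_t)(n * 2 + 1); }   /* tag bit = 1 */
static long    fix_decode(value_t v) { return ((long)v - 1) / 2; }
static bool    fixnum_p(value_t v)   { return (v & 1) != 0; }

static value_t opt_plus(value_t a, value_t b)
{
    long x, y, r;
    if (!fixnum_p(a) || !fixnum_p(b)) return SLOWPATH;   /* not both small ints */
    x = fix_decode(a);
    y = fix_decode(b);
    if (__builtin_add_overflow(x, y, &r)) return SLOWPATH;
    if (r > (LONG_MAX - 1) / 2 || r < LONG_MIN / 2) return SLOWPATH;  /* must stay taggable */
    return fix_encode(r);
}

int main(void)
{
    value_t v = opt_plus(fix_encode(20), fix_encode(22));
    if (v != SLOWPATH) printf("%ld\n", fix_decode(v));   /* 42 */
    return 0;
}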
vm_opt_length(VALUE recv, int bop)

vm_opt_empty_p(VALUE recv)

    else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {

      case RSHIFT(~0UL, 1):

vm_opt_succ(VALUE recv)

        return fix_succ(recv);

    if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {

    if (event & global_hooks->events) {

        vm_dtrace(event, ec);
        rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);

    if (local_hooks != NULL) {
        if (event & local_hooks->events) {

            rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);

      case BIN(opt_nil_p):
        return check_cfunc(vm_cc_cme(cc), rb_false);

        return check_cfunc(vm_cc_cme(cc), rb_obj_not);
#define VM_TRACE_HOOK(target_event, val) do { \
    if ((pc_events & (target_event)) & enabled_flags) { \
        vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks, (val)); \

    const VALUE *pc = reg_cfp->pc;

        enabled_flags |= local_hook_events;

    if ((pc_events & enabled_flags) == 0) {

        fprintf(stderr, "vm_trace>>%4d (%4x) - %s:%d %s\n",

#if VM_CHECK_MODE > 0

void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));

    vm_stack_canary |= 0x01;

    vm_stack_canary_was_born = true;

rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)

    rb_bug("dead canary found at %s: %s", insn, str);
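/* Sketch of the stack-canary machinery above (vm_stack_canary,
 * rb_vm_check_canary, rb_vm_canary_is_found_dead): before executing an
 * instruction the VM plants a recognizable value just past the live stack
 * top; afterwards it checks that the value is still there and aborts with a
 * diagnostic if a handler wrote past its declared stack effect.
 * Self-contained, hypothetical version. */
#include <stdio.h>
#include <stdlib.h>

#define STACK_SIZE 16
static long vm_stack_sketch[STACK_SIZE];
static const long canary = 0x5a5a5a5aL;   /* any pattern unlikely to occur by accident */

static void set_canary(long *sp) { sp[0] = canary; }

static void check_canary(long *sp, const char *insn)
{
    if (sp[0] != canary) {
        fprintf(stderr, "dead canary found after %s\n", insn);
        abort();
    }
}

/* a well-behaved handler: writes below sp and leaves sp[0] alone */
static void good_insn(long *sp) { sp[-1] = 42; }

int main(void)
{
    long *sp = vm_stack_sketch + 8;   /* pretend this is the current stack top */
    set_canary(sp);
    good_insn(sp);
    check_canary(sp, "good_insn");
    puts("canary intact");
    return 0;
}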
    return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);

    return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);

    return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);

    return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);

    return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);

    return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);

    typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
    return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);

    typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
    return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);

    typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
    return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);

    typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
    return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);

    typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
    return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);

    typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
    return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);

    typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
    return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);

lookup_builtin_invoker(int argc)

    return invokers[argc];

    return invoke_bf(ec, cfp, bf, argv);

    fprintf(stderr, "vm_invoke_builtin_delegate: passing -> ");
    for (int i=0; i<bf->argc; i++) {

    fprintf(stderr, "\n");
    fprintf(stderr, "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);

    if (bf->argc == 0) {
        return invoke_bf(ec, cfp, bf, NULL);

    return invoke_bf(ec, cfp, bf, argv);

    return cfp->ep[index];
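/* Sketch of lookup_builtin_invoker() and the invoke_bf() calls above:
 * arity-specific invoker functions are collected in a table indexed by argc,
 * so invoking a builtin is one table load plus an indirect call instead of a
 * switch on the arity. Names are invented and the signature is simplified
 * (the real invokers also receive the execution context and function pointer). */
#include <stdio.h>

typedef long value_t;
typedef value_t (*builtin_invoker)(value_t self, const value_t *argv);

static value_t invoke0(value_t self, const value_t *argv) { (void)argv; return self; }
static value_t invoke1(value_t self, const value_t *argv) { return self + argv[0]; }
static value_t invoke2(value_t self, const value_t *argv) { return self + argv[0] + argv[1]; }

static builtin_invoker lookup_invoker(int argc)
{
    static const builtin_invoker invokers[] = { invoke0, invoke1, invoke2 };
    return invokers[argc];               /* one load, no branching on arity */
}

int main(void)
{
    value_t argv[] = { 2, 3 };
    printf("%ld\n", lookup_invoker(2)(1, argv));   /* 6 */
    return 0;
}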