#include "internal/symbol.h"
#define MAX_EVENT_NUM 32
    clean_hooks(GET_EC(), hooks);
    if (new_iseq_events & ~enabled_iseq_events) {
    update_global_event_hook(list->events);
    rb_event_hook_t *hook = alloc_event_hook(func, events, data, hook_flags);
    connect_event_hook(ec, hook);
    connect_event_hook(GET_EC(), hook);
    rb_threadptr_add_event_hook(GET_EC(), rb_thread_ptr(thval), func, events, data, hook_flags);
    rb_event_hook_t *hook = alloc_event_hook(func, events, data, hook_flags);
    connect_event_hook(GET_EC(), hook);
    while ((hook = *nextp) != 0) {
    if (list == rb_ec_ractor_hooks(ec)) {
    update_global_event_hook(list->events);
    clean_hooks(ec, list);
#define MATCH_ANY_FILTER_TH ((rb_thread_t *)1)
    if (func == 0 || hook->func == func) {
    clean_hooks_check(ec, list);
    return remove_event_hook(ec, filter_th, func, data);
    return rb_threadptr_remove_event_hook(GET_EC(), rb_thread_ptr(thval), func, Qundef);
    return rb_threadptr_remove_event_hook(GET_EC(), rb_thread_ptr(thval), func, data);
    return remove_event_hook(GET_EC(), NULL, func, Qundef);
    return remove_event_hook(GET_EC(), NULL, func, data);
    rb_threadptr_remove_event_hook(ec, rb_ec_thread_ptr(ec), 0, Qundef);
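/*
 * A minimal sketch (not part of vm_trace.c) of how the global hook paths
 * above are driven from a C extension.  rb_add_event_hook() and
 * rb_remove_event_hook() are the declared entry points; the callback name,
 * the counter, and Init_call_counter()/stop_counting() are illustrative.
 */
#include "ruby/ruby.h"
#include "ruby/debug.h"

static unsigned long call_count;

static void
count_calls(rb_event_flag_t evflag, VALUE data, VALUE self, ID mid, VALUE klass)
{
    /* `data` is the VALUE passed at registration time (Qnil below). */
    if (evflag & (RUBY_EVENT_CALL | RUBY_EVENT_C_CALL)) call_count++;
}

void
Init_call_counter(void)
{
    rb_add_event_hook(count_calls, RUBY_EVENT_CALL | RUBY_EVENT_C_CALL, Qnil);
}

static void
stop_counting(void)
{
    /* Removes every hook registered with this callback, matching the
     * hook->func == func test in remove_event_hook() above. */
    rb_remove_event_hook(count_calls);
}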
    for (hook = list->hooks; hook; hook = hook->next) {
    clean_hooks_check(ec, list);
    if (exec_hooks_precheck(ec, list, trace_arg) == 0) return;
    exec_hooks_body(ec, list, trace_arg);
    exec_hooks_postcheck(ec, list);
    if (exec_hooks_precheck(ec, list, trace_arg) == 0) return 0;
    exec_hooks_body(ec, list, trace_arg);
    exec_hooks_postcheck(ec, list);
    exec_hooks_unprotected(ec, rb_ec_ractor_hooks(ec), trace_arg);
    if ((state = exec_hooks_protected(ec, hooks, trace_arg)) == TAG_NONE) {
    if (VM_FRAME_FINISHED_P(ec->cfp)) {
    rb_vm_t *const vm = rb_ec_vm_ptr(ec);
    dummy_trace_arg.event = 0;
    result = (*func)(arg);
#if defined RUBY_USE_SETJMPEX && RUBY_USE_SETJMPEX
thread_add_trace_func_m(VALUE obj, VALUE trace)
    thread_add_trace_func(GET_EC(), rb_thread_ptr(obj), trace);
thread_set_trace_func_m(VALUE target_thread, VALUE trace)
    rb_thread_t *target_th = rb_thread_ptr(target_thread);
    rb_threadptr_remove_event_hook(ec, target_th, call_trace_func, Qundef);
    thread_add_trace_func(ec, target_th, trace);
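/*
 * Sketch of the per-thread registration path shown above.
 * rb_thread_add_event_hook()/rb_thread_remove_event_hook() are the declared
 * public API; the callback and the two helper functions are illustrative.
 */
#include "ruby/debug.h"

static void
per_thread_line_hook(rb_event_flag_t evflag, VALUE data, VALUE self, ID mid, VALUE klass)
{
    /* Delivered only for events raised on the thread given at registration. */
}

static void
watch_thread(VALUE thread)      /* `thread` is a Ruby Thread object */
{
    rb_thread_add_event_hook(thread, per_thread_line_hook, RUBY_EVENT_LINE, Qnil);
}

static void
unwatch_thread(VALUE thread)
{
    rb_thread_remove_event_hook(thread, per_thread_line_hook);
}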
#define C(name, NAME) case RUBY_EVENT_##NAME: CONST_ID(id, #name); return id;
    C(c_return, C_RETURN);
    C(b_return, B_RETURN);
    C(thread_begin, THREAD_BEGIN);
    C(thread_end, THREAD_END);
    C(fiber_switch, FIBER_SWITCH);
    C(script_compiled, SCRIPT_COMPILED);
    get_path_and_lineno(ec, ec->cfp, event, &filename, &line);
        klass = RBASIC(klass)->klass;
static VALUE rb_cTracePoint;
tp_memsize(const void *ptr)
symbol2event_flag(VALUE v)
#define C(name, NAME) CONST_ID(id, #name); if (sym == ID2SYM(id)) return RUBY_EVENT_##NAME
    C(c_return, C_RETURN);
    C(b_return, B_RETURN);
    C(thread_begin, THREAD_BEGIN);
    C(thread_end, THREAD_END);
    C(fiber_switch, FIBER_SWITCH);
    C(script_compiled, SCRIPT_COMPILED);
    C(a_return, A_RETURN);
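/*
 * For illustration only: with the C() macro defined just above, a line such as
 *     C(call, CALL);
 * inside symbol2event_flag() expands to roughly the following (the stringized
 * #name supplies the "call" literal):
 */
    CONST_ID(id, "call"); if (sym == ID2SYM(id)) return RUBY_EVENT_CALL;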
    if (trace_arg == 0) {
    return get_trace_arg();
    return trace_arg->event;
    get_path_and_lineno(trace_arg->ec, trace_arg->cfp, trace_arg->event, &trace_arg->path, &trace_arg->lineno);
    fill_path_and_lineno(trace_arg);
    fill_path_and_lineno(trace_arg);
    return trace_arg->path;
    if (!trace_arg->klass) {
    if (trace_arg->klass) {
    switch (trace_arg->event) {
    fill_id_and_klass(trace_arg);
    if (trace_arg->klass && trace_arg->id) {
    fill_id_and_klass(trace_arg);
    fill_id_and_klass(trace_arg);
    fill_id_and_klass(trace_arg);
    return trace_arg->klass;
    return trace_arg->self;
    rb_bug("rb_tracearg_return_value: unreachable");
    return trace_arg->data;
    rb_bug("rb_tracearg_raised_exception: unreachable");
    return trace_arg->data;
    rb_bug("rb_tracearg_raised_exception: unreachable");
    if (rb_obj_is_iseq(data)) {
    rb_bug("rb_tracearg_raised_exception: unreachable");
    if (rb_obj_is_iseq(data)) {
    rb_bug("rb_tracearg_object: unreachable");
    return trace_arg->data;
iseq_of(VALUE target)
rb_tracepoint_enable_for_target(VALUE tpval, VALUE target, VALUE target_line)
    const rb_iseq_t *iseq = iseq_of(target);
    unsigned int line = 0;
    if (!NIL_P(target_line)) {
    def->body.bmethod.hooks = NULL;
    hook_list_connect(target, list, hook, FALSE);
    if (hook->data == tpval) {
    int previous_tracing = tp->tracing;
    if (RTEST(target_thread)) {
        tp->target_th = rb_thread_ptr(target_thread);
    if (NIL_P(target)) {
        if (!NIL_P(target_line)) {
        rb_tracepoint_enable_for_target(tpval, target, target_line);
    int previous_tracing = tp->tracing;
    tp->ractor = rb_ractor_shareable_p(proc) ? NULL : GET_RACTOR();
    if (RTEST(target_thval)) {
        target_th = rb_thread_ptr(target_thval);
    return tracepoint_new(rb_cTracePoint, target_th, events, func, data, Qundef);
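/*
 * A hedged usage sketch of the C-level TracePoint API implemented above.
 * rb_tracepoint_new(), rb_tracepoint_enable() and the rb_tracearg_* accessors
 * are the declared entry points; the callback body, the file-scope `line_tp`
 * and install_line_tracer() are illustrative.
 */
#include <stdio.h>
#include "ruby/ruby.h"
#include "ruby/debug.h"

static VALUE line_tp;

static void
line_tracer(VALUE tpval, void *data)
{
    rb_trace_arg_t *targ = rb_tracearg_from_tracepoint(tpval);
    VALUE path = rb_tracearg_path(targ);
    VALUE lineno = rb_tracearg_lineno(targ);
    printf("%s:%d\n", StringValueCStr(path), NUM2INT(lineno));
}

static void
install_line_tracer(void)
{
    /* Passing Qnil as the target thread hooks every thread, as in
     * rb_tracepoint_new() above. */
    line_tp = rb_tracepoint_new(Qnil, RUBY_EVENT_LINE, line_tracer, NULL);
    rb_global_variable(&line_tp);    /* keep the TracePoint VALUE marked */
    rb_tracepoint_enable(line_tp);
}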
    for (i=0; i<argc; i++) {
        events |= symbol2event_flag(RARRAY_AREF(args, i));
    switch (trace_arg->event) {
    int active = 0, deleted = 0;
    tracepoint_stat_event_hooks(stat, vm->self, rb_ec_ractor_hooks(ec)->hooks);
#include "trace_point.rbinc"
#define MAX_POSTPONED_JOB 1000
#define MAX_POSTPONED_JOB_SPECIAL_ADDITION 24
      default:
        rb_bug("unreachable\n");
    rb_atomic_t i, index;
    for (i=0; i<index; i++) {
        if (pjob->func == func) {
      default:
        rb_bug("unreachable\n");
    if (!wq_job) return FALSE;
    volatile rb_atomic_t saved_mask = ec->interrupt_mask & block_mask;
    struct list_head tmp;
    list_head_init(&tmp);
    if (!list_empty(&tmp)) {
        list_prepend_list(&vm->workqueue, &tmp);
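/*
 * Sketch of the postponed-job path implemented above, as used by a sampling
 * profiler: the signal handler only enqueues; the work runs later at a safe
 * point when the POSTPONED_JOB interrupt is serviced.
 * rb_postponed_job_register_one() is the declared API; the handler and
 * flush_samples() are illustrative.
 */
#include "ruby/debug.h"

static void
flush_samples(void *data)
{
    /* Executed on a Ruby thread, outside the signal handler. */
}

static void
profiler_signal_handler(int signum)
{
    /* The *_register_one variant skips the enqueue if this func is already
     * pending (see the pjob->func == func check above). */
    rb_postponed_job_register_one(0, flush_samples, NULL);
}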