Ruby 3.2.5p208 (2024-07-26 revision 31d0f1a2e7dbfb60731d1f05b868e1d578cda493)
vm_insnhelper.c
1/**********************************************************************
2
3 vm_insnhelper.c - instruction helper functions.
4
5 $Author$
6
7 Copyright (C) 2007 Koichi Sasada
8
9**********************************************************************/
10
11#include "ruby/internal/config.h"
12
13#include <math.h>
14
15#ifdef HAVE_STDATOMIC_H
16 #include <stdatomic.h>
17#endif
18
19#include "constant.h"
20#include "debug_counter.h"
21#include "internal.h"
22#include "internal/class.h"
23#include "internal/compar.h"
24#include "internal/hash.h"
25#include "internal/numeric.h"
26#include "internal/proc.h"
27#include "internal/random.h"
28#include "internal/variable.h"
29#include "internal/struct.h"
30#include "variable.h"
31
32/* finish iseq array */
33#include "insns.inc"
34#ifndef MJIT_HEADER
35#include "insns_info.inc"
36#endif
37
/* Forward declarations of method-definition helpers defined elsewhere in
 * the VM; redeclared here to avoid additional header coupling. */
extern rb_method_definition_t *rb_method_definition_create(rb_method_type_t type, ID mid);
extern void rb_method_definition_set(const rb_method_entry_t *me, rb_method_definition_t *def, void *opts);
extern int rb_method_definition_eq(const rb_method_definition_t *d1, const rb_method_definition_t *d2);
extern VALUE rb_make_no_method_exception(VALUE exc, VALUE format, VALUE obj,
                                         int argc, const VALUE *argv, int priv);

#ifndef MJIT_HEADER
/* Shared zero-initialized (static const) call caches; presumably used as
 * placeholder caches where no real call cache exists — their uses are
 * outside this chunk, so confirm against the rest of the file. */
static const struct rb_callcache vm_empty_cc;
static const struct rb_callcache vm_empty_cc_for_super;
#endif

/* control stack frame */

static rb_control_frame_t *vm_get_ruby_level_caller_cfp(const rb_execution_context_t *ec, const rb_control_frame_t *cfp);
52
53MJIT_STATIC VALUE
54ruby_vm_special_exception_copy(VALUE exc)
55{
57 rb_obj_copy_ivar(e, exc);
58 return e;
59}
60
NORETURN(static void ec_stack_overflow(rb_execution_context_t *ec, int));
/* Raise the pre-allocated SystemStackError on `ec`.  When `setup` is
 * true, the shared exception object is copied and decorated with the
 * current backtrace; otherwise the bare pre-allocated object is raised
 * as-is (avoids further allocation when we are critically low on stack). */
static void
ec_stack_overflow(rb_execution_context_t *ec, int setup)
{
    VALUE mesg = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_sysstack];
    ec->raised_flag = RAISED_STACKOVERFLOW;
    if (setup) {
        VALUE at = rb_ec_backtrace_object(ec);
        mesg = ruby_vm_special_exception_copy(mesg);
        rb_ivar_set(mesg, idBt, at);
        rb_ivar_set(mesg, idBt_locations, at);
    }
    ec->errinfo = mesg;
    EC_JUMP_TAG(ec, TAG_RAISE);
}

NORETURN(static void vm_stackoverflow(void));
#ifdef MJIT_HEADER
NOINLINE(static COLDFUNC void vm_stackoverflow(void));
#endif

/* VM value-stack overflow on the current EC; always builds a backtrace. */
static void
vm_stackoverflow(void)
{
    ec_stack_overflow(GET_EC(), TRUE);
}

NORETURN(MJIT_STATIC void rb_ec_stack_overflow(rb_execution_context_t *ec, int crit));
/* Machine-stack overflow entry point.  `crit` selects the fatal variant
 * that raises the pre-allocated stackfatal exception without any further
 * work.  Overflow during GC is unrecoverable and aborts. */
MJIT_STATIC void
rb_ec_stack_overflow(rb_execution_context_t *ec, int crit)
{
    if (rb_during_gc()) {
        rb_bug("system stack overflow during GC. Faulty native extension?");
    }
    if (crit) {
        ec->raised_flag = RAISED_STACKOVERFLOW;
        ec->errinfo = rb_ec_vm_ptr(ec)->special_exceptions[ruby_error_stackfatal];
        EC_JUMP_TAG(ec, TAG_RAISE);
    }
#ifdef USE_SIGALTSTACK
    /* with an alternate signal stack there is room to build a backtrace */
    ec_stack_overflow(ec, TRUE);
#else
    ec_stack_overflow(ec, FALSE);
#endif
}
106
107
#if VM_CHECK_MODE > 0
/* Debug-mode check: is `klass` acceptable as a callable method entry's
 * defined_class?  Under VM_CHECK_MODE >= 2 it must be a module, an
 * iclass whose super is a module, or a class whose ancestry reaches
 * BasicObject; under lower check levels any non-zero value passes. */
static int
callable_class_p(VALUE klass)
{
#if VM_CHECK_MODE >= 2
    if (!klass) return FALSE;
    switch (RB_BUILTIN_TYPE(klass)) {
      default:
        break;
      case T_ICLASS:
        if (!RB_TYPE_P(RCLASS_SUPER(klass), T_MODULE)) break;
        /* fall through */
      case T_MODULE:
        return TRUE;
    }
    while (klass) {
        if (klass == rb_cBasicObject) {
            return TRUE;
        }
        klass = RCLASS_SUPER(klass);
    }
    return FALSE;
#else
    return klass != 0;
#endif
}
133
134static int
135callable_method_entry_p(const rb_callable_method_entry_t *cme)
136{
137 if (cme == NULL) {
138 return TRUE;
139 }
140 else {
141 VM_ASSERT(IMEMO_TYPE_P((VALUE)cme, imemo_ment));
142
143 if (callable_class_p(cme->defined_class)) {
144 return TRUE;
145 }
146 else {
147 return FALSE;
148 }
149 }
150}
151
/* Exhaustive debug check of one frame push: verifies that specval, the
 * ME/CREF slot, and the iseq are consistent with the frame's magic.
 * req_block/req_me/req_cref say whether this magic must carry a
 * block-handler specval / method entry / cref; is_cframe says the frame
 * should have no normal iseq. */
static void
vm_check_frame_detail(VALUE type, int req_block, int req_me, int req_cref, VALUE specval, VALUE cref_or_me, int is_cframe, const rb_iseq_t *iseq)
{
    unsigned int magic = (unsigned int)(type & VM_FRAME_MAGIC_MASK);
    enum imemo_type cref_or_me_type = imemo_env; /* impossible value */

    if (RB_TYPE_P(cref_or_me, T_IMEMO)) {
        cref_or_me_type = imemo_type(cref_or_me);
    }
    if (type & VM_FRAME_FLAG_BMETHOD) {
        /* bmethod frames always carry their method entry */
        req_me = TRUE;
    }

    if (req_block && (type & VM_ENV_FLAG_LOCAL) == 0) {
        rb_bug("vm_push_frame: specval (%p) should be a block_ptr on %x frame", (void *)specval, magic);
    }
    if (!req_block && (type & VM_ENV_FLAG_LOCAL) != 0) {
        rb_bug("vm_push_frame: specval (%p) should not be a block_ptr on %x frame", (void *)specval, magic);
    }

    if (req_me) {
        if (cref_or_me_type != imemo_ment) {
            rb_bug("vm_push_frame: (%s) should be method entry on %x frame", rb_obj_info(cref_or_me), magic);
        }
    }
    else {
        if (req_cref && cref_or_me_type != imemo_cref) {
            rb_bug("vm_push_frame: (%s) should be CREF on %x frame", rb_obj_info(cref_or_me), magic);
        }
        else { /* cref or Qfalse */
            if (cref_or_me != Qfalse && cref_or_me_type != imemo_cref) {
                if (((type & VM_FRAME_FLAG_LAMBDA) || magic == VM_FRAME_MAGIC_IFUNC) && (cref_or_me_type == imemo_ment)) {
                    /* ignore */
                }
                else {
                    rb_bug("vm_push_frame: (%s) should be false or cref on %x frame", rb_obj_info(cref_or_me), magic);
                }
            }
        }
    }

    if (cref_or_me_type == imemo_ment) {
        const rb_callable_method_entry_t *me = (const rb_callable_method_entry_t *)cref_or_me;

        if (!callable_method_entry_p(me)) {
            rb_bug("vm_push_frame: ment (%s) should be callable on %x frame.", rb_obj_info(cref_or_me), magic);
        }
    }

    if ((type & VM_FRAME_MAGIC_MASK) == VM_FRAME_MAGIC_DUMMY) {
        VM_ASSERT(iseq == NULL ||
                  RBASIC_CLASS((VALUE)iseq) == 0 || // dummy frame for loading
                  RUBY_VM_NORMAL_ISEQ_P(iseq) //argument error
                  );
    }
    else {
        VM_ASSERT(is_cframe == !RUBY_VM_NORMAL_ISEQ_P(iseq));
    }
}

/* Dispatch to vm_check_frame_detail() using the per-magic requirement
 * table below (columns: block required / method entry required / cref
 * required / C frame). */
static void
vm_check_frame(VALUE type,
               VALUE specval,
               VALUE cref_or_me,
               const rb_iseq_t *iseq)
{
    VALUE given_magic = type & VM_FRAME_MAGIC_MASK;
    VM_ASSERT(FIXNUM_P(type));

#define CHECK(magic, req_block, req_me, req_cref, is_cframe) \
    case magic: \
      vm_check_frame_detail(type, req_block, req_me, req_cref, \
                            specval, cref_or_me, is_cframe, iseq); \
      break
    switch (given_magic) {
        /*                         BLK    ME     CREF   CFRAME */
        CHECK(VM_FRAME_MAGIC_METHOD, TRUE, TRUE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_CLASS, TRUE, FALSE, TRUE, FALSE);
        CHECK(VM_FRAME_MAGIC_TOP, TRUE, FALSE, TRUE, FALSE);
        CHECK(VM_FRAME_MAGIC_CFUNC, TRUE, TRUE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_BLOCK, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_IFUNC, FALSE, FALSE, FALSE, TRUE);
        CHECK(VM_FRAME_MAGIC_EVAL, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_RESCUE, FALSE, FALSE, FALSE, FALSE);
        CHECK(VM_FRAME_MAGIC_DUMMY, TRUE, FALSE, FALSE, FALSE);
      default:
        rb_bug("vm_push_frame: unknown type (%x)", (unsigned int)given_magic);
    }
#undef CHECK
}
242
static VALUE vm_stack_canary; /* Initialized later */
static bool vm_stack_canary_was_born = false;

#ifndef MJIT_HEADER
/* Called (under VM_CHECK_MODE) just before a VM-stack slot is written.
 * If sp[0] still holds the canary value, some instruction leaked it:
 * print the offending instruction name, the iseq's inspection and its
 * disassembly, then abort via rb_bug(). */
MJIT_FUNC_EXPORTED void
rb_vm_check_canary(const rb_execution_context_t *ec, VALUE *sp)
{
    const struct rb_control_frame_struct *reg_cfp = ec->cfp;
    const struct rb_iseq_struct *iseq;

    if (! LIKELY(vm_stack_canary_was_born)) {
        return; /* :FIXME: isn't it rather fatal to enter this branch? */
    }
    else if ((VALUE *)reg_cfp == ec->vm_stack + ec->vm_stack_size) {
        /* This is at the very beginning of a thread. cfp does not exist. */
        return;
    }
    else if (! (iseq = GET_ISEQ())) {
        return;
    }
    else if (LIKELY(sp[0] != vm_stack_canary)) {
        return;
    }
    else {
        /* we are going to call methods below; squash the canary to
         * prevent infinite loop. */
        sp[0] = Qundef;
    }

    const VALUE *orig = rb_iseq_original_iseq(iseq);
    const VALUE *encoded = ISEQ_BODY(iseq)->iseq_encoded;
    const ptrdiff_t pos = GET_PC() - encoded;
    const enum ruby_vminsn_type insn = (enum ruby_vminsn_type)orig[pos];
    const char *name = insn_name(insn);
    const VALUE iseqw = rb_iseqw_new(iseq);
    const VALUE inspection = rb_inspect(iseqw);
    const char *stri = rb_str_to_cstr(inspection);
    const VALUE disasm = rb_iseq_disasm(iseq);
    const char *strd = rb_str_to_cstr(disasm);

    /* rb_bug() is not capable of outputting this large contents.  It
       is designed to run from a SIGSEGV handler, which tends to be
       very restricted. */
    ruby_debug_printf(
        "We are killing the stack canary set by %s, "
        "at %s@pc=%"PRIdPTR"\n"
        "watch out the C stack trace.\n"
        "%s",
        name, stri, pos, strd);
    rb_bug("see above.");
}
#endif
#define vm_check_canary(ec, sp) rb_vm_check_canary(ec, sp)

#else
/* VM_CHECK_MODE == 0: the checks compile away entirely. */
#define vm_check_canary(ec, sp)
#define vm_check_frame(a, b, c, d)
#endif /* VM_CHECK_MODE > 0 */
301
#if USE_DEBUG_COUNTER
/* Bump the debug counters describing one frame push: the overall count,
 * the Ruby/C -> Ruby/C transition kind, and a per-frame-magic counter. */
static void
vm_push_frame_debug_counter_inc(
    const struct rb_execution_context_struct *ec,
    const struct rb_control_frame_struct *reg_cfp,
    VALUE type)
{
    const struct rb_control_frame_struct *parent_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(reg_cfp);

    RB_DEBUG_COUNTER_INC(frame_push);

    if (RUBY_VM_END_CONTROL_FRAME(ec) != parent_cfp) {
        const bool curr_ruby = VM_FRAME_RUBYFRAME_P(reg_cfp);
        const bool prev_ruby = VM_FRAME_RUBYFRAME_P(parent_cfp);

        /* transition kind: {Ruby,C} caller -> {Ruby,C} callee */
        if (prev_ruby && curr_ruby) {
            RB_DEBUG_COUNTER_INC(frame_R2R);
        }
        else if (prev_ruby) {
            RB_DEBUG_COUNTER_INC(frame_R2C);
        }
        else if (curr_ruby) {
            RB_DEBUG_COUNTER_INC(frame_C2R);
        }
        else {
            RB_DEBUG_COUNTER_INC(frame_C2C);
        }
    }

    switch (type & VM_FRAME_MAGIC_MASK) {
      case VM_FRAME_MAGIC_METHOD: RB_DEBUG_COUNTER_INC(frame_push_method); return;
      case VM_FRAME_MAGIC_BLOCK:  RB_DEBUG_COUNTER_INC(frame_push_block);  return;
      case VM_FRAME_MAGIC_CLASS:  RB_DEBUG_COUNTER_INC(frame_push_class);  return;
      case VM_FRAME_MAGIC_TOP:    RB_DEBUG_COUNTER_INC(frame_push_top);    return;
      case VM_FRAME_MAGIC_CFUNC:  RB_DEBUG_COUNTER_INC(frame_push_cfunc);  return;
      case VM_FRAME_MAGIC_IFUNC:  RB_DEBUG_COUNTER_INC(frame_push_ifunc);  return;
      case VM_FRAME_MAGIC_EVAL:   RB_DEBUG_COUNTER_INC(frame_push_eval);   return;
      case VM_FRAME_MAGIC_RESCUE: RB_DEBUG_COUNTER_INC(frame_push_rescue); return;
      case VM_FRAME_MAGIC_DUMMY:  RB_DEBUG_COUNTER_INC(frame_push_dummy);  return;
    }

    rb_bug("unreachable");
}
#else
#define vm_push_frame_debug_counter_inc(ec, cfp, t) /* void */
#endif
351
/* ep[-2..0] layout assumed throughout this file: ME/CREF, SPECVAL, FLAGS. */
STATIC_ASSERT(VM_ENV_DATA_INDEX_ME_CREF, VM_ENV_DATA_INDEX_ME_CREF == -2);
STATIC_ASSERT(VM_ENV_DATA_INDEX_SPECVAL, VM_ENV_DATA_INDEX_SPECVAL == -1);
STATIC_ASSERT(VM_ENV_DATA_INDEX_FLAGS, VM_ENV_DATA_INDEX_FLAGS == -0);

/* Push a new control frame onto ec's VM stack.
 *
 *   iseq        instruction sequence for the frame (may be NULL)
 *   type        frame magic ORed with env flags
 *   self        receiver for the new frame
 *   specval     block handler or previous-EP pointer (stored at ep[-1])
 *   cref_or_me  cref / method entry / Qfalse (stored at ep[-2])
 *   pc          initial program counter
 *   sp          current stack top; locals and env data are written here
 *   local_size  number of local slots to initialize to Qnil
 *   stack_max   worst-case operand-stack depth, for the overflow check
 */
static void
vm_push_frame(rb_execution_context_t *ec,
              const rb_iseq_t *iseq,
              VALUE type,
              VALUE self,
              VALUE specval,
              VALUE cref_or_me,
              const VALUE *pc,
              VALUE *sp,
              int local_size,
              int stack_max)
{
    rb_control_frame_t *const cfp = RUBY_VM_NEXT_CONTROL_FRAME(ec->cfp);

    vm_check_frame(type, specval, cref_or_me, iseq);
    VM_ASSERT(local_size >= 0);

    /* check stack overflow */
    CHECK_VM_STACK_OVERFLOW0(cfp, sp, local_size + stack_max);
    vm_check_canary(ec, sp);

    /* setup vm value stack */

    /* initialize local variables */
    for (int i=0; i < local_size; i++) {
        *sp++ = Qnil;
    }

    /* setup ep with managing data */
    *sp++ = cref_or_me; /* ep[-2] / Qnil or T_IMEMO(cref) or T_IMEMO(ment) */
    *sp++ = specval /* ep[-1] / block handler or prev env ptr */;
    *sp++ = type; /* ep[-0] / ENV_FLAGS */

    /* setup new frame */
    *cfp = (const struct rb_control_frame_struct) {
        .pc = pc,
        .sp = sp,
        .iseq = iseq,
        .self = self,
        .ep = sp - 1,
        .block_code = NULL,
        .__bp__ = sp, /* Store initial value of ep as bp to skip calculation cost of bp on JIT cancellation. */
#if VM_DEBUG_BP_CHECK
        .bp_check = sp,
#endif
        .jit_return = NULL
    };

    /* Ensure the initialization of `*cfp` above never gets reordered with the update of `ec->cfp` below.
       This is a no-op in all cases we've looked at (https://godbolt.org/z/3oxd1446K), but should guarantee it for all
       future/untested compilers/platforms. */

    #if defined HAVE_DECL_ATOMIC_SIGNAL_FENCE && HAVE_DECL_ATOMIC_SIGNAL_FENCE
    atomic_signal_fence(memory_order_seq_cst);
    #endif

    ec->cfp = cfp;

    if (VMDEBUG == 2) {
        SDR();
    }
    vm_push_frame_debug_counter_inc(ec, cfp, type);
}
419
/* Pop the current frame without running the pending-interrupt check
 * (for call sites where running interrupt handlers would be unsafe). */
void
rb_vm_pop_frame_no_int(rb_execution_context_t *ec)
{
    rb_control_frame_t *cfp = ec->cfp;

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2) SDR();

    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
}

/* return TRUE if the frame is finished */
static inline int
vm_pop_frame(rb_execution_context_t *ec, rb_control_frame_t *cfp, const VALUE *ep)
{
    /* read FLAGS before the frame's env data goes out of scope */
    VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];

    if (VM_CHECK_MODE >= 4) rb_gc_verify_internal_consistency();
    if (VMDEBUG == 2) SDR();

    RUBY_VM_CHECK_INTS(ec);
    ec->cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    return flags & VM_FRAME_FLAG_FINISH;
}

/* Pop the current frame (with the interrupt check). */
MJIT_STATIC void
rb_vm_pop_frame(rb_execution_context_t *ec)
{
    vm_pop_frame(ec, ec->cfp, ec->cfp->ep);
}
451
// it pushes pseudo-frame with fname filename.
VALUE
rb_vm_push_frame_fname(rb_execution_context_t *ec, VALUE fname)
{
    /* A dummy iseq (struct + body) is carved out of one xcalloc'ed
     * allocation owned by the returned tmpbuf imemo.  NOTE(review): the
     * caller presumably must keep the returned tmpbuf reachable while
     * the pseudo-frame is live — confirm with callers. */
    VALUE tmpbuf = rb_imemo_tmpbuf_auto_free_pointer();
    void *ptr = ruby_xcalloc(sizeof(struct rb_iseq_constant_body) + sizeof(struct rb_iseq_struct), 1);
    rb_imemo_tmpbuf_set_ptr(tmpbuf, ptr);

    struct rb_iseq_struct *dmy_iseq = (struct rb_iseq_struct *)ptr;
    struct rb_iseq_constant_body *dmy_body = (struct rb_iseq_constant_body *)&dmy_iseq[1];
    dmy_iseq->body = dmy_body;
    dmy_body->type = ISEQ_TYPE_TOP;
    dmy_body->location.pathobj = fname;

    vm_push_frame(ec,
                  dmy_iseq, //const rb_iseq_t *iseq,
                  VM_FRAME_MAGIC_DUMMY | VM_ENV_FLAG_LOCAL | VM_FRAME_FLAG_FINISH, // VALUE type,
                  ec->cfp->self, // VALUE self,
                  VM_BLOCK_HANDLER_NONE, // VALUE specval,
                  Qfalse, // VALUE cref_or_me,
                  NULL, // const VALUE *pc,
                  ec->cfp->sp, // VALUE *sp,
                  0, // int local_size,
                  0); // int stack_max

    return tmpbuf;
}
479
480/* method dispatch */
481static inline VALUE
482rb_arity_error_new(int argc, int min, int max)
483{
484 VALUE err_mess = rb_sprintf("wrong number of arguments (given %d, expected %d", argc, min);
485 if (min == max) {
486 /* max is not needed */
487 }
488 else if (max == UNLIMITED_ARGUMENTS) {
489 rb_str_cat_cstr(err_mess, "+");
490 }
491 else {
492 rb_str_catf(err_mess, "..%d", max);
493 }
494 rb_str_cat_cstr(err_mess, ")");
495 return rb_exc_new3(rb_eArgError, err_mess);
496}
497
/* Raise an ArgumentError for an arity mismatch (see rb_arity_error_new). */
MJIT_STATIC void
rb_error_arity(int argc, int min, int max)
{
    rb_exc_raise(rb_arity_error_new(argc, min, max));
}

/* lvar */

NOINLINE(static void vm_env_write_slowpath(const VALUE *ep, int index, VALUE v));

/* Slow-path env write: remember the env object with the GC write
 * barrier, force the write, then clear VM_ENV_FLAG_WB_REQUIRED so
 * subsequent writes to this env can take the fast path. */
static void
vm_env_write_slowpath(const VALUE *ep, int index, VALUE v)
{
    /* remember env value forcely */
    rb_gc_writebarrier_remember(VM_ENV_ENVVAL(ep));
    VM_FORCE_WRITE(&ep[index], v);
    VM_ENV_FLAGS_UNSET(ep, VM_ENV_FLAG_WB_REQUIRED);
    RB_DEBUG_COUNTER_INC(lvar_set_slowpath);
}
517
518static inline void
519vm_env_write(const VALUE *ep, int index, VALUE v)
520{
521 VALUE flags = ep[VM_ENV_DATA_INDEX_FLAGS];
522 if (LIKELY((flags & VM_ENV_FLAG_WB_REQUIRED) == 0)) {
523 VM_STACK_ENV_WRITE(ep, index, v);
524 }
525 else {
526 vm_env_write_slowpath(ep, index, v);
527 }
528}
529
/* Convert a block handler into a Proc VALUE (Qnil when no block is
 * given): iseq/ifunc handlers capture a new Proc, symbol handlers go
 * through Symbol#to_proc, proc handlers are returned unchanged. */
MJIT_STATIC VALUE
rb_vm_bh_to_procval(const rb_execution_context_t *ec, VALUE block_handler)
{
    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        return Qnil;
    }
    else {
        switch (vm_block_handler_type(block_handler)) {
          case block_handler_type_iseq:
          case block_handler_type_ifunc:
            return rb_vm_make_proc(ec, VM_BH_TO_CAPT_BLOCK(block_handler), rb_cProc);
          case block_handler_type_symbol:
            return rb_sym_to_proc(VM_BH_TO_SYMBOL(block_handler));
          case block_handler_type_proc:
            return VM_BH_TO_PROC(block_handler);
          default:
            VM_UNREACHABLE(rb_vm_bh_to_procval);
        }
    }
}
550
551/* svar */
552
#if VM_CHECK_MODE > 0
/* Debug predicate: an svar slot may legally hold an imemo of type svar,
 * cref or ment; anything else aborts the process via rb_bug(). */
static int
vm_svar_valid_p(VALUE svar)
{
    if (RB_TYPE_P(svar, T_IMEMO)) {
        const enum imemo_type t = imemo_type(svar);

        if (t == imemo_svar || t == imemo_cref || t == imemo_ment) {
            return TRUE;
        }
    }
    rb_bug("vm_svar_valid_p: unknown type: %s", rb_obj_info(svar));
    return FALSE;
}
#endif
571
/* Locate the svar slot for a local EP: ordinary local envs keep it in
 * ep[VM_ENV_DATA_INDEX_ME_CREF]; the root EP uses ec->root_svar.  May
 * return Qfalse cast to a pointer when no svar has been created yet. */
static inline struct vm_svar *
lep_svar(const rb_execution_context_t *ec, const VALUE *lep)
{
    VALUE svar;

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        svar = lep[VM_ENV_DATA_INDEX_ME_CREF];
    }
    else {
        svar = ec->root_svar;
    }

    VM_ASSERT(svar == Qfalse || vm_svar_valid_p(svar));

    return (struct vm_svar *)svar;
}

/* Store `svar` into the slot located by the same rule as lep_svar(),
 * routing through the appropriate write barrier for each location. */
static inline void
lep_svar_write(const rb_execution_context_t *ec, const VALUE *lep, const struct vm_svar *svar)
{
    VM_ASSERT(vm_svar_valid_p((VALUE)svar));

    if (lep && (ec == NULL || ec->root_lep != lep)) {
        vm_env_write(lep, VM_ENV_DATA_INDEX_ME_CREF, (VALUE)svar);
    }
    else {
        RB_OBJ_WRITE(rb_ec_thread_ptr(ec)->self, &ec->root_svar, svar);
    }
}
601
602static VALUE
603lep_svar_get(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key)
604{
605 const struct vm_svar *svar = lep_svar(ec, lep);
606
607 if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) return Qnil;
608
609 switch (key) {
610 case VM_SVAR_LASTLINE:
611 return svar->lastline;
612 case VM_SVAR_BACKREF:
613 return svar->backref;
614 default: {
615 const VALUE ary = svar->others;
616
617 if (NIL_P(ary)) {
618 return Qnil;
619 }
620 else {
621 return rb_ary_entry(ary, key - VM_SVAR_EXTRA_START);
622 }
623 }
624 }
625}
626
/* Allocate a fresh svar imemo.  `obj` becomes the fourth imemo slot —
 * presumably cref_or_me, since callers pass the slot's previous
 * cref/ment value here to preserve it (confirm against imemo layout). */
static struct vm_svar *
svar_new(VALUE obj)
{
    return (struct vm_svar *)rb_imemo_new(imemo_svar, Qnil, Qnil, Qnil, obj);
}

/* Set one special variable, creating the svar on first use.  Keys other
 * than LASTLINE/BACKREF index into the lazily-created `others` array. */
static void
lep_svar_set(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, VALUE val)
{
    struct vm_svar *svar = lep_svar(ec, lep);

    if ((VALUE)svar == Qfalse || imemo_type((VALUE)svar) != imemo_svar) {
        lep_svar_write(ec, lep, svar = svar_new((VALUE)svar));
    }

    switch (key) {
      case VM_SVAR_LASTLINE:
        RB_OBJ_WRITE(svar, &svar->lastline, val);
        return;
      case VM_SVAR_BACKREF:
        RB_OBJ_WRITE(svar, &svar->backref, val);
        return;
      default: {
        VALUE ary = svar->others;

        if (NIL_P(ary)) {
            RB_OBJ_WRITE(svar, &svar->others, ary = rb_ary_new());
        }
        rb_ary_store(ary, key - VM_SVAR_EXTRA_START, val);
      }
    }
}
659
/* Read a "special" variable.  type == 0 reads svar slot `key` directly.
 * Otherwise `type` encodes a back-reference against $~: odd values name
 * a pseudo-variable ('&', '`', '\'', '+' in type >> 1); even values
 * select the nth capture group ($1..$n, n == type >> 1). */
static inline VALUE
vm_getspecial(const rb_execution_context_t *ec, const VALUE *lep, rb_num_t key, rb_num_t type)
{
    VALUE val;

    if (type == 0) {
        val = lep_svar_get(ec, lep, key);
    }
    else {
        VALUE backref = lep_svar_get(ec, lep, VM_SVAR_BACKREF);

        if (type & 0x01) {
            switch (type >> 1) {
              case '&':
                val = rb_reg_last_match(backref);
                break;
              case '`':
                val = rb_reg_match_pre(backref);
                break;
              case '\'':
                val = rb_reg_match_post(backref);
                break;
              case '+':
                val = rb_reg_match_last(backref);
                break;
              default:
                rb_bug("unexpected back-ref");
            }
        }
        else {
            val = rb_reg_nth_match((int)(type >> 1), backref);
        }
    }
    return val;
}
695
696PUREFUNC(static rb_callable_method_entry_t *check_method_entry(VALUE obj, int can_be_svar));
698check_method_entry(VALUE obj, int can_be_svar)
699{
700 if (obj == Qfalse) return NULL;
701
702#if VM_CHECK_MODE > 0
703 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_method_entry: unknown type: %s", rb_obj_info(obj));
704#endif
705
706 switch (imemo_type(obj)) {
707 case imemo_ment:
708 return (rb_callable_method_entry_t *)obj;
709 case imemo_cref:
710 return NULL;
711 case imemo_svar:
712 if (can_be_svar) {
713 return check_method_entry(((struct vm_svar *)obj)->cref_or_me, FALSE);
714 }
715 default:
716#if VM_CHECK_MODE > 0
717 rb_bug("check_method_entry: svar should not be there:");
718#endif
719 return NULL;
720 }
721}
722
723MJIT_STATIC const rb_callable_method_entry_t *
724rb_vm_frame_method_entry(const rb_control_frame_t *cfp)
725{
726 const VALUE *ep = cfp->ep;
728
729 while (!VM_ENV_LOCAL_P(ep)) {
730 if ((me = check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return me;
731 ep = VM_ENV_PREV_EP(ep);
732 }
733
734 return check_method_entry(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
735}
736
737static const rb_iseq_t *
738method_entry_iseqptr(const rb_callable_method_entry_t *me)
739{
740 switch (me->def->type) {
741 case VM_METHOD_TYPE_ISEQ:
742 return me->def->body.iseq.iseqptr;
743 default:
744 return NULL;
745 }
746}
747
748static rb_cref_t *
749method_entry_cref(const rb_callable_method_entry_t *me)
750{
751 switch (me->def->type) {
752 case VM_METHOD_TYPE_ISEQ:
753 return me->def->body.iseq.cref;
754 default:
755 return NULL;
756 }
757}
758
759#if VM_CHECK_MODE == 0
760PUREFUNC(static rb_cref_t *check_cref(VALUE, int));
761#endif
762static rb_cref_t *
763check_cref(VALUE obj, int can_be_svar)
764{
765 if (obj == Qfalse) return NULL;
766
767#if VM_CHECK_MODE > 0
768 if (!RB_TYPE_P(obj, T_IMEMO)) rb_bug("check_cref: unknown type: %s", rb_obj_info(obj));
769#endif
770
771 switch (imemo_type(obj)) {
772 case imemo_ment:
773 return method_entry_cref((rb_callable_method_entry_t *)obj);
774 case imemo_cref:
775 return (rb_cref_t *)obj;
776 case imemo_svar:
777 if (can_be_svar) {
778 return check_cref(((struct vm_svar *)obj)->cref_or_me, FALSE);
779 }
780 default:
781#if VM_CHECK_MODE > 0
782 rb_bug("check_method_entry: svar should not be there:");
783#endif
784 return NULL;
785 }
786}
787
/* Walk the env chain outward and return the first cref reachable from
 * an ME/CREF slot; svars are consulted only at the local frame. */
static inline rb_cref_t *
vm_env_cref(const VALUE *ep)
{
    rb_cref_t *cref;

    while (!VM_ENV_LOCAL_P(ep)) {
        if ((cref = check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) != NULL) return cref;
        ep = VM_ENV_PREV_EP(ep);
    }

    return check_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
800
801static int
802is_cref(const VALUE v, int can_be_svar)
803{
804 if (RB_TYPE_P(v, T_IMEMO)) {
805 switch (imemo_type(v)) {
806 case imemo_cref:
807 return TRUE;
808 case imemo_svar:
809 if (can_be_svar) return is_cref(((struct vm_svar *)v)->cref_or_me, FALSE);
810 default:
811 break;
812 }
813 }
814 return FALSE;
815}
816
/* Scan the env chain for a cref stored directly in an ME/CREF slot
 * (svar consulted only at the local frame); TRUE if one exists. */
static int
vm_env_cref_by_cref(const VALUE *ep)
{
    while (!VM_ENV_LOCAL_P(ep)) {
        if (is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE)) return TRUE;
        ep = VM_ENV_PREV_EP(ep);
    }
    return is_cref(ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE);
}
826
/* If *vptr holds a cref, replace it in place with a duplicate and return
 * the duplicate.  `parent`, when non-zero, is the heap object owning the
 * slot and receives the GC write barrier; otherwise the slot lives on
 * the VM stack and a forced write is used.  NULL when *vptr has no cref. */
static rb_cref_t *
cref_replace_with_duplicated_cref_each_frame(const VALUE *vptr, int can_be_svar, VALUE parent)
{
    const VALUE v = *vptr;
    rb_cref_t *cref, *new_cref;

    if (RB_TYPE_P(v, T_IMEMO)) {
        switch (imemo_type(v)) {
          case imemo_cref:
            cref = (rb_cref_t *)v;
            new_cref = vm_cref_dup(cref);
            if (parent) {
                RB_OBJ_WRITE(parent, vptr, new_cref);
            }
            else {
                VM_FORCE_WRITE(vptr, (VALUE)new_cref);
            }
            return (rb_cref_t *)new_cref;
          case imemo_svar:
            if (can_be_svar) {
                return cref_replace_with_duplicated_cref_each_frame(&((struct vm_svar *)v)->cref_or_me, FALSE, v);
            }
            /* fall through */
          case imemo_ment:
            rb_bug("cref_replace_with_duplicated_cref_each_frame: unreachable");
          default:
            break;
        }
    }
    return NULL;
}

/* Duplicate the first cref found along the env chain in place and return
 * the copy.  The caller must already know a cref is reachable (see
 * vm_env_cref_by_cref); reaching the end without one is a VM bug. */
static rb_cref_t *
vm_cref_replace_with_duplicated_cref(const VALUE *ep)
{
    if (vm_env_cref_by_cref(ep)) {
        rb_cref_t *cref;
        VALUE envval;

        while (!VM_ENV_LOCAL_P(ep)) {
            envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
            if ((cref = cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], FALSE, envval)) != NULL) {
                return cref;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        envval = VM_ENV_ESCAPED_P(ep) ? VM_ENV_ENVVAL(ep) : Qfalse;
        return cref_replace_with_duplicated_cref_each_frame(&ep[VM_ENV_DATA_INDEX_ME_CREF], TRUE, envval);
    }
    else {
        rb_bug("vm_cref_dup: unreachable");
    }
}
880
881static rb_cref_t *
882vm_get_cref(const VALUE *ep)
883{
884 rb_cref_t *cref = vm_env_cref(ep);
885
886 if (cref != NULL) {
887 return cref;
888 }
889 else {
890 rb_bug("vm_get_cref: unreachable");
891 }
892}
893
/* Non-static wrapper around vm_get_cref() for other translation units. */
rb_cref_t *
rb_vm_get_cref(const VALUE *ep)
{
    return vm_get_cref(ep);
}
899
900static rb_cref_t *
901vm_ec_cref(const rb_execution_context_t *ec)
902{
903 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
904
905 if (cfp == NULL) {
906 return NULL;
907 }
908 return vm_get_cref(cfp->ep);
909}
910
/* Return the cref usable as a constant-cache key: the current cref if
 * any scope in the chain is a singleton or cloned class (such scopes
 * make the lookup cref-dependent), otherwise NULL. */
static const rb_cref_t *
vm_get_const_key_cref(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);
    const rb_cref_t *key_cref = cref;

    while (cref) {
        if (FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            FL_TEST(CREF_CLASS(cref), RCLASS_CLONED)) {
            return key_cref;
        }
        cref = CREF_NEXT(cref);
    }

    /* does not include singleton class */
    return NULL;
}

/* Rebuild the cref chain node by node through *new_cref_ptr, stopping
 * at the first node whose class is `old_klass` — that node is replaced
 * by one referencing `new_klass`.  Without a match, the whole chain is
 * rebuilt and terminated with NULL. */
void
rb_vm_rewrite_cref(rb_cref_t *cref, VALUE old_klass, VALUE new_klass, rb_cref_t **new_cref_ptr)
{
    rb_cref_t *new_cref;

    while (cref) {
        if (CREF_CLASS(cref) == old_klass) {
            new_cref = vm_cref_new_use_prev(new_klass, METHOD_VISI_UNDEF, FALSE, cref, FALSE);
            *new_cref_ptr = new_cref;
            return;
        }
        new_cref = vm_cref_new_use_prev(CREF_CLASS(cref), METHOD_VISI_UNDEF, FALSE, cref, FALSE);
        cref = CREF_NEXT(cref);
        *new_cref_ptr = new_cref;
        new_cref_ptr = &new_cref->next;
    }
    *new_cref_ptr = NULL;
}
947
/* Build a new cref for entering `klass` (class/module body), linked to
 * the cref of `ep` — or, when ep is NULL, to the cref of the closest
 * Ruby-level caller frame (if any). */
static rb_cref_t *
vm_cref_push(const rb_execution_context_t *ec, VALUE klass, const VALUE *ep, int pushed_by_eval, int singleton)
{
    rb_cref_t *prev_cref = NULL;

    if (ep) {
        prev_cref = vm_env_cref(ep);
    }
    else {
        rb_control_frame_t *cfp = vm_get_ruby_level_caller_cfp(ec, ec->cfp);

        if (cfp) {
            prev_cref = vm_env_cref(cfp->ep);
        }
    }

    return vm_cref_new(klass, METHOD_VISI_PUBLIC, FALSE, prev_cref, pushed_by_eval, singleton);
}

/* Class/module of the innermost lexical scope (definition target). */
static inline VALUE
vm_get_cbase(const VALUE *ep)
{
    const rb_cref_t *cref = vm_get_cref(ep);

    return CREF_CLASS_FOR_DEFINITION(cref);
}
974
975static inline VALUE
976vm_get_const_base(const VALUE *ep)
977{
978 const rb_cref_t *cref = vm_get_cref(ep);
979
980 while (cref) {
981 if (!CREF_PUSHED_BY_EVAL(cref)) {
982 return CREF_CLASS_FOR_DEFINITION(cref);
983 }
984 cref = CREF_NEXT(cref);
985 }
986
987 return Qundef;
988}
989
/* Constants can only live under classes/modules; raise otherwise. */
static inline void
vm_check_if_namespace(VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS) && !RB_TYPE_P(klass, T_MODULE)) {
        rb_raise(rb_eTypeError, "%+"PRIsVALUE" is not a class/module", klass);
    }
}

/* Warn when a definition targets a refinement module's outer scope. */
static inline void
vm_ensure_not_refinement_module(VALUE self)
{
    if (RB_TYPE_P(self, T_MODULE) && FL_TEST(self, RMODULE_IS_REFINEMENT)) {
        rb_warn("not defined at the refinement, but at the outer class/module");
    }
}

/* NOTE(review): currently an identity function — `cfp` is unused. */
static inline VALUE
vm_get_iclass(const rb_control_frame_t *cfp, VALUE klass)
{
    return klass;
}
1011
/* Constant lookup.
 *
 *   orig_klass  scope to search; Qnil with allow_nil means "current
 *               lexical scope": the cref chain first, then the cref
 *               class (or self's class) and its ancestry
 *   is_defined  non-zero: only test existence (defined? semantics)
 *
 * Triggers autoload for Qundef-valued entries and, outside the main
 * Ractor, rejects non-shareable constant values. */
static inline VALUE
vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, bool allow_nil, int is_defined)
{
    void rb_const_warn_if_deprecated(const rb_const_entry_t *ce, VALUE klass, ID id);
    VALUE val;

    if (NIL_P(orig_klass) && allow_nil) {
        /* in current lexical scope */
        const rb_cref_t *root_cref = vm_get_cref(ec->cfp->ep);
        const rb_cref_t *cref;
        VALUE klass = Qnil;

        /* skip scopes introduced by eval */
        while (root_cref && CREF_PUSHED_BY_EVAL(root_cref)) {
            root_cref = CREF_NEXT(root_cref);
        }
        cref = root_cref;
        while (cref && CREF_NEXT(cref)) {
            if (CREF_PUSHED_BY_EVAL(cref)) {
                klass = Qnil;
            }
            else {
                klass = CREF_CLASS(cref);
            }
            cref = CREF_NEXT(cref);

            if (!NIL_P(klass)) {
                VALUE av, am = 0;
                rb_const_entry_t *ce;
              search_continue:
                if ((ce = rb_const_lookup(klass, id))) {
                    rb_const_warn_if_deprecated(ce, klass, id);
                    val = ce->value;
                    if (UNDEF_P(val)) {
                        /* Qundef value: autoload entry.  `am` remembers
                         * the class already tried so a failed autoload
                         * cannot loop via the goto above. */
                        if (am == klass) break;
                        am = klass;
                        if (is_defined) return 1;
                        if (rb_autoloading_value(klass, id, &av, NULL)) return av;
                        rb_autoload_load(klass, id);
                        goto search_continue;
                    }
                    else {
                        if (is_defined) {
                            return 1;
                        }
                        else {
                            if (UNLIKELY(!rb_ractor_main_p())) {
                                if (!rb_ractor_shareable_p(val)) {
                                    rb_raise(rb_eRactorIsolationError,
                                             "can not access non-shareable objects in constant %"PRIsVALUE"::%s by non-main ractor.", rb_class_path(klass), rb_id2name(id));
                                }
                            }
                            return val;
                        }
                    }
                }
            }
        }

        /* search self */
        if (root_cref && !NIL_P(CREF_CLASS(root_cref))) {
            klass = vm_get_iclass(ec->cfp, CREF_CLASS(root_cref));
        }
        else {
            klass = CLASS_OF(ec->cfp->self);
        }

        if (is_defined) {
            return rb_const_defined(klass, id);
        }
        else {
            return rb_const_get(klass, id);
        }
    }
    else {
        vm_check_if_namespace(orig_klass);
        if (is_defined) {
            return rb_public_const_defined_from(orig_klass, id);
        }
        else {
            return rb_public_const_get_from(orig_klass, id);
        }
    }
}
1095
/* Non-static wrapper; `allow_nil` arrives as a Ruby boolean VALUE. */
VALUE
rb_vm_get_ev_const(rb_execution_context_t *ec, VALUE orig_klass, ID id, VALUE allow_nil)
{
    return vm_get_ev_const(ec, orig_klass, id, allow_nil == Qtrue, 0);
}
1101
1102static inline VALUE
1103vm_get_ev_const_chain(rb_execution_context_t *ec, const ID *segments)
1104{
1105 VALUE val = Qnil;
1106 int idx = 0;
1107 int allow_nil = TRUE;
1108 if (segments[0] == idNULL) {
1109 val = rb_cObject;
1110 idx++;
1111 allow_nil = FALSE;
1112 }
1113 while (segments[idx]) {
1114 ID id = segments[idx++];
1115 val = vm_get_ev_const(ec, val, id, allow_nil, 0);
1116 allow_nil = FALSE;
1117 }
1118 return val;
1119}
1120
1121
/* Find the class/module that owns class variables for `cref`: skip
 * scopes that are nil-classed, singletons, or pushed by eval, then
 * optionally raise if the search reached the toplevel cref. */
static inline VALUE
vm_get_cvar_base(const rb_cref_t *cref, const rb_control_frame_t *cfp, int top_level_raise)
{
    VALUE klass;

    if (!cref) {
        rb_bug("vm_get_cvar_base: no cref");
    }

    while (CREF_NEXT(cref) &&
           (NIL_P(CREF_CLASS(cref)) || FL_TEST(CREF_CLASS(cref), FL_SINGLETON) ||
            CREF_PUSHED_BY_EVAL(cref) || CREF_SINGLETON(cref))) {
        cref = CREF_NEXT(cref);
    }
    if (top_level_raise && !CREF_NEXT(cref)) {
        rb_raise(rb_eRuntimeError, "class variable access from toplevel");
    }

    klass = vm_get_iclass(cfp, CREF_CLASS(cref));

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class variables available");
    }
    return klass;
}

ALWAYS_INLINE(static void fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id));
/* Record a resolved ivar (index, shape) either in the attr call cache
 * (attr_reader/attr_writer dispatch) or in the instruction's inline
 * cache. */
static inline void
fill_ivar_cache(const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr, attr_index_t index, shape_id_t shape_id)
{
    if (is_attr) {
        vm_cc_attr_index_set(cc, index, shape_id);
    }
    else {
        vm_ic_attr_index_set(iseq, ic, index, shape_id);
    }
}
1159
/* True when either `cond` is false or `val` is Ractor-shareable: used to
 * assert that a shareable holder only exposes shareable values. */
#define ractor_incidental_shareable_p(cond, val) \
    (!(cond) || rb_ractor_shareable_p(val))
/* Same check, keyed on the shareability of the holder object itself. */
#define ractor_object_incidental_shareable_p(obj, val) \
    ractor_incidental_shareable_p(rb_ractor_shareable_p(obj), val)

/* Cached attr index sentinel meaning "ivar known to be unset for this shape". */
#define ATTR_INDEX_NOT_SET (attr_index_t)-1
1166
/* Shape-cached instance variable read used by getinstancevariable and by
 * attr_reader dispatch (is_attr).  On a cache hit (cached shape id matches
 * the object's current shape) the value is read directly from the ivar
 * array; on a miss the cache (ic for iseqs, cc for attr methods) is
 * refilled from the shape tree.  Returns Qnil for unset ivars and for
 * special constants.  Non-main ractors reading class/module ivars take
 * the general (locked, shareability-checked) path. */
ALWAYS_INLINE(static VALUE vm_getivar(VALUE, ID, const rb_iseq_t *, IVC, const struct rb_callcache *, int));
static inline VALUE
vm_getivar(VALUE obj, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
{
#if OPT_IC_FOR_IVAR
    VALUE val = Qundef;
    shape_id_t shape_id;
    VALUE * ivar_list;

    if (SPECIAL_CONST_P(obj)) {
        /* Immediates cannot carry instance variables. */
        return Qnil;
    }

#if SHAPE_IN_BASIC_FLAGS
    shape_id = RBASIC_SHAPE_ID(obj);
#endif

    /* Locate the ivar storage (and, without SHAPE_IN_BASIC_FLAGS, the
     * shape id) for each object layout. */
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        ivar_list = ROBJECT_IVPTR(obj);
        VM_ASSERT(rb_ractor_shareable_p(obj) ? rb_ractor_shareable_p(val) : true);

#if !SHAPE_IN_BASIC_FLAGS
        shape_id = ROBJECT_SHAPE_ID(obj);
#endif
        break;
      case T_CLASS:
      case T_MODULE:
        {
            if (UNLIKELY(!rb_ractor_main_p())) {
                // For two reasons we can only use the fast path on the main
                // ractor.
                // First, only the main ractor is allowed to set ivars on classes
                // and modules. So we can skip locking.
                // Second, other ractors need to check the shareability of the
                // values returned from the class ivars.
                goto general_path;
            }

            ivar_list = RCLASS_IVPTR(obj);

#if !SHAPE_IN_BASIC_FLAGS
            shape_id = RCLASS_SHAPE_ID(obj);
#endif

            break;
        }
      default:
        if (FL_TEST_RAW(obj, FL_EXIVAR)) {
            /* Generic objects keep ivars in an external table. */
            struct gen_ivtbl *ivtbl;
            rb_gen_ivtbl_get(obj, id, &ivtbl);
#if !SHAPE_IN_BASIC_FLAGS
            shape_id = ivtbl->shape_id;
#endif
            ivar_list = ivtbl->ivptr;
        }
        else {
            return Qnil;
        }
    }

    shape_id_t cached_id;
    attr_index_t index;

    if (is_attr) {
        vm_cc_atomic_shape_and_index(cc, &cached_id, &index);
    }
    else {
        vm_ic_atomic_shape_and_index(ic, &cached_id, &index);
    }

    if (LIKELY(cached_id == shape_id)) {
        /* Cache hit: the cached index is valid for this shape. */
        RUBY_ASSERT(cached_id != OBJ_TOO_COMPLEX_SHAPE_ID);

        if (index == ATTR_INDEX_NOT_SET) {
            return Qnil;
        }

        val = ivar_list[index];
        RUBY_ASSERT(!UNDEF_P(val));
    }
    else { // cache miss case
#if RUBY_DEBUG
        if (is_attr) {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_cc_miss_unset);
            }
        }
        else {
            if (cached_id != INVALID_SHAPE_ID) {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_set);
            }
            else {
                RB_DEBUG_COUNTER_INC(ivar_get_ic_miss_unset);
            }
        }
#endif

        rb_shape_t *shape = rb_shape_get_shape_by_id(shape_id);

        if (shape_id == OBJ_TOO_COMPLEX_SHAPE_ID) {
            /* Too-complex objects fall back to a hash lookup and are not
             * cached. */
            if (!st_lookup(ROBJECT_IV_HASH(obj), id, &val)) {
                val = Qnil;
            }
        }
        else {
            if (rb_shape_get_iv_index(shape, id, &index)) {
                // This fills in the cache with the shared cache object.
                // "ent" is the shared cache object
                fill_ivar_cache(iseq, ic, cc, is_attr, index, shape_id);

                // We fetched the ivar list above
                val = ivar_list[index];
                RUBY_ASSERT(!UNDEF_P(val));
            }
            else {
                /* Unset ivar: cache the miss so the next read is fast. */
                if (is_attr) {
                    vm_cc_attr_index_initialize(cc, shape_id);
                }
                else {
                    vm_ic_attr_index_initialize(ic, shape_id);
                }

                val = Qnil;
            }
        }

    }

    RUBY_ASSERT(!UNDEF_P(val));

    return val;

general_path:
#endif /* OPT_IC_FOR_IVAR */
    RB_DEBUG_COUNTER_INC(ivar_get_ic_miss);

    /* Uncached path: full ivar lookup (locks / shareability checks). */
    if (is_attr) {
        return rb_attr_get(obj, id);
    }
    else {
        return rb_ivar_get(obj, id);
    }
}
1314
1315static void
1316populate_cache(attr_index_t index, shape_id_t next_shape_id, ID id, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, bool is_attr)
1317{
1318 RUBY_ASSERT(next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);
1319
1320 // Cache population code
1321 if (is_attr) {
1322 vm_cc_attr_index_set(cc, index, next_shape_id);
1323 }
1324 else {
1325 vm_ic_attr_index_set(iseq, ic, index, next_shape_id);
1326 }
1327}
1328
1329ALWAYS_INLINE(static VALUE vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr));
1330NOINLINE(static VALUE vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic));
1331NOINLINE(static VALUE vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc));
1332
1333static VALUE
1334vm_setivar_slowpath(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic, const struct rb_callcache *cc, int is_attr)
1335{
1336#if OPT_IC_FOR_IVAR
1337 switch (BUILTIN_TYPE(obj)) {
1338 case T_OBJECT:
1339 {
1341
1342 attr_index_t index = rb_obj_ivar_set(obj, id, val);
1343
1344 shape_id_t next_shape_id = ROBJECT_SHAPE_ID(obj);
1345
1346 if (next_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID) {
1347 populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
1348 }
1349
1350 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_iv_hit);
1351 return val;
1352 }
1353 case T_CLASS:
1354 case T_MODULE:
1355 break;
1356 default:
1357 {
1358 rb_ivar_set(obj, id, val);
1359 shape_id_t next_shape_id = rb_shape_get_shape_id(obj);
1360 rb_shape_t *next_shape = rb_shape_get_shape_by_id(next_shape_id);
1361 attr_index_t index;
1362
1363 if (rb_shape_get_iv_index(next_shape, id, &index)) { // based off the hash stored in the transition tree
1364 if (index >= MAX_IVARS) {
1365 rb_raise(rb_eArgError, "too many instance variables");
1366 }
1367
1368 populate_cache(index, next_shape_id, id, iseq, ic, cc, is_attr);
1369 }
1370 else {
1371 rb_bug("didn't find the id\n");
1372 }
1373
1374 return val;
1375 }
1376 }
1377#endif
1378 RB_DEBUG_COUNTER_INC(ivar_set_ic_miss);
1379 return rb_ivar_set(obj, id, val);
1380}
1381
/* Out-of-line slow path for setinstancevariable (iseq inline cache). */
static VALUE
vm_setivar_slowpath_ivar(VALUE obj, ID id, VALUE val, const rb_iseq_t *iseq, IVC ic)
{
    return vm_setivar_slowpath(obj, id, val, iseq, ic, NULL, false);
}
1387
/* Out-of-line slow path for attr_writer dispatch (call cache). */
static VALUE
vm_setivar_slowpath_attr(VALUE obj, ID id, VALUE val, const struct rb_callcache *cc)
{
    return vm_setivar_slowpath(obj, id, val, NULL, NULL, cc, true);
}
1393
/* Cached ivar store for "generic" objects (those keeping ivars in the
 * external generic-ivtbl, not T_OBJECT/T_CLASS).  Handles a same-shape
 * hit or a single cached shape transition; returns Qundef when the cache
 * does not apply so the caller can fall back to the slow path. */
NOINLINE(static VALUE vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index));
static VALUE
vm_setivar_default(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
#if SHAPE_IN_BASIC_FLAGS
    shape_id_t shape_id = RBASIC_SHAPE_ID(obj);
#else
    shape_id_t shape_id = rb_generic_shape_id(obj);
#endif

    struct gen_ivtbl *ivtbl = 0;

    // Cache hit case
    if (shape_id == dest_shape_id) {
        RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

        // Just get the IV table
        rb_gen_ivtbl_get(obj, 0, &ivtbl);
    }
    else if (dest_shape_id != INVALID_SHAPE_ID) {
        /* Cached transition: valid only if the object currently has the
         * transition's parent shape and the edge adds exactly this id. */
        rb_shape_t * dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
        shape_id_t source_shape_id = dest_shape->parent_id;

        if (shape_id == source_shape_id && dest_shape->edge_name == id && dest_shape->type == SHAPE_IVAR) {
            ivtbl = rb_ensure_generic_iv_list_size(obj, dest_shape, index + 1);
#if SHAPE_IN_BASIC_FLAGS
            RBASIC_SET_SHAPE_ID(obj, dest_shape_id);
#else
            RUBY_ASSERT(ivtbl->shape_id == dest_shape_id);
#endif
        }
        else {
            return Qundef;
        }
    }
    else {
        return Qundef;
    }

    VALUE *ptr = ivtbl->ivptr;

    RB_OBJ_WRITE(obj, &ptr[index], val);

    RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);

    return val;
}
1441
/* Fast-path ivar store for T_OBJECT receivers using the cached shape and
 * index.  Handles a same-shape hit or one cached shape transition; any
 * other case (including classes/modules, which need locking) returns
 * Qundef and is handled by the caller's fallback paths. */
static inline VALUE
vm_setivar(VALUE obj, ID id, VALUE val, shape_id_t dest_shape_id, attr_index_t index)
{
#if OPT_IC_FOR_IVAR
    switch (BUILTIN_TYPE(obj)) {
      case T_OBJECT:
        {
            VM_ASSERT(!rb_ractor_shareable_p(obj) || rb_obj_frozen_p(obj));

            shape_id_t shape_id = ROBJECT_SHAPE_ID(obj);
            RUBY_ASSERT(dest_shape_id != OBJ_TOO_COMPLEX_SHAPE_ID);

            if (LIKELY(shape_id == dest_shape_id)) {
                /* Cache hit: ivar slot already exists at `index`. */
                RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);
                VM_ASSERT(!rb_ractor_shareable_p(obj));
            }
            else if (dest_shape_id != INVALID_SHAPE_ID) {
                /* Cached transition: apply it only when the object is at
                 * the transition's source shape and the edge adds `id`. */
                rb_shape_t *dest_shape = rb_shape_get_shape_by_id(dest_shape_id);
                shape_id_t source_shape_id = dest_shape->parent_id;

                if (shape_id == source_shape_id && dest_shape->edge_name == id) {
                    RUBY_ASSERT(dest_shape_id != INVALID_SHAPE_ID && shape_id != INVALID_SHAPE_ID);

                    ROBJECT_SET_SHAPE_ID(obj, dest_shape_id);

                    RUBY_ASSERT(rb_shape_get_next_iv_shape(rb_shape_get_shape_by_id(source_shape_id), id) == dest_shape);
                    RUBY_ASSERT(index < dest_shape->capacity);
                }
                else {
                    break;
                }
            }
            else {
                break;
            }

            VALUE *ptr = ROBJECT_IVPTR(obj);

            RUBY_ASSERT(!rb_shape_obj_too_complex(obj));
            RB_OBJ_WRITE(obj, &ptr[index], val);

            RB_DEBUG_COUNTER_INC(ivar_set_ic_hit);
            return val;
        }
        break;
      case T_CLASS:
      case T_MODULE:
        RB_DEBUG_COUNTER_INC(ivar_set_ic_miss_noobject);
      default:
        break;
    }

    return Qundef;
#endif /* OPT_IC_FOR_IVAR */
}
1497
/* Look up class variable `id` from `klass`, then (re)fill the inline
 * class-variable cache `ic` with the per-class cache entry so the next
 * access can skip the lookup.  Returns the cvar's value. */
static VALUE
update_classvariable_cache(const rb_iseq_t *iseq, VALUE klass, ID id, const rb_cref_t * cref, ICVARC ic)
{
    VALUE defined_class = 0;
    VALUE cvar_value = rb_cvar_find(klass, id, &defined_class);

    /* Normalize an include-wrapper back to the real module. */
    if (RB_TYPE_P(defined_class, T_ICLASS)) {
        defined_class = RBASIC(defined_class)->klass;
    }

    struct rb_id_table *rb_cvc_tbl = RCLASS_CVC_TBL(defined_class);
    if (!rb_cvc_tbl) {
        rb_bug("the cvc table should be set");
    }

    VALUE ent_data;
    if (!rb_id_table_lookup(rb_cvc_tbl, id, &ent_data)) {
        rb_bug("should have cvar cache entry");
    }

    struct rb_cvar_class_tbl_entry *ent = (void *)ent_data;

    /* Stamp the entry with the current global cvar state so later hits
     * can detect invalidation, and point the inline cache at it. */
    ent->global_cvar_state = GET_GLOBAL_CVAR_STATE();
    ent->cref = cref;
    ic->entry = ent;

    RUBY_ASSERT(BUILTIN_TYPE((VALUE)cref) == T_IMEMO && IMEMO_TYPE_P(cref, imemo_cref));
    /* Write barriers: the iseq and class now reference the cref/class. */
    RB_OBJ_WRITTEN(iseq, Qundef, ent->cref);
    RB_OBJ_WRITTEN(iseq, Qundef, ent->class_value);
    RB_OBJ_WRITTEN(ent->class_value, Qundef, ent->cref);

    return cvar_value;
}
1531
/* Read class variable `id`, using the inline cache when it is still valid
 * (same global cvar state, same cref, main ractor); otherwise resolve the
 * owning class and refill the cache. */
static inline VALUE
vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, ICVARC ic)
{
    const rb_cref_t *cref;
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_read_inline_hit);

        VALUE v = rb_ivar_lookup(ic->entry->class_value, id, Qundef);
        RUBY_ASSERT(!UNDEF_P(v));

        return v;
    }

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    return update_classvariable_cache(iseq, klass, id, cref, ic);
}
1551
/* Exported wrapper so JIT-compiled code can call the inlined helper. */
VALUE
rb_vm_getclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, ICVARC ic)
{
    return vm_getclassvariable(iseq, cfp, id, ic);
}
1557
/* Write class variable `id`, using the inline cache when it is still
 * valid (same global cvar state, same cref, main ractor); otherwise
 * resolve the owning class, set the cvar, and refill the cache. */
static inline void
vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *reg_cfp, ID id, VALUE val, ICVARC ic)
{
    const rb_cref_t *cref;
    cref = vm_get_cref(GET_EP());

    if (ic->entry && ic->entry->global_cvar_state == GET_GLOBAL_CVAR_STATE() && ic->entry->cref == cref && LIKELY(rb_ractor_main_p())) {
        RB_DEBUG_COUNTER_INC(cvar_write_inline_hit);

        rb_class_ivar_set(ic->entry->class_value, id, val);
        return;
    }

    VALUE klass = vm_get_cvar_base(cref, reg_cfp, 1);

    rb_cvar_set(klass, id, val);

    update_classvariable_cache(iseq, klass, id, cref, ic);
}
1577
/* Exported wrapper so JIT-compiled code can call the inlined helper. */
void
rb_vm_setclassvariable(const rb_iseq_t *iseq, const rb_control_frame_t *cfp, ID id, VALUE val, ICVARC ic)
{
    vm_setclassvariable(iseq, cfp, id, val, ic);
}
1583
/* getinstancevariable instruction body: cached ivar read (non-attr). */
static inline VALUE
vm_getinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, IVC ic)
{
    return vm_getivar(obj, id, iseq, ic, NULL, FALSE);
}
1589
1590static inline void
1591vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
1592{
1593 if (RB_SPECIAL_CONST_P(obj)) {
1595 return;
1596 }
1597
1598 shape_id_t dest_shape_id;
1599 attr_index_t index;
1600 vm_ic_atomic_shape_and_index(ic, &dest_shape_id, &index);
1601
1602 if (UNLIKELY(UNDEF_P(vm_setivar(obj, id, val, dest_shape_id, index)))) {
1603 switch (BUILTIN_TYPE(obj)) {
1604 case T_OBJECT:
1605 case T_CLASS:
1606 case T_MODULE:
1607 break;
1608 default:
1609 if (!UNDEF_P(vm_setivar_default(obj, id, val, dest_shape_id, index))) {
1610 return;
1611 }
1612 }
1613 vm_setivar_slowpath_ivar(obj, id, val, iseq, ic);
1614 }
1615}
1616
/* Exported wrapper so JIT-compiled code can call the inlined helper. */
void
rb_vm_setinstancevariable(const rb_iseq_t *iseq, VALUE obj, ID id, VALUE val, IVC ic)
{
    vm_setinstancevariable(iseq, obj, id, val, ic);
}
1622
/* `throw` with state 0: propagate an in-flight error object, translating
 * it back into the tag state the surrounding handler should observe. */
static VALUE
vm_throw_continue(const rb_execution_context_t *ec, VALUE err)
{
    /* continue throw */

    if (FIXNUM_P(err)) {
        /* a raw tag state was pushed as a Fixnum */
        ec->tag->state = FIX2INT(err);
    }
    else if (SYMBOL_P(err)) {
        ec->tag->state = TAG_THROW;
    }
    else if (THROW_DATA_P(err)) {
        ec->tag->state = THROW_DATA_STATE((struct vm_throw_data *)err);
    }
    else {
        /* a plain exception object: re-raise */
        ec->tag->state = TAG_RAISE;
    }
    return err;
}
1642
/* `throw` with a non-zero state (break/retry/return/...): locate the
 * control frame that should catch the throw and package the payload as
 * throw data.  Raises LocalJumpError when the jump target no longer
 * exists (orphan break / unexpected return). */
static VALUE
vm_throw_start(const rb_execution_context_t *ec, rb_control_frame_t *const reg_cfp, enum ruby_tag_type state,
               const int flag, const VALUE throwobj)
{
    const rb_control_frame_t *escape_cfp = NULL;
    const rb_control_frame_t * const eocfp = RUBY_VM_END_CONTROL_FRAME(ec); /* end of control frame pointer */

    if (flag != 0) {
        /* do nothing */
    }
    else if (state == TAG_BREAK) {
        int is_orphan = 1;
        const VALUE *ep = GET_EP();
        const rb_iseq_t *base_iseq = GET_ISEQ();
        escape_cfp = reg_cfp;

        /* Climb from the current frame up to the block that `break`
         * belongs to, tracking its defining ep. */
        while (ISEQ_BODY(base_iseq)->type != ISEQ_TYPE_BLOCK) {
            if (ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
                ep = escape_cfp->ep;
                base_iseq = escape_cfp->iseq;
            }
            else {
                ep = VM_ENV_PREV_EP(ep);
                base_iseq = ISEQ_BODY(base_iseq)->parent_iseq;
                escape_cfp = rb_vm_search_cf_from_ep(ec, escape_cfp, ep);
                VM_ASSERT(escape_cfp->iseq == base_iseq);
            }
        }

        if (VM_FRAME_LAMBDA_P(escape_cfp)) {
            /* lambda{... break ...} */
            is_orphan = 0;
            state = TAG_RETURN;
        }
        else {
            ep = VM_ENV_PREV_EP(ep);

            /* Look for a live frame owning `ep` that still has a BREAK
             * catch-table entry covering the current pc. */
            while (escape_cfp < eocfp) {
                if (escape_cfp->ep == ep) {
                    const rb_iseq_t *const iseq = escape_cfp->iseq;
                    const VALUE epc = escape_cfp->pc - ISEQ_BODY(iseq)->iseq_encoded;
                    const struct iseq_catch_table *const ct = ISEQ_BODY(iseq)->catch_table;
                    unsigned int i;

                    if (!ct) break;
                    for (i=0; i < ct->size; i++) {
                        const struct iseq_catch_table_entry *const entry =
                            UNALIGNED_MEMBER_PTR(ct, entries[i]);

                        if (entry->type == CATCH_TYPE_BREAK &&
                            entry->iseq == base_iseq &&
                            entry->start < epc && entry->end >= epc) {
                            if (entry->cont == epc) { /* found! */
                                is_orphan = 0;
                            }
                            break;
                        }
                    }
                    break;
                }

                escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
            }
        }

        if (is_orphan) {
            rb_vm_localjump_error("break from proc-closure", throwobj, TAG_BREAK);
        }
    }
    else if (state == TAG_RETRY) {
        const VALUE *ep = VM_ENV_PREV_EP(GET_EP());

        escape_cfp = rb_vm_search_cf_from_ep(ec, reg_cfp, ep);
    }
    else if (state == TAG_RETURN) {
        const VALUE *current_ep = GET_EP();
        const VALUE *target_ep = NULL, *target_lep, *ep = current_ep;
        int in_class_frame = 0;
        int toplevel = 1;
        escape_cfp = reg_cfp;

        // find target_lep, target_ep
        while (!VM_ENV_LOCAL_P(ep)) {
            if (VM_ENV_FLAGS(ep, VM_FRAME_FLAG_LAMBDA) && target_ep == NULL) {
                target_ep = ep;
            }
            ep = VM_ENV_PREV_EP(ep);
        }
        target_lep = ep;

        /* Walk frames outward looking for the method (or lambda/top)
         * frame that this `return` should unwind to. */
        while (escape_cfp < eocfp) {
            const VALUE *lep = VM_CF_LEP(escape_cfp);

            if (!target_lep) {
                target_lep = lep;
            }

            if (lep == target_lep &&
                VM_FRAME_RUBYFRAME_P(escape_cfp) &&
                ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_CLASS) {
                in_class_frame = 1;
                target_lep = 0;
            }

            if (lep == target_lep) {
                if (VM_FRAME_LAMBDA_P(escape_cfp)) {
                    toplevel = 0;
                    if (in_class_frame) {
                        /* lambda {class A; ... return ...; end} */
                        goto valid_return;
                    }
                    else {
                        const VALUE *tep = current_ep;

                        while (target_lep != tep) {
                            if (escape_cfp->ep == tep) {
                                /* in lambda */
                                if (tep == target_ep) {
                                    goto valid_return;
                                }
                                else {
                                    goto unexpected_return;
                                }
                            }
                            tep = VM_ENV_PREV_EP(tep);
                        }
                    }
                }
                else if (VM_FRAME_RUBYFRAME_P(escape_cfp)) {
                    switch (ISEQ_BODY(escape_cfp->iseq)->type) {
                      case ISEQ_TYPE_TOP:
                      case ISEQ_TYPE_MAIN:
                        if (toplevel) {
                            if (in_class_frame) goto unexpected_return;
                            if (target_ep == NULL) {
                                goto valid_return;
                            }
                            else {
                                goto unexpected_return;
                            }
                        }
                        break;
                      case ISEQ_TYPE_EVAL:
                      case ISEQ_TYPE_CLASS:
                        toplevel = 0;
                        break;
                      default:
                        break;
                    }
                }
            }

            if (escape_cfp->ep == target_lep && ISEQ_BODY(escape_cfp->iseq)->type == ISEQ_TYPE_METHOD) {
                if (target_ep == NULL) {
                    goto valid_return;
                }
                else {
                    goto unexpected_return;
                }
            }

            escape_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(escape_cfp);
        }
      unexpected_return:;
        rb_vm_localjump_error("unexpected return", throwobj, TAG_RETURN);

      valid_return:;
        /* do nothing */
    }
    else {
        rb_bug("isns(throw): unsupported throw type");
    }

    ec->tag->state = state;
    return (VALUE)THROW_DATA_NEW(throwobj, escape_cfp, state);
}
1820
1821static VALUE
1822vm_throw(const rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
1823 rb_num_t throw_state, VALUE throwobj)
1824{
1825 const int state = (int)(throw_state & VM_THROW_STATE_MASK);
1826 const int flag = (int)(throw_state & VM_THROW_NO_ESCAPE_FLAG);
1827
1828 if (state != 0) {
1829 return vm_throw_start(ec, reg_cfp, state, flag, throwobj);
1830 }
1831 else {
1832 return vm_throw_continue(ec, throwobj);
1833 }
1834}
1835
/* expandarray instruction body: spread `ary` into `num` stack slots
 * (plus a rest array when flag bit 0 is set), padding with nil.  Flag
 * bit 1 selects "post" order (elements pushed from the tail).  A non-
 * array value is treated as a one-element array via to_ary coercion. */
static inline void
vm_expandarray(VALUE *sp, VALUE ary, rb_num_t num, int flag)
{
    int is_splat = flag & 0x01;
    rb_num_t space_size = num + is_splat;
    VALUE *base = sp - 1;
    const VALUE *ptr;
    rb_num_t len;
    const VALUE obj = ary;

    if (!RB_TYPE_P(ary, T_ARRAY) && NIL_P(ary = rb_check_array_type(ary))) {
        /* not coercible: treat the original object as [obj] */
        ary = obj;
        ptr = &ary;
        len = 1;
    }
    else {
        ptr = RARRAY_CONST_PTR_TRANSIENT(ary);
        len = (rb_num_t)RARRAY_LEN(ary);
    }

    if (space_size == 0) {
        /* no space left on stack */
    }
    else if (flag & 0x02) {
        /* post: ..., nil ,ary[-1], ..., ary[0..-num] # top */
        rb_num_t i = 0, j;

        if (len < num) {
            /* pad the missing leading slots with nil */
            for (i=0; i<num-len; i++) {
                *base++ = Qnil;
            }
        }
        for (j=0; i<num; i++, j++) {
            VALUE v = ptr[len - j - 1];
            *base++ = v;
        }
        if (is_splat) {
            /* everything not consumed above becomes the rest array */
            *base = rb_ary_new4(len - j, ptr);
        }
    }
    else {
        /* normal: ary[num..-1], ary[num-2], ary[num-3], ..., ary[0] # top */
        rb_num_t i;
        VALUE *bptr = &base[space_size - 1];

        for (i=0; i<num; i++) {
            if (len <= i) {
                /* array exhausted: nil-fill the remaining slots */
                for (; i<num; i++) {
                    *bptr-- = Qnil;
                }
                break;
            }
            *bptr-- = ptr[i];
        }
        if (is_splat) {
            if (num > len) {
                *bptr = rb_ary_new();
            }
            else {
                *bptr = rb_ary_new4(len - num, ptr + num);
            }
        }
    }
    RB_GC_GUARD(ary);
}
1901
1902static VALUE vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
1903
1904static VALUE vm_mtbl_dump(VALUE klass, ID target_mid);
1905
/* Allocate an empty per-class call-cache entry list for (klass, mid),
 * mark the method entry as cached, and register the list in cc_tbl.
 * Entries are added lazily by vm_ccs_push(). */
static struct rb_class_cc_entries *
vm_ccs_create(VALUE klass, struct rb_id_table *cc_tbl, ID mid, const rb_callable_method_entry_t *cme)
{
    struct rb_class_cc_entries *ccs = ALLOC(struct rb_class_cc_entries);
#if VM_CHECK_MODE > 0
    /* debug signature used by vm_ccs_p() sanity checks */
    ccs->debug_sig = ~(VALUE)ccs;
#endif
    ccs->capa = 0;
    ccs->len = 0;
    ccs->cme = cme;
    METHOD_ENTRY_CACHED_SET((rb_callable_method_entry_t *)cme);
    ccs->entries = NULL;

    rb_id_table_insert(cc_tbl, mid, (VALUE)ccs);
    /* write barrier: klass now references cme through the table */
    RB_OBJ_WRITTEN(klass, Qundef, cme);
    return ccs;
}
1923
/* Append a (ci, cc) pair to a per-class ccs list, growing the entries
 * array geometrically (1, 2, 4, ...).  Unmarkable ci/cc cannot be kept
 * alive by write barriers, so they are simply not cached. */
static void
vm_ccs_push(VALUE klass, struct rb_class_cc_entries *ccs, const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    if (! vm_cc_markable(cc)) {
        return;
    }
    else if (! vm_ci_markable(ci)) {
        return;
    }

    if (UNLIKELY(ccs->len == ccs->capa)) {
        if (ccs->capa == 0) {
            ccs->capa = 1;
            ccs->entries = ALLOC_N(struct rb_class_cc_entries_entry, ccs->capa);
        }
        else {
            ccs->capa *= 2;
            REALLOC_N(ccs->entries, struct rb_class_cc_entries_entry, ccs->capa);
        }
    }
    VM_ASSERT(ccs->len < ccs->capa);

    const int pos = ccs->len++;
    /* barriered stores: klass keeps the cached ci/cc alive */
    RB_OBJ_WRITE(klass, &ccs->entries[pos].ci, ci);
    RB_OBJ_WRITE(klass, &ccs->entries[pos].cc, cc);

    if (RB_DEBUG_COUNTER_SETMAX(ccs_maxlen, ccs->len)) {
        // for tuning
        // vm_mtbl_dump(klass, 0);
    }
}
1955
1956#if VM_CHECK_MODE > 0
/* Debug helper (VM_CHECK_MODE): print every cached (ci, cc) pair of a
 * per-class call-cache entry list. */
void
rb_vm_ccs_dump(struct rb_class_cc_entries *ccs)
{
    ruby_debug_printf("ccs:%p (%d,%d)\n", (void *)ccs, ccs->len, ccs->capa);
    for (int i=0; i<ccs->len; i++) {
        vm_ci_dump(ccs->entries[i].ci);
        rp(ccs->entries[i].cc);
    }
}
1966
/* Debug helper (VM_CHECK_MODE): assert the internal consistency of a
 * ccs list — every cached entry must belong to (klass, mid) and agree
 * with the list's method entry.  Always returns TRUE (asserts on
 * violation). */
static int
vm_ccs_verify(struct rb_class_cc_entries *ccs, ID mid, VALUE klass)
{
    VM_ASSERT(vm_ccs_p(ccs));
    VM_ASSERT(ccs->len <= ccs->capa);

    for (int i=0; i<ccs->len; i++) {
        const struct rb_callinfo  *ci = ccs->entries[i].ci;
        const struct rb_callcache *cc = ccs->entries[i].cc;

        VM_ASSERT(vm_ci_p(ci));
        VM_ASSERT(vm_ci_mid(ci) == mid);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(vm_cc_class_check(cc, klass));
        VM_ASSERT(vm_cc_check_cme(cc, ccs->cme));
    }
    return TRUE;
}
1985#endif
1986
1987#ifndef MJIT_HEADER
1988
1989static const rb_callable_method_entry_t *check_overloaded_cme(const rb_callable_method_entry_t *cme, const struct rb_callinfo * const ci);
1990
/* Find (or create) the call cache for (klass, ci).  Consults the class's
 * cc table first; on a hit with a valid method entry the cached cc is
 * returned.  Otherwise the method is looked up, a new cc is created and
 * pushed onto the class's ccs list.  Returns the shared empty cc when
 * the method is undefined.  Caller must hold the VM lock. */
static const struct rb_callcache *
vm_search_cc(const VALUE klass, const struct rb_callinfo * const ci)
{
    const ID mid = vm_ci_mid(ci);
    struct rb_id_table *cc_tbl = RCLASS_CC_TBL(klass);
    struct rb_class_cc_entries *ccs = NULL;
    VALUE ccs_data;

    if (cc_tbl) {
        if (rb_id_table_lookup(cc_tbl, mid, &ccs_data)) {
            ccs = (struct rb_class_cc_entries *)ccs_data;
            const int ccs_len = ccs->len;

            if (UNLIKELY(METHOD_ENTRY_INVALIDATED(ccs->cme))) {
                /* stale: drop the whole list and re-resolve below */
                rb_vm_ccs_free(ccs);
                rb_id_table_delete(cc_tbl, mid);
                ccs = NULL;
            }
            else {
                VM_ASSERT(vm_ccs_verify(ccs, mid, klass));

                for (int i=0; i<ccs_len; i++) {
                    const struct rb_callinfo  *ccs_ci = ccs->entries[i].ci;
                    const struct rb_callcache *ccs_cc = ccs->entries[i].cc;

                    VM_ASSERT(vm_ci_p(ccs_ci));
                    VM_ASSERT(IMEMO_TYPE_P(ccs_cc, imemo_callcache));

                    if (ccs_ci == ci) { // TODO: equality
                        RB_DEBUG_COUNTER_INC(cc_found_in_ccs);

                        VM_ASSERT(vm_cc_cme(ccs_cc)->called_id == mid);
                        VM_ASSERT(ccs_cc->klass == klass);
                        VM_ASSERT(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(ccs_cc)));

                        return ccs_cc;
                    }
                }
            }
        }
    }
    else {
        cc_tbl = RCLASS_CC_TBL(klass) = rb_id_table_create(2);
    }

    RB_DEBUG_COUNTER_INC(cc_not_found_in_ccs);

    const rb_callable_method_entry_t *cme;

    if (ccs) {
        /* the list already knows the resolved method entry */
        cme = ccs->cme;
        cme = UNDEFINED_METHOD_ENTRY_P(cme) ? NULL : cme;

        VM_ASSERT(cme == rb_callable_method_entry(klass, mid));
    }
    else {
        cme = rb_callable_method_entry(klass, mid);
    }

    VM_ASSERT(cme == NULL || IMEMO_TYPE_P(cme, imemo_ment));

    if (cme == NULL) {
        // undef or not found: can't cache the information
        VM_ASSERT(vm_cc_cme(&vm_empty_cc) == NULL);
        return &vm_empty_cc;
    }

    VM_ASSERT(cme == rb_callable_method_entry(klass, mid));

    METHOD_ENTRY_CACHED_SET((struct rb_callable_method_entry_struct *)cme);

    if (ccs == NULL) {
        VM_ASSERT(cc_tbl != NULL);

        if (LIKELY(rb_id_table_lookup(cc_tbl, mid, &ccs_data))) {
            // rb_callable_method_entry() prepares ccs.
            ccs = (struct rb_class_cc_entries *)ccs_data;
        }
        else {
            // TODO: required?
            ccs = vm_ccs_create(klass, cc_tbl, mid, cme);
        }
    }

    cme = check_overloaded_cme(cme, ci);

    const struct rb_callcache *cc = vm_cc_new(klass, cme, vm_call_general);
    vm_ccs_push(klass, ccs, ci, cc);

    VM_ASSERT(vm_cc_cme(cc) != NULL);
    VM_ASSERT(cme->called_id == mid);
    VM_ASSERT(vm_cc_cme(cc)->called_id == mid);

    return cc;
}
2086
/* Locked wrapper around vm_search_cc(): global method-search slow path
 * shared by the interpreter and the JIT.  Asserts the invariants of the
 * returned call cache. */
MJIT_FUNC_EXPORTED const struct rb_callcache *
rb_vm_search_method_slowpath(const struct rb_callinfo *ci, VALUE klass)
{
    const struct rb_callcache *cc;

    VM_ASSERT(RB_TYPE_P(klass, T_CLASS) || RB_TYPE_P(klass, T_ICLASS));

    RB_VM_LOCK_ENTER();
    {
        cc = vm_search_cc(klass, ci);

        VM_ASSERT(cc);
        VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
        VM_ASSERT(cc == vm_cc_empty() || cc->klass == klass);
        VM_ASSERT(cc == vm_cc_empty() || callable_method_entry_p(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || !METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)));
        VM_ASSERT(cc == vm_cc_empty() || vm_cc_cme(cc)->called_id == vm_ci_mid(ci));
    }
    RB_VM_LOCK_LEAVE();

    return cc;
}
2109#endif
2110
/* Inline-cache miss path: run the global slow-path search, then store
 * the resulting cc back into the call data's inline cache (with a write
 * barrier on the owning iseq).  The debug counters classify what kind
 * of miss occurred. */
static const struct rb_callcache *
vm_search_method_slowpath0(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
#if USE_DEBUG_COUNTER
    const struct rb_callcache *old_cc = cd->cc;
#endif

    const struct rb_callcache *cc = rb_vm_search_method_slowpath(cd->ci, klass);

#if OPT_INLINE_METHOD_CACHE
    cd->cc = cc;

    const struct rb_callcache *empty_cc =
#ifdef MJIT_HEADER
        rb_vm_empty_cc();
#else
        &vm_empty_cc;
#endif
    /* the shared empty cc is immortal and needs no barrier */
    if (cd_owner && cc != empty_cc) RB_OBJ_WRITTEN(cd_owner, Qundef, cc);

#if USE_DEBUG_COUNTER
    if (old_cc == empty_cc) {
        // empty
        RB_DEBUG_COUNTER_INC(mc_inline_miss_empty);
    }
    else if (old_cc == cc) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cc);
    }
    else if (vm_cc_cme(old_cc) == vm_cc_cme(cc)) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_cme);
    }
    else if (vm_cc_cme(old_cc) && vm_cc_cme(cc) &&
             vm_cc_cme(old_cc)->def == vm_cc_cme(cc)->def) {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_same_def);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_diff);
    }
#endif
#endif // OPT_INLINE_METHOD_CACHE

    VM_ASSERT(vm_cc_cme(cc) == NULL ||
              vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci));

    return cc;
}
2157
2158#ifndef MJIT_HEADER
2159ALWAYS_INLINE(static const struct rb_callcache *vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass));
2160#endif
/* Inline method cache lookup: return the cached cc when it matches the
 * receiver's class and its method entry has not been invalidated;
 * otherwise fall through to the slow path (which refills the cache). */
static const struct rb_callcache *
vm_search_method_fastpath(VALUE cd_owner, struct rb_call_data *cd, VALUE klass)
{
    const struct rb_callcache *cc = cd->cc;

#if OPT_INLINE_METHOD_CACHE
    if (LIKELY(vm_cc_class_check(cc, klass))) {
        if (LIKELY(!METHOD_ENTRY_INVALIDATED(vm_cc_cme(cc)))) {
            VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));
            RB_DEBUG_COUNTER_INC(mc_inline_hit);
            VM_ASSERT(vm_cc_cme(cc) == NULL ||                        // not found
                      (vm_ci_flag(cd->ci) & VM_CALL_SUPER) ||         // search_super w/ define_method
                      vm_cc_cme(cc)->called_id == vm_ci_mid(cd->ci)); // cme->called_id == ci->mid

            return cc;
        }
        RB_DEBUG_COUNTER_INC(mc_inline_miss_invalidated);
    }
    else {
        RB_DEBUG_COUNTER_INC(mc_inline_miss_klass);
    }
#endif

    return vm_search_method_slowpath0(cd_owner, cd, klass);
}
2186
/* Resolve the call cache for dispatching cd's method on `recv`. */
static const struct rb_callcache *
vm_search_method(VALUE cd_owner, struct rb_call_data *cd, VALUE recv)
{
    VALUE klass = CLASS_OF(recv);
    VM_ASSERT(klass != Qfalse);
    VM_ASSERT(RBASIC_CLASS(klass) == 0 || rb_obj_is_kind_of(klass, rb_cClass));

    return vm_search_method_fastpath(cd_owner, cd, klass);
}
2196
2197#if __has_attribute(transparent_union)
2198typedef union {
2199 VALUE (*anyargs)(ANYARGS);
2200 VALUE (*f00)(VALUE);
2201 VALUE (*f01)(VALUE, VALUE);
2202 VALUE (*f02)(VALUE, VALUE, VALUE);
2203 VALUE (*f03)(VALUE, VALUE, VALUE, VALUE);
2204 VALUE (*f04)(VALUE, VALUE, VALUE, VALUE, VALUE);
2205 VALUE (*f05)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2206 VALUE (*f06)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2207 VALUE (*f07)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE, VALUE);
2216 VALUE (*fm1)(int, union { VALUE *x; const VALUE *y; } __attribute__((__transparent_union__)), VALUE);
2217} __attribute__((__transparent_union__)) cfunc_type;
2218#else
2219typedef VALUE (*cfunc_type)(ANYARGS);
2220#endif
2221
/* Return true iff `me` is a C-function method whose implementation is
 * exactly `func` (used to detect unredefined core methods like
 * rb_obj_equal). */
static inline int
check_cfunc(const rb_callable_method_entry_t *me, cfunc_type func)
{
    if (! me) {
        return false;
    }
    else {
        VM_ASSERT(IMEMO_TYPE_P(me, imemo_ment));
        VM_ASSERT(callable_method_entry_p(me));
        VM_ASSERT(me->def);
        if (me->def->type != VM_METHOD_TYPE_CFUNC) {
            return false;
        }
        else {
#if __has_attribute(transparent_union)
            /* compare through the union's generic member */
            return me->def->body.cfunc.func == func.anyargs;
#else
            return me->def->body.cfunc.func == func;
#endif
        }
    }
}
2244
/* True iff dispatching cd's method on `recv` would call exactly the C
 * function `func` (i.e. the core method has not been redefined). */
static inline int
vm_method_cfunc_is(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv, cfunc_type func)
{
    VM_ASSERT(iseq != NULL);
    const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);
    return check_cfunc(vm_cc_cme(cc), func);
}
2252
/* True when BOP_EQ (==) has not been redefined for core type `t`. */
#define EQ_UNREDEFINED_P(t) BASIC_OP_UNREDEFINED_P(BOP_EQ, t##_REDEFINED_OP_FLAG)
2254
2255static inline bool
2256FIXNUM_2_P(VALUE a, VALUE b)
2257{
2258 /* FIXNUM_P(a) && FIXNUM_P(b)
2259 * == ((a & 1) && (b & 1))
2260 * == a & b & 1 */
2261 SIGNED_VALUE x = a;
2262 SIGNED_VALUE y = b;
2263 SIGNED_VALUE z = x & y & 1;
2264 return z == 1;
2265}
2266
/* True iff both values are Flonums (immediate floats); always false when
 * the platform does not use flonums. */
static inline bool
FLONUM_2_P(VALUE a, VALUE b)
{
#if USE_FLONUM
    /* FLONUM_P(a) && FLONUM_P(b)
     * == ((a & 3) == 2) && ((b & 3) == 2)
     * == ! ((a ^ 2) | (b ^ 2) & 3)
     */
    SIGNED_VALUE x = a;
    SIGNED_VALUE y = b;
    SIGNED_VALUE z = ((x ^ 2) | (y ^ 2)) & 3;
    return !z;
#else
    return false;
#endif
}
2283
/* Specialized `==` for core types whose == has not been redefined:
 * Fixnum/Flonum/static Symbol compare by identity; Float and String get
 * direct value comparison.  Returns Qundef when no specialization
 * applies and a full method dispatch is required. */
static VALUE
opt_equality_specialized(VALUE recv, VALUE obj)
{
    if (FIXNUM_2_P(recv, obj) && EQ_UNREDEFINED_P(INTEGER)) {
        goto compare_by_identity;
    }
    else if (FLONUM_2_P(recv, obj) && EQ_UNREDEFINED_P(FLOAT)) {
        goto compare_by_identity;
    }
    else if (STATIC_SYM_P(recv) && STATIC_SYM_P(obj) && EQ_UNREDEFINED_P(SYMBOL)) {
        goto compare_by_identity;
    }
    else if (SPECIAL_CONST_P(recv)) {
        //
    }
    else if (RBASIC_CLASS(recv) == rb_cFloat && RB_FLOAT_TYPE_P(obj) && EQ_UNREDEFINED_P(FLOAT)) {
        double a = RFLOAT_VALUE(recv);
        double b = RFLOAT_VALUE(obj);

#if MSC_VERSION_BEFORE(1300)
        /* old MSVC mishandles NaN comparison; check explicitly */
        if (isnan(a)) {
            return Qfalse;
        }
        else if (isnan(b)) {
            return Qfalse;
        }
        else
#endif
        return RBOOL(a == b);
    }
    else if (RBASIC_CLASS(recv) == rb_cString && EQ_UNREDEFINED_P(STRING)) {
        if (recv == obj) {
            return Qtrue;
        }
        else if (RB_TYPE_P(obj, T_STRING)) {
            return rb_str_eql_internal(obj, recv);
        }
    }
    return Qundef;

  compare_by_identity:
    return RBOOL(recv == obj);
}
2327
2328static VALUE
2329opt_equality(const rb_iseq_t *cd_owner, VALUE recv, VALUE obj, CALL_DATA cd)
2330{
2331 VM_ASSERT(cd_owner != NULL);
2332
2333 VALUE val = opt_equality_specialized(recv, obj);
2334 if (!UNDEF_P(val)) return val;
2335
2336 if (!vm_method_cfunc_is(cd_owner, cd, recv, rb_obj_equal)) {
2337 return Qundef;
2338 }
2339 else {
2340 return RBOOL(recv == obj);
2341 }
2342}
2343
2344#undef EQ_UNREDEFINED_P
2345
2346#ifndef MJIT_HEADER
2347
2348static inline const struct rb_callcache *gccct_method_search(rb_execution_context_t *ec, VALUE recv, ID mid, int argc); // vm_eval.c
2349NOINLINE(static VALUE opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid));
2350
2351static VALUE
2352opt_equality_by_mid_slowpath(VALUE recv, VALUE obj, ID mid)
2353{
2354 const struct rb_callcache *cc = gccct_method_search(GET_EC(), recv, mid, 1);
2355
2356 if (cc && check_cfunc(vm_cc_cme(cc), rb_obj_equal)) {
2357 return RBOOL(recv == obj);
2358 }
2359 else {
2360 return Qundef;
2361 }
2362}
2363
2364static VALUE
2365opt_equality_by_mid(VALUE recv, VALUE obj, ID mid)
2366{
2367 VALUE val = opt_equality_specialized(recv, obj);
2368 if (!UNDEF_P(val)) {
2369 return val;
2370 }
2371 else {
2372 return opt_equality_by_mid_slowpath(recv, obj, mid);
2373 }
2374}
2375
/* Optimized Object#== (idEq).  Returns Qundef when no fast path applies
 * and the caller must perform a full method call. */
VALUE
rb_equal_opt(VALUE obj1, VALUE obj2)
{
    return opt_equality_by_mid(obj1, obj2, idEq);
}
2381
/* Optimized Object#eql? (idEqlP).  Returns Qundef when no fast path
 * applies and the caller must perform a full method call. */
VALUE
rb_eql_opt(VALUE obj1, VALUE obj2)
{
    return opt_equality_by_mid(obj1, obj2, idEqlP);
}
2387
2388#endif // MJIT_HEADER
2389
2390extern VALUE rb_vm_call0(rb_execution_context_t *ec, VALUE, ID, int, const VALUE*, const rb_callable_method_entry_t *, int kw_splat);
2391extern VALUE rb_vm_call_with_refinements(rb_execution_context_t *, VALUE, ID, int, const VALUE *, int);
2392
/* Implements the comparison for the `checkmatch` family of instructions.
 * WHEN:   returns the pattern itself (caller tests truthiness).
 * RESCUE: requires a class/module pattern, then behaves like CASE.
 * CASE:   dispatches pattern === target, honoring refinements. */
static VALUE
check_match(rb_execution_context_t *ec, VALUE pattern, VALUE target, enum vm_check_match_type type)
{
    switch (type) {
      case VM_CHECKMATCH_TYPE_WHEN:
        return pattern;
      case VM_CHECKMATCH_TYPE_RESCUE:
        if (!rb_obj_is_kind_of(pattern, rb_cModule)) {
            rb_raise(rb_eTypeError, "class or module required for rescue clause");
        }
        /* fall through */
      case VM_CHECKMATCH_TYPE_CASE: {
        return rb_vm_call_with_refinements(ec, pattern, idEqq, 1, &target, RB_NO_KEYWORDS);
      }
      default:
        rb_bug("check_match: unreachable");
    }
}
2411
2412
2413#if MSC_VERSION_BEFORE(1300)
2414#define CHECK_CMP_NAN(a, b) if (isnan(a) || isnan(b)) return Qfalse;
2415#else
2416#define CHECK_CMP_NAN(a, b) /* do nothing */
2417#endif
2418
/* Float comparison helpers for the opt_lt/opt_le/opt_gt/opt_ge fast paths.
 * CHECK_CMP_NAN forces Qfalse for NaN operands on old MSVC; elsewhere the
 * C comparison operators already yield false for NaN. */
static inline VALUE
double_cmp_lt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a < b);
}

static inline VALUE
double_cmp_le(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a <= b);
}

static inline VALUE
double_cmp_gt(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a > b);
}

static inline VALUE
double_cmp_ge(double a, double b)
{
    CHECK_CMP_NAN(a, b);
    return RBOOL(a >= b);
}
2446
/* Return the base pointer (start of the local frame) for `cfp`.
 * Currently this simply reads the stored __bp__; the disabled branch
 * recomputes it from the previous frame's sp and is kept for a possible
 * future optimization (see comment below). */
static inline VALUE *
vm_base_ptr(const rb_control_frame_t *cfp)
{
#if 0 // we may optimize and use this once we confirm it does not spoil performance on JIT.
    const rb_control_frame_t *prev_cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);

    if (cfp->iseq && VM_FRAME_RUBYFRAME_P(cfp)) {
        VALUE *bp = prev_cfp->sp + ISEQ_BODY(cfp->iseq)->local_table_size + VM_ENV_DATA_SIZE;
        if (ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_METHOD) {
            /* adjust `self' */
            bp += 1;
        }
#if VM_DEBUG_BP_CHECK
        if (bp != cfp->bp_check) {
            ruby_debug_printf("bp_check: %ld, bp: %ld\n",
                              (long)(cfp->bp_check - GET_EC()->vm_stack),
                              (long)(bp - GET_EC()->vm_stack));
            rb_bug("vm_base_ptr: unreachable");
        }
#endif
        return bp;
    }
    else {
        return NULL;
    }
#else
    return cfp->__bp__;
#endif
}
2476
2477/* method call processes with call_info */
2478
2479#include "vm_args.c"
2480
2481static inline VALUE vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc, int param_size, int local_size);
2482ALWAYS_INLINE(static VALUE vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me, int opt_pc, int param_size, int local_size));
2483static inline VALUE vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc);
2484static VALUE vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
2485static VALUE vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2486static VALUE vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2487static inline VALUE vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling);
2488
2489static vm_call_handler vm_call_iseq_setup_func(const struct rb_callinfo *ci, const int param_size, const int local_size);
2490
/* Fastpath entry for a tailcall whose optional-argument pc is 0. */
static VALUE
vm_call_iseq_setup_tailcall_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    RB_DEBUG_COUNTER_INC(ccf_iseq_setup_tailcall_0start);

    return vm_call_iseq_setup_tailcall(ec, cfp, calling, 0);
}
2498
2499static VALUE
2500vm_call_iseq_setup_normal_0start(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
2501{
2502 RB_DEBUG_COUNTER_INC(ccf_iseq_setup_0start);
2503
2504 const struct rb_callcache *cc = calling->cc;
2505 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2506 int param = ISEQ_BODY(iseq)->param.size;
2507 int local = ISEQ_BODY(iseq)->local_table_size;
2508 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
2509}
2510
2511MJIT_STATIC bool
2512rb_simple_iseq_p(const rb_iseq_t *iseq)
2513{
2514 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2515 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2516 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2517 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2518 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2519 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2520 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2521}
2522
2523MJIT_FUNC_EXPORTED bool
2524rb_iseq_only_optparam_p(const rb_iseq_t *iseq)
2525{
2526 return ISEQ_BODY(iseq)->param.flags.has_opt == TRUE &&
2527 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2528 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2529 ISEQ_BODY(iseq)->param.flags.has_kw == FALSE &&
2530 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2531 ISEQ_BODY(iseq)->param.flags.accepts_no_kwarg == FALSE &&
2532 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2533}
2534
2535MJIT_FUNC_EXPORTED bool
2536rb_iseq_only_kwparam_p(const rb_iseq_t *iseq)
2537{
2538 return ISEQ_BODY(iseq)->param.flags.has_opt == FALSE &&
2539 ISEQ_BODY(iseq)->param.flags.has_rest == FALSE &&
2540 ISEQ_BODY(iseq)->param.flags.has_post == FALSE &&
2541 ISEQ_BODY(iseq)->param.flags.has_kw == TRUE &&
2542 ISEQ_BODY(iseq)->param.flags.has_kwrest == FALSE &&
2543 ISEQ_BODY(iseq)->param.flags.has_block == FALSE;
2544}
2545
// If true, cc->call needs to include `CALLER_SETUP_ARG` (i.e. can't be skipped in fastpath)
MJIT_STATIC bool
rb_splat_or_kwargs_p(const struct rb_callinfo *restrict ci)
{
    /* The call site passes *splat or keyword-style arguments. */
    return IS_ARGS_SPLAT(ci) || IS_ARGS_KW_OR_KW_SPLAT(ci);
}
2552
2553
/* Normalize the caller's argument layout on the stack before parameter
 * setup: expand a *splat array, promote a trailing "pass as keywords"
 * hash into a kw splat, and materialize VM_CALL_KWARG keywords into a
 * hash.  After this runs, several vm_ci_flag(ci) bits no longer reflect
 * the actual stack layout (noted inline).  Mutates cfp->sp and calling. */
static inline void
CALLER_SETUP_ARG(struct rb_control_frame_struct *restrict cfp,
                 struct rb_calling_info *restrict calling,
                 const struct rb_callinfo *restrict ci)
{
    if (UNLIKELY(IS_ARGS_SPLAT(ci))) {
        VALUE final_hash;
        /* This expands the rest argument to the stack.
         * So, vm_ci_flag(ci) & VM_CALL_ARGS_SPLAT is now inconsistent.
         */
        vm_caller_setup_arg_splat(cfp, calling);
        /* a trailing hash flagged RHASH_PASS_AS_KEYWORDS becomes a kw splat;
         * it is dup'ed so the callee may mutate it safely */
        if (!IS_ARGS_KW_OR_KW_SPLAT(ci) &&
            calling->argc > 0 &&
            RB_TYPE_P((final_hash = *(cfp->sp - 1)), T_HASH) &&
            (((struct RHash *)final_hash)->basic.flags & RHASH_PASS_AS_KEYWORDS)) {
            *(cfp->sp - 1) = rb_hash_dup(final_hash);
            calling->kw_splat = 1;
        }
    }
    if (UNLIKELY(IS_ARGS_KW_OR_KW_SPLAT(ci))) {
        if (IS_ARGS_KEYWORD(ci)) {
            /* This converts VM_CALL_KWARG style to VM_CALL_KW_SPLAT style
             * by creating a keyword hash.
             * So, vm_ci_flag(ci) & VM_CALL_KWARG is now inconsistent.
             */
            vm_caller_setup_arg_kw(cfp, calling, ci);
        }
        else {
            VALUE keyword_hash = cfp->sp[-1];
            if (!RB_TYPE_P(keyword_hash, T_HASH)) {
                /* Convert a non-hash keyword splat to a new hash */
                cfp->sp[-1] = rb_hash_dup(rb_to_hash_type(keyword_hash));
            }
            else if (!IS_ARGS_KW_SPLAT_MUT(ci)) {
                /* Convert a hash keyword splat to a new hash unless
                 * a mutable keyword splat was passed.
                 */
                cfp->sp[-1] = rb_hash_dup(keyword_hash);
            }
        }
    }
}
2596
/* Drop a trailing empty keyword-splat hash from the stack so the callee
 * does not receive it as a positional argument.  Adjusts sp, argc and
 * kw_splat together. */
static inline void
CALLER_REMOVE_EMPTY_KW_SPLAT(struct rb_control_frame_struct *restrict cfp,
                             struct rb_calling_info *restrict calling,
                             const struct rb_callinfo *restrict ci)
{
    if (UNLIKELY(calling->kw_splat)) {
        /* This removes the last Hash object if it is empty.
         * So, vm_ci_flag(ci) & VM_CALL_KW_SPLAT is now inconsistent.
         */
        if (RHASH_EMPTY_P(cfp->sp[-1])) {
            cfp->sp--;
            calling->argc--;
            calling->kw_splat = 0;
        }
    }
}
2613
2614#define USE_OPT_HIST 0
2615
2616#if USE_OPT_HIST
2617#define OPT_HIST_MAX 64
2618static int opt_hist[OPT_HIST_MAX+1];
2619
2620__attribute__((destructor))
2621static void
2622opt_hist_show_results_at_exit(void)
2623{
2624 for (int i=0; i<OPT_HIST_MAX; i++) {
2625 ruby_debug_printf("opt_hist\t%d\t%d\n", i, opt_hist[i]);
2626 }
2627}
2628#endif
2629
2630static VALUE
2631vm_call_iseq_setup_normal_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2632 struct rb_calling_info *calling)
2633{
2634 const struct rb_callcache *cc = calling->cc;
2635 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2636 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2637 const int opt = calling->argc - lead_num;
2638 const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
2639 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2640 const int param = ISEQ_BODY(iseq)->param.size;
2641 const int local = ISEQ_BODY(iseq)->local_table_size;
2642 const int delta = opt_num - opt;
2643
2644 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2645
2646#if USE_OPT_HIST
2647 if (opt_pc < OPT_HIST_MAX) {
2648 opt_hist[opt]++;
2649 }
2650 else {
2651 opt_hist[OPT_HIST_MAX]++;
2652 }
2653#endif
2654
2655 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param - delta, local);
2656}
2657
2658static VALUE
2659vm_call_iseq_setup_tailcall_opt_start(rb_execution_context_t *ec, rb_control_frame_t *cfp,
2660 struct rb_calling_info *calling)
2661{
2662 const struct rb_callcache *cc = calling->cc;
2663 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2664 const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
2665 const int opt = calling->argc - lead_num;
2666 const int opt_pc = (int)ISEQ_BODY(iseq)->param.opt_table[opt];
2667
2668 RB_DEBUG_COUNTER_INC(ccf_iseq_opt);
2669
2670#if USE_OPT_HIST
2671 if (opt_pc < OPT_HIST_MAX) {
2672 opt_hist[opt]++;
2673 }
2674 else {
2675 opt_hist[OPT_HIST_MAX]++;
2676 }
2677#endif
2678
2679 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2680}
2681
2682static void
2683args_setup_kw_parameters(rb_execution_context_t *const ec, const rb_iseq_t *const iseq,
2684 VALUE *const passed_values, const int passed_keyword_len, const VALUE *const passed_keywords,
2685 VALUE *const locals);
2686
/* Fastpath for a kw-only-parameter iseq called with literal keyword
 * arguments (VM_CALL_KWARG): copy the caller's keyword values aside,
 * bind them into the callee's keyword locals, then push a normal frame. */
static VALUE
vm_call_iseq_setup_kwparm_kwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(vm_ci_flag(ci) & VM_CALL_KWARG);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw1);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);
    const int ci_kw_len = kw_arg->keyword_len;
    const VALUE * const ci_keywords = kw_arg->keywords;
    VALUE *argv = cfp->sp - calling->argc;
    /* keyword locals live just below the kw-specified bits slot */
    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
    const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
    /* copy the kw values out of the stack; args_setup_kw_parameters consumes them */
    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);
    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
2713
/* Fastpath for a kw-only-parameter iseq called with no keyword arguments:
 * fill every keyword local with its default value and push a normal frame. */
static VALUE
vm_call_iseq_setup_kwparm_nokwarg(rb_execution_context_t *ec, rb_control_frame_t *cfp,
                                  struct rb_calling_info *calling)
{
    const struct rb_callinfo *MAYBE_UNUSED(ci) = calling->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT((vm_ci_flag(ci) & VM_CALL_KWARG) == 0);
    RB_DEBUG_COUNTER_INC(ccf_iseq_kw2);

    const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
    const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;
    VALUE * const argv = cfp->sp - calling->argc;
    VALUE * const klocals = argv + kw_param->bits_start - kw_param->num;

    int i;
    for (i=0; i<kw_param->num; i++) {
        klocals[i] = kw_param->default_values[i];
    }
    klocals[i] = INT2FIX(0); // kw specify flag
    // NOTE:
    // nobody checks this value, but it should be cleared because it can
    // point to invalid VALUEs (T_NONE objects, raw pointers and so on).

    int param = ISEQ_BODY(iseq)->param.size;
    int local = ISEQ_BODY(iseq)->local_table_size;
    return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), 0, param, local);
}
2742
/* Arrange the caller's arguments on the stack to match `iseq`'s parameter
 * layout and, when possible, install a specialized fastpath into the call
 * cache.  Returns the pc offset at which execution of the callee should
 * start (non-zero only for partially-supplied optional parameters). */
static inline int
vm_callee_setup_arg(rb_execution_context_t *ec, struct rb_calling_info *calling,
                    const rb_iseq_t *iseq, VALUE *argv, int param_size, int local_size)
{
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;
    bool cacheable_ci = vm_ci_markable(ci);

    if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_KW_SPLAT))) {
        /* case 1: required positional parameters only */
        if (LIKELY(rb_simple_iseq_p(iseq))) {
            rb_control_frame_t *cfp = ec->cfp;
            CALLER_SETUP_ARG(cfp, calling, ci);
            CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

            if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
                argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }

            /* CALLER_SETUP_ARG must not have replaced ci/cc */
            VM_ASSERT(ci == calling->ci);
            VM_ASSERT(cc == calling->cc);
            CC_SET_FASTPATH(cc, vm_call_iseq_setup_func(ci, param_size, local_size), cacheable_ci && vm_call_iseq_optimizable_p(ci, cc));
            return 0;
        }
        /* case 2: only optional positional parameters */
        else if (rb_iseq_only_optparam_p(iseq)) {
            rb_control_frame_t *cfp = ec->cfp;
            CALLER_SETUP_ARG(cfp, calling, ci);
            CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int opt_num = ISEQ_BODY(iseq)->param.opt_num;
            const int argc = calling->argc;
            const int opt = argc - lead_num;

            if (opt < 0 || opt > opt_num) {
                argument_arity_error(ec, iseq, argc, lead_num, lead_num + opt_num);
            }

            if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_normal_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && vm_call_cacheable(ci, cc));
            }
            else {
                CC_SET_FASTPATH(cc, vm_call_iseq_setup_tailcall_opt_start,
                                !IS_ARGS_SPLAT(ci) && !IS_ARGS_KEYWORD(ci) &&
                                cacheable_ci && vm_call_cacheable(ci, cc));
            }

            /* initialize opt vars for self-references */
            VM_ASSERT((int)ISEQ_BODY(iseq)->param.size == lead_num + opt_num);
            for (int i=argc; i<lead_num + opt_num; i++) {
                argv[i] = Qnil;
            }
            return (int)ISEQ_BODY(iseq)->param.opt_table[opt];
        }
        /* case 3: only keyword parameters (and the caller did not splat) */
        else if (rb_iseq_only_kwparam_p(iseq) && !IS_ARGS_SPLAT(ci)) {
            const int lead_num = ISEQ_BODY(iseq)->param.lead_num;
            const int argc = calling->argc;
            const struct rb_iseq_param_keyword *kw_param = ISEQ_BODY(iseq)->param.keyword;

            if (vm_ci_flag(ci) & VM_CALL_KWARG) {
                const struct rb_callinfo_kwarg *kw_arg = vm_ci_kwarg(ci);

                if (argc - kw_arg->keyword_len == lead_num) {
                    const int ci_kw_len = kw_arg->keyword_len;
                    const VALUE * const ci_keywords = kw_arg->keywords;
                    VALUE * const ci_kws = ALLOCA_N(VALUE, ci_kw_len);
                    MEMCPY(ci_kws, argv + lead_num, VALUE, ci_kw_len);

                    VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                    args_setup_kw_parameters(ec, iseq, ci_kws, ci_kw_len, ci_keywords, klocals);

                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_kwarg,
                                    cacheable_ci && vm_call_cacheable(ci, cc));

                    return 0;
                }
            }
            else if (argc == lead_num) {
                /* no kwarg */
                VALUE *const klocals = argv + kw_param->bits_start - kw_param->num;
                args_setup_kw_parameters(ec, iseq, NULL, 0, NULL, klocals);

                if (klocals[kw_param->num] == INT2FIX(0)) {
                    /* copy from default_values */
                    CC_SET_FASTPATH(cc, vm_call_iseq_setup_kwparm_nokwarg,
                                    cacheable_ci && vm_call_cacheable(ci, cc));
                }

                return 0;
            }
        }
    }

    /* general case: splats, rest/post/kwrest/block params, kw splat, ... */
    return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_method);
}
2839
2840static VALUE
2841vm_call_iseq_setup(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
2842{
2843 RB_DEBUG_COUNTER_INC(ccf_iseq_setup);
2844
2845 const struct rb_callcache *cc = calling->cc;
2846 const rb_iseq_t *iseq = def_iseq_ptr(vm_cc_cme(cc)->def);
2847 const int param_size = ISEQ_BODY(iseq)->param.size;
2848 const int local_size = ISEQ_BODY(iseq)->local_table_size;
2849 const int opt_pc = vm_callee_setup_arg(ec, calling, def_iseq_ptr(vm_cc_cme(cc)->def), cfp->sp - calling->argc, param_size, local_size);
2850 return vm_call_iseq_setup_2(ec, cfp, calling, opt_pc, param_size, local_size);
2851}
2852
2853static inline VALUE
2854vm_call_iseq_setup_2(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
2855 int opt_pc, int param_size, int local_size)
2856{
2857 const struct rb_callinfo *ci = calling->ci;
2858 const struct rb_callcache *cc = calling->cc;
2859
2860 if (LIKELY(!(vm_ci_flag(ci) & VM_CALL_TAILCALL))) {
2861 return vm_call_iseq_setup_normal(ec, cfp, calling, vm_cc_cme(cc), opt_pc, param_size, local_size);
2862 }
2863 else {
2864 return vm_call_iseq_setup_tailcall(ec, cfp, calling, opt_pc);
2865 }
2866}
2867
2868static inline VALUE
2869vm_call_iseq_setup_normal(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, const rb_callable_method_entry_t *me,
2870 int opt_pc, int param_size, int local_size)
2871{
2872 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
2873 VALUE *argv = cfp->sp - calling->argc;
2874 VALUE *sp = argv + param_size;
2875 cfp->sp = argv - 1 /* recv */;
2876
2877 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL, calling->recv,
2878 calling->block_handler, (VALUE)me,
2879 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
2880 local_size - param_size,
2881 ISEQ_BODY(iseq)->stack_max);
2882 return Qundef;
2883}
2884
2885static inline VALUE
2886vm_call_iseq_setup_tailcall(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, int opt_pc)
2887{
2888 const struct rb_callcache *cc = calling->cc;
2889 unsigned int i;
2890 VALUE *argv = cfp->sp - calling->argc;
2891 const rb_callable_method_entry_t *me = vm_cc_cme(cc);
2892 const rb_iseq_t *iseq = def_iseq_ptr(me->def);
2893 VALUE *src_argv = argv;
2894 VALUE *sp_orig, *sp;
2895 VALUE finish_flag = VM_FRAME_FINISHED_P(cfp) ? VM_FRAME_FLAG_FINISH : 0;
2896
2897 if (VM_BH_FROM_CFP_P(calling->block_handler, cfp)) {
2898 struct rb_captured_block *dst_captured = VM_CFP_TO_CAPTURED_BLOCK(RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp));
2899 const struct rb_captured_block *src_captured = VM_BH_TO_CAPT_BLOCK(calling->block_handler);
2900 dst_captured->code.val = src_captured->code.val;
2901 if (VM_BH_ISEQ_BLOCK_P(calling->block_handler)) {
2902 calling->block_handler = VM_BH_FROM_ISEQ_BLOCK(dst_captured);
2903 }
2904 else {
2905 calling->block_handler = VM_BH_FROM_IFUNC_BLOCK(dst_captured);
2906 }
2907 }
2908
2909 vm_pop_frame(ec, cfp, cfp->ep);
2910 cfp = ec->cfp;
2911
2912 sp_orig = sp = cfp->sp;
2913
2914 /* push self */
2915 sp[0] = calling->recv;
2916 sp++;
2917
2918 /* copy arguments */
2919 for (i=0; i < ISEQ_BODY(iseq)->param.size; i++) {
2920 *sp++ = src_argv[i];
2921 }
2922
2923 vm_push_frame(ec, iseq, VM_FRAME_MAGIC_METHOD | VM_ENV_FLAG_LOCAL | finish_flag,
2924 calling->recv, calling->block_handler, (VALUE)me,
2925 ISEQ_BODY(iseq)->iseq_encoded + opt_pc, sp,
2926 ISEQ_BODY(iseq)->local_table_size - ISEQ_BODY(iseq)->param.size,
2927 ISEQ_BODY(iseq)->stack_max);
2928
2929 cfp->sp = sp_orig;
2930
2931 return Qundef;
2932}
2933
/* Raise Ractor::UnsafeError unless we are running on the main ractor;
 * guards C methods that are not marked ractor-safe. */
static void
ractor_unsafe_check(void)
{
    if (!rb_ractor_main_p()) {
        rb_raise(rb_eRactorUnsafeError, "ractor unsafe method called from not main ractor");
    }
}
2941
2942static VALUE
2943call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2944{
2945 ractor_unsafe_check();
2946 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
2947 return (*f)(recv, rb_ary_new4(argc, argv));
2948}
2949
2950static VALUE
2951call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2952{
2953 ractor_unsafe_check();
2954 VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
2955 return (*f)(argc, argv, recv);
2956}
2957
2958static VALUE
2959call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2960{
2961 ractor_unsafe_check();
2962 VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
2963 return (*f)(recv);
2964}
2965
2966static VALUE
2967call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2968{
2969 ractor_unsafe_check();
2970 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
2971 return (*f)(recv, argv[0]);
2972}
2973
2974static VALUE
2975call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2976{
2977 ractor_unsafe_check();
2978 VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
2979 return (*f)(recv, argv[0], argv[1]);
2980}
2981
2982static VALUE
2983call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2984{
2985 ractor_unsafe_check();
2986 VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
2987 return (*f)(recv, argv[0], argv[1], argv[2]);
2988}
2989
2990static VALUE
2991call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
2992{
2993 ractor_unsafe_check();
2994 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
2995 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
2996}
2997
2998static VALUE
2999call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3000{
3001 ractor_unsafe_check();
3002 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3003 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3004}
3005
3006static VALUE
3007call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3008{
3009 ractor_unsafe_check();
3011 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3012}
3013
3014static VALUE
3015call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3016{
3017 ractor_unsafe_check();
3019 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3020}
3021
3022static VALUE
3023call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3024{
3025 ractor_unsafe_check();
3027 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3028}
3029
3030static VALUE
3031call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3032{
3033 ractor_unsafe_check();
3035 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3036}
3037
3038static VALUE
3039call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3040{
3041 ractor_unsafe_check();
3043 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3044}
3045
3046static VALUE
3047call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3048{
3049 ractor_unsafe_check();
3051 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3052}
3053
3054static VALUE
3055call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3056{
3057 ractor_unsafe_check();
3059 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3060}
3061
3062static VALUE
3063call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3064{
3065 ractor_unsafe_check();
3067 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3068}
3069
3070static VALUE
3071call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3072{
3073 ractor_unsafe_check();
3075 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3076}
3077
3078static VALUE
3079call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3080{
3081 ractor_unsafe_check();
3083 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3084}
3085
3086static VALUE
3087ractor_safe_call_cfunc_m2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3088{
3089 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
3090 return (*f)(recv, rb_ary_new4(argc, argv));
3091}
3092
3093static VALUE
3094ractor_safe_call_cfunc_m1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3095{
3096 VALUE(*f)(int, const VALUE *, VALUE) = (VALUE(*)(int, const VALUE *, VALUE))func;
3097 return (*f)(argc, argv, recv);
3098}
3099
3100static VALUE
3101ractor_safe_call_cfunc_0(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3102{
3103 VALUE(*f)(VALUE) = (VALUE(*)(VALUE))func;
3104 return (*f)(recv);
3105}
3106
3107static VALUE
3108ractor_safe_call_cfunc_1(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3109{
3110 VALUE(*f)(VALUE, VALUE) = (VALUE(*)(VALUE, VALUE))func;
3111 return (*f)(recv, argv[0]);
3112}
3113
3114static VALUE
3115ractor_safe_call_cfunc_2(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3116{
3117 VALUE(*f)(VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE))func;
3118 return (*f)(recv, argv[0], argv[1]);
3119}
3120
3121static VALUE
3122ractor_safe_call_cfunc_3(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3123{
3124 VALUE(*f)(VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE))func;
3125 return (*f)(recv, argv[0], argv[1], argv[2]);
3126}
3127
3128static VALUE
3129ractor_safe_call_cfunc_4(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3130{
3131 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE))func;
3132 return (*f)(recv, argv[0], argv[1], argv[2], argv[3]);
3133}
3134
3135static VALUE
3136ractor_safe_call_cfunc_5(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3137{
3138 VALUE(*f)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE) = (VALUE(*)(VALUE, VALUE, VALUE, VALUE, VALUE, VALUE))func;
3139 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4]);
3140}
3141
3142static VALUE
3143ractor_safe_call_cfunc_6(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3144{
3146 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
3147}
3148
3149static VALUE
3150ractor_safe_call_cfunc_7(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3151{
3153 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
3154}
3155
3156static VALUE
3157ractor_safe_call_cfunc_8(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3158{
3160 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
3161}
3162
3163static VALUE
3164ractor_safe_call_cfunc_9(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3165{
3167 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
3168}
3169
3170static VALUE
3171ractor_safe_call_cfunc_10(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3172{
3174 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
3175}
3176
3177static VALUE
3178ractor_safe_call_cfunc_11(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3179{
3181 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
3182}
3183
3184static VALUE
3185ractor_safe_call_cfunc_12(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3186{
3188 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
3189}
3190
3191static VALUE
3192ractor_safe_call_cfunc_13(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3193{
3195 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
3196}
3197
3198static VALUE
3199ractor_safe_call_cfunc_14(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3200{
3202 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
3203}
3204
3205static VALUE
3206ractor_safe_call_cfunc_15(VALUE recv, int argc, const VALUE *argv, VALUE (*func)(ANYARGS))
3207{
3209 return (*f)(recv, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
3210}
3211
3212static inline int
3213vm_cfp_consistent_p(rb_execution_context_t *ec, const rb_control_frame_t *reg_cfp)
3214{
3215 const int ov_flags = RAISED_STACKOVERFLOW;
3216 if (LIKELY(reg_cfp == ec->cfp + 1)) return TRUE;
3217 if (rb_ec_raised_p(ec, ov_flags)) {
3218 rb_ec_raised_reset(ec, ov_flags);
3219 return TRUE;
3220 }
3221 return FALSE;
3222}
3223
3224#define CHECK_CFP_CONSISTENCY(func) \
3225 (LIKELY(vm_cfp_consistent_p(ec, reg_cfp)) ? (void)0 : \
3226 rb_bug(func ": cfp consistency error (%p, %p)", (void *)reg_cfp, (void *)(ec->cfp+1)))
3227
static inline
const rb_method_cfunc_t *
vm_method_cfunc_entry(const rb_callable_method_entry_t *me)
{
    /* Return the cfunc body of `me`.  When cache verification is enabled,
     * assert that the entry really is a CFUNC (or NOTIMPLEMENTED) method. */
#if VM_DEBUG_VERIFY_METHOD_CACHE
    switch (me->def->type) {
      case VM_METHOD_TYPE_CFUNC:
      case VM_METHOD_TYPE_NOTIMPLEMENTED:
        break;
# define METHOD_BUG(t) case VM_METHOD_TYPE_##t: rb_bug("wrong method type: " #t)
        METHOD_BUG(ISEQ);
        METHOD_BUG(ATTRSET);
        METHOD_BUG(IVAR);
        METHOD_BUG(BMETHOD);
        METHOD_BUG(ZSUPER);
        METHOD_BUG(UNDEF);
        METHOD_BUG(OPTIMIZED);
        METHOD_BUG(MISSING);
        METHOD_BUG(REFINED);
        METHOD_BUG(ALIAS);
# undef METHOD_BUG
      default:
        rb_bug("wrong method type: %d", me->def->type);
    }
#endif
    /* body.cfunc may be unaligned inside the definition union */
    return UNALIGNED_MEMBER_PTR(me->def, body.cfunc);
}
3255
static VALUE
vm_call_cfunc_with_frame(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    /* Invoke a C-implemented method: push a CFUNC frame, check arity, call
     * the invoker, then pop the frame.  C_CALL/C_RETURN event hooks and
     * dtrace probes fire around the call. */
    RB_DEBUG_COUNTER_INC(ccf_cfunc_with_frame);
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;
    VALUE val;
    const rb_callable_method_entry_t *me = vm_cc_cme(cc);
    const rb_method_cfunc_t *cfunc = vm_method_cfunc_entry(me);
    int len = cfunc->argc; /* declared arity; negative means variadic */

    VALUE recv = calling->recv;
    VALUE block_handler = calling->block_handler;
    VALUE frame_type = VM_FRAME_MAGIC_CFUNC | VM_FRAME_FLAG_CFRAME | VM_ENV_FLAG_LOCAL;
    int argc = calling->argc;
    int orig_argc = argc;

    if (UNLIKELY(calling->kw_splat)) {
        frame_type |= VM_FRAME_FLAG_CFRAME_KW;
    }

    RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, me->owner, me->def->original_id);
    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, recv, me->def->original_id, vm_ci_mid(ci), me->owner, Qundef);

    vm_push_frame(ec, NULL, frame_type, recv,
                  block_handler, (VALUE)me,
                  0, ec->cfp->sp, 0, 0);

    if (len >= 0) rb_check_arity(argc, len, len);

    /* drop receiver + arguments from the caller's stack before the call */
    reg_cfp->sp -= orig_argc + 1;
    val = (*cfunc->invoker)(recv, argc, reg_cfp->sp + 1, cfunc->func);

    CHECK_CFP_CONSISTENCY("vm_call_cfunc");

    rb_vm_pop_frame(ec);

    EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, recv, me->def->original_id, vm_ci_mid(ci), me->owner, val);
    RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, me->owner, me->def->original_id);

    return val;
}
3298
static VALUE
vm_call_cfunc(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    /* Slow-path entry for CFUNC calls: normalize caller-side arguments
     * (splat / kw-splat handling), install the with-frame handler as the
     * fastpath when the call shape permits, then perform the call. */
    const struct rb_callinfo *ci = calling->ci;
    RB_DEBUG_COUNTER_INC(ccf_cfunc);

    CALLER_SETUP_ARG(reg_cfp, calling, ci);
    CALLER_REMOVE_EMPTY_KW_SPLAT(reg_cfp, calling, ci);
    CC_SET_FASTPATH(calling->cc, vm_call_cfunc_with_frame, !rb_splat_or_kwargs_p(ci) && !calling->kw_splat);
    return vm_call_cfunc_with_frame(ec, reg_cfp, calling);
}
3310
3311static VALUE
3312vm_call_ivar(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3313{
3314 const struct rb_callcache *cc = calling->cc;
3315 RB_DEBUG_COUNTER_INC(ccf_ivar);
3316 cfp->sp -= 1;
3317 VALUE ivar = vm_getivar(calling->recv, vm_cc_cme(cc)->def->body.attr.id, NULL, NULL, cc, TRUE);
3318 return ivar;
3319}
3320
static VALUE
vm_call_attrset_direct(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_callcache *cc, VALUE obj)
{
    /* Optimized attribute writer: pop value and receiver, then store the
     * ivar using the shape id / index cached in cc; fall back to the
     * generic-table and finally the slow path when the fast store fails. */
    RB_DEBUG_COUNTER_INC(ccf_attrset);
    VALUE val = *(cfp->sp - 1);
    cfp->sp -= 2;
    attr_index_t index = vm_cc_attr_index(cc);
    shape_id_t dest_shape_id = vm_cc_attr_index_dest_shape_id(cc);
    ID id = vm_cc_cme(cc)->def->body.attr.id;
    VALUE res = vm_setivar(obj, id, val, dest_shape_id, index);
    if (UNDEF_P(res)) {
        /* Qundef signals that the fast path could not perform the store. */
        switch (BUILTIN_TYPE(obj)) {
          case T_OBJECT:
          case T_CLASS:
          case T_MODULE:
            break;
          default:
            {
                /* other builtin types keep ivars in the generic table */
                res = vm_setivar_default(obj, id, val, dest_shape_id, index);
                if (!UNDEF_P(res)) {
                    return res;
                }
            }
        }
        res = vm_setivar_slowpath_attr(obj, id, val, cc);
    }
    return res;
}
3350
3351static VALUE
3352vm_call_attrset(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3353{
3354 return vm_call_attrset_direct(ec, cfp, calling->cc, calling->recv);
3355}
3356
3357bool
3358rb_vm_call_ivar_attrset_p(const vm_call_handler ch)
3359{
3360 return (ch == vm_call_ivar || ch == vm_call_attrset);
3361}
3362
static inline VALUE
vm_call_bmethod_body(rb_execution_context_t *ec, struct rb_calling_info *calling, const VALUE *argv)
{
    /* Invoke a method defined via define_method by running its backing
     * Proc.  An unshareable proc may only run in the ractor it was
     * defined in. */
    rb_proc_t *proc;
    VALUE val;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE procv = cme->def->body.bmethod.proc;

    if (!RB_OBJ_SHAREABLE_P(procv) &&
        cme->def->body.bmethod.defined_ractor != rb_ractor_self(rb_ec_ractor_ptr(ec))) {
        rb_raise(rb_eRuntimeError, "defined with an un-shareable Proc in a different Ractor");
    }

    /* control block frame */
    GetProcPtr(procv, proc);
    val = rb_vm_invoke_bmethod(ec, proc, calling->recv, calling->argc, argv, calling->kw_splat, calling->block_handler, vm_cc_cme(cc));

    return val;
}
3383
static VALUE
vm_call_bmethod(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* Copy the arguments off the VM stack into an alloca'd buffer, pop
     * receiver + arguments, then run the bmethod body with the detached
     * argv. */
    RB_DEBUG_COUNTER_INC(ccf_bmethod);

    VALUE *argv;
    int argc;
    const struct rb_callinfo *ci = calling->ci;

    CALLER_SETUP_ARG(cfp, calling, ci);
    argc = calling->argc;
    argv = ALLOCA_N(VALUE, argc);
    MEMCPY(argv, cfp->sp - argc, VALUE, argc);
    cfp->sp += - argc - 1; /* drop the arguments and the receiver */

    return vm_call_bmethod_body(ec, calling, argv);
}
3401
3402MJIT_FUNC_EXPORTED VALUE
3403rb_find_defined_class_by_owner(VALUE current_class, VALUE target_owner)
3404{
3405 VALUE klass = current_class;
3406
3407 /* for prepended Module, then start from cover class */
3408 if (RB_TYPE_P(klass, T_ICLASS) && FL_TEST(klass, RICLASS_IS_ORIGIN) &&
3409 RB_TYPE_P(RBASIC_CLASS(klass), T_CLASS)) {
3410 klass = RBASIC_CLASS(klass);
3411 }
3412
3413 while (RTEST(klass)) {
3414 VALUE owner = RB_TYPE_P(klass, T_ICLASS) ? RBASIC_CLASS(klass) : klass;
3415 if (owner == target_owner) {
3416 return klass;
3417 }
3418 klass = RCLASS_SUPER(klass);
3419 }
3420
3421 return current_class; /* maybe module function */
3422}
3423
3424static const rb_callable_method_entry_t *
3425aliased_callable_method_entry(const rb_callable_method_entry_t *me)
3426{
3427 const rb_method_entry_t *orig_me = me->def->body.alias.original_me;
3428 const rb_callable_method_entry_t *cme;
3429
3430 if (orig_me->defined_class == 0) {
3431 VALUE defined_class = rb_find_defined_class_by_owner(me->defined_class, orig_me->owner);
3432 VM_ASSERT(RB_TYPE_P(orig_me->owner, T_MODULE));
3433 cme = rb_method_entry_complement_defined_class(orig_me, me->called_id, defined_class);
3434
3435 if (me->def->reference_count == 1) {
3436 RB_OBJ_WRITE(me, &me->def->body.alias.original_me, cme);
3437 }
3438 else {
3440 rb_method_definition_create(VM_METHOD_TYPE_ALIAS, me->def->original_id);
3441 rb_method_definition_set((rb_method_entry_t *)me, def, (void *)cme);
3442 }
3443 }
3444 else {
3445 cme = (const rb_callable_method_entry_t *)orig_me;
3446 }
3447
3448 VM_ASSERT(callable_method_entry_p(cme));
3449 return cme;
3450}
3451
3453rb_aliased_callable_method_entry(const rb_callable_method_entry_t *me)
3454{
3455 return aliased_callable_method_entry(me);
3456}
3457
static VALUE
vm_call_alias(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* Swap in an on-stack call cache pointing at the alias's original
     * method entry, then redispatch on that entry's method type. */
    calling->cc = &VM_CC_ON_STACK(Qundef,
                                  vm_call_general,
                                  {{0}},
                                  aliased_callable_method_entry(vm_cc_cme(calling->cc)));

    return vm_call_method_each_type(ec, cfp, calling);
}
3468
3469static enum method_missing_reason
3470ci_missing_reason(const struct rb_callinfo *ci)
3471{
3472 enum method_missing_reason stat = MISSING_NOENTRY;
3473 if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;
3474 if (vm_ci_flag(ci) & VM_CALL_FCALL) stat |= MISSING_FCALL;
3475 if (vm_ci_flag(ci) & VM_CALL_SUPER) stat |= MISSING_SUPER;
3476 return stat;
3477}
3478
3479static VALUE vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling);
3480
static VALUE
vm_call_symbol(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
               struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE symbol, int flags)
{
    /* Dispatch a call whose method name arrives as a Symbol/String value
     * (e.g. via #send).  When the name is not an interned ID, reroute to
     * #method_missing without creating a new Symbol. */
    ASSUME(calling->argc >= 0);
    /* Also assumes CALLER_SETUP_ARG is already done. */

    enum method_missing_reason missing_reason = MISSING_NOENTRY;
    int argc = calling->argc;
    VALUE recv = calling->recv;
    VALUE klass = CLASS_OF(recv);
    ID mid = rb_check_id(&symbol); /* 0 when the name has no interned ID */
    flags |= VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);

    if (UNLIKELY(! mid)) {
        mid = idMethodMissing;
        missing_reason = ci_missing_reason(ci);
        ec->method_missing_reason = missing_reason;

        /* E.g. when argc == 2
         *
         *   |      |          |      |  TOPN
         *   |      |          +------+
         *   |      |  +---->  | arg1 |    0
         *   +------+  |       +------+
         *   | arg1 | -+   +-> | arg0 |    1
         *   +------+      |   +------+
         *   | arg0 | ---+ |   | sym  |    2
         *   +------+    | |   +------+
         *   | recv |    | |   | recv |    3
         * --+------+--------+------+------
         */
        int i = argc;
        CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
        INC_SP(1);
        MEMMOVE(&TOPN(i - 1), &TOPN(i), VALUE, i);
        argc = ++calling->argc;

        if (rb_method_basic_definition_p(klass, idMethodMissing)) {
            /* Inadvertent symbol creation shall be forbidden, see [Feature #5112] */
            TOPN(i) = symbol;
            int priv = vm_ci_flag(ci) & (VM_CALL_FCALL | VM_CALL_VCALL);
            const VALUE *argv = STACK_ADDR_FROM_TOP(argc);
            VALUE exc = rb_make_no_method_exception(
                rb_eNoMethodError, 0, recv, argc, argv, priv);

            rb_exc_raise(exc);
        }
        else {
            /* custom method_missing exists: pass the name as a Symbol */
            TOPN(i) = rb_str_intern(symbol);
        }
    }

    calling->ci = &VM_CI_ON_STACK(mid, flags, argc, vm_ci_kwarg(ci));
    calling->cc = &VM_CC_ON_STACK(klass,
                                  vm_call_general,
                                  { .method_missing_reason = missing_reason },
                                  rb_callable_method_entry_with_refinements(klass, mid, NULL));

    if (flags & VM_CALL_FCALL) {
        /* function-style call: no visibility check needed */
        return vm_call_method(ec, reg_cfp, calling);
    }

    const struct rb_callcache *cc = calling->cc;
    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, reg_cfp, calling);
          case METHOD_VISI_PRIVATE:
            vm_cc_method_missing_reason_set(cc, MISSING_PRIVATE);
            break;
          case METHOD_VISI_PROTECTED:
            vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
            break;
          default:
            VM_UNREACHABLE(vm_call_method);
        }
        return vm_call_method_missing(ec, reg_cfp, calling);
    }

    return vm_call_method_nome(ec, reg_cfp, calling);
}
3565
static VALUE
vm_call_opt_send(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    /* Optimized handler for #send: the first argument is the method name;
     * shift it off the stack and redispatch via vm_call_symbol. */
    RB_DEBUG_COUNTER_INC(ccf_opt_send);

    int i;
    VALUE sym;

    CALLER_SETUP_ARG(reg_cfp, calling, calling->ci);

    i = calling->argc - 1;

    if (calling->argc == 0) {
        rb_raise(rb_eArgError, "no method name given");
    }
    else {
        sym = TOPN(i);
        /* E.g. when i == 2
         *
         *   |      |          |      |  TOPN
         *   +------+          |      |
         *   | arg1 | ---+     |      |    0
         *   +------+    |     +------+
         *   | arg0 | -+ +---> | arg1 |    1
         *   +------+  |       +------+
         *   | sym  |  +---->  | arg0 |    2
         *   +------+          +------+
         *   | recv |          | recv |    3
         * --+------+--------+------+------
         */
        /* shift arguments */
        if (i > 0) {
            MEMMOVE(&TOPN(i), &TOPN(i-1), VALUE, i);
        }
        calling->argc -= 1;
        DEC_SP(1);

        return vm_call_symbol(ec, reg_cfp, calling, calling->ci, sym, VM_CALL_FCALL);
    }
}
3606
static VALUE
vm_call_method_missing_body(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling,
                            const struct rb_callinfo *orig_ci, enum method_missing_reason reason)
{
    /* Rewrite the call m(a, b, c) into method_missing(:m, a, b, c):
     * insert the method name Symbol as the first argument and redispatch
     * to the receiver's method_missing. */
    RB_DEBUG_COUNTER_INC(ccf_method_missing);

    VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
    unsigned int argc;

    CALLER_SETUP_ARG(reg_cfp, calling, orig_ci);
    argc = calling->argc + 1; /* +1 for the method-name symbol */

    unsigned int flag = VM_CALL_FCALL | VM_CALL_OPT_SEND | (calling->kw_splat ? VM_CALL_KW_SPLAT : 0);
    calling->argc = argc;

    /* shift arguments: m(a, b, c) #=> method_missing(:m, a, b, c) */
    CHECK_VM_STACK_OVERFLOW(reg_cfp, 1);
    vm_check_canary(ec, reg_cfp->sp);
    if (argc > 1) {
        MEMMOVE(argv+1, argv, VALUE, argc-1);
    }
    argv[0] = ID2SYM(vm_ci_mid(orig_ci));
    INC_SP(1);

    ec->method_missing_reason = reason;
    calling->ci = &VM_CI_ON_STACK(idMethodMissing, flag, argc, vm_ci_kwarg(orig_ci));
    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
                                  rb_callable_method_entry_without_refinements(CLASS_OF(calling->recv), idMethodMissing, NULL));
    return vm_call_method(ec, reg_cfp, calling);
}
3637
3638static VALUE
3639vm_call_method_missing(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3640{
3641 return vm_call_method_missing_body(ec, reg_cfp, calling, calling->ci, vm_cc_cmethod_missing_reason(calling->cc));
3642}
3643
3644static const rb_callable_method_entry_t *refined_method_callable_without_refinement(const rb_callable_method_entry_t *me);
static VALUE
vm_call_zsuper(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling, VALUE klass)
{
    /* Handle a ZSUPER method entry: look the method up in the superclass
     * of `klass` and redispatch on the entry found there. */
    klass = RCLASS_SUPER(klass);

    const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, vm_ci_mid(calling->ci)) : NULL;
    if (cme == NULL) {
        return vm_call_method_nome(ec, cfp, calling);
    }
    if (cme->def->type == VM_METHOD_TYPE_REFINED &&
        cme->def->body.refined.orig_me) {
        /* skip the refinement wrapper; super dispatch sees the original */
        cme = refined_method_callable_without_refinement(cme);
    }

    calling->cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }}, cme);

    return vm_call_method_each_type(ec, cfp, calling);
}
3663
3664static inline VALUE
3665find_refinement(VALUE refinements, VALUE klass)
3666{
3667 if (NIL_P(refinements)) {
3668 return Qnil;
3669 }
3670 return rb_hash_lookup(refinements, klass);
3671}
3672
3673PUREFUNC(static rb_control_frame_t * current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp));
static rb_control_frame_t *
current_method_entry(const rb_execution_context_t *ec, rb_control_frame_t *cfp)
{
    /* For a block frame, walk up the control-frame stack to the frame of
     * the lexically enclosing method (local_iseq); otherwise return cfp
     * unchanged. */
    rb_control_frame_t *top_cfp = cfp;

    if (cfp->iseq && ISEQ_BODY(cfp->iseq)->type == ISEQ_TYPE_BLOCK) {
        const rb_iseq_t *local_iseq = ISEQ_BODY(cfp->iseq)->local_iseq;

        do {
            cfp = RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp);
            if (RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(ec, cfp)) {
                /* TODO: orphan block */
                return top_cfp;
            }
        } while (cfp->iseq != local_iseq);
    }
    return cfp;
}
3692
3693static const rb_callable_method_entry_t *
3694refined_method_callable_without_refinement(const rb_callable_method_entry_t *me)
3695{
3696 const rb_method_entry_t *orig_me = me->def->body.refined.orig_me;
3697 const rb_callable_method_entry_t *cme;
3698
3699 if (orig_me->defined_class == 0) {
3700 cme = NULL;
3702 }
3703 else {
3704 cme = (const rb_callable_method_entry_t *)orig_me;
3705 }
3706
3707 VM_ASSERT(callable_method_entry_p(cme));
3708
3709 if (UNDEFINED_METHOD_ENTRY_P(cme)) {
3710 cme = NULL;
3711 }
3712
3713 return cme;
3714}
3715
static const rb_callable_method_entry_t *
search_refined_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* Walk the cref chain looking for an active refinement of the cached
     * method's owner; return the refined method entry if one applies,
     * otherwise fall back to the original method or the superclass. */
    ID mid = vm_ci_mid(calling->ci);
    const rb_cref_t *cref = vm_get_cref(cfp->ep);
    const struct rb_callcache * const cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);

    for (; cref; cref = CREF_NEXT(cref)) {
        const VALUE refinement = find_refinement(CREF_REFINEMENTS(cref), vm_cc_cme(cc)->owner);
        if (NIL_P(refinement)) continue;

        const rb_callable_method_entry_t *const ref_me =
            rb_callable_method_entry(refinement, mid);

        if (ref_me) {
            if (vm_cc_call(cc) == vm_call_super_method) {
                /* super call inside the refined method itself: do not
                 * re-enter the same refinement */
                const rb_control_frame_t *top_cfp = current_method_entry(ec, cfp);
                const rb_callable_method_entry_t *top_me = rb_vm_frame_method_entry(top_cfp);
                if (top_me && rb_method_definition_eq(ref_me->def, top_me->def)) {
                    continue;
                }
            }

            if (cme->def->type != VM_METHOD_TYPE_REFINED ||
                cme->def != ref_me->def) {
                cme = ref_me;
            }
            if (ref_me->def->type != VM_METHOD_TYPE_REFINED) {
                return cme;
            }
        }
        else {
            return NULL;
        }
    }

    if (vm_cc_cme(cc)->def->body.refined.orig_me) {
        /* no refinement applied: unwrap to the original method */
        return refined_method_callable_without_refinement(vm_cc_cme(cc));
    }
    else {
        VALUE klass = RCLASS_SUPER(vm_cc_cme(cc)->defined_class);
        const rb_callable_method_entry_t *cme = klass ? rb_callable_method_entry(klass, mid) : NULL;
        return cme;
    }
}
3762
3763static VALUE
3764vm_call_refined(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
3765{
3766 struct rb_callcache *ref_cc = &VM_CC_ON_STACK(Qundef, vm_call_general, {{ 0 }},
3767 search_refined_method(ec, cfp, calling));
3768
3769 if (vm_cc_cme(ref_cc)) {
3770 calling->cc= ref_cc;
3771 return vm_call_method(ec, cfp, calling);
3772 }
3773 else {
3774 return vm_call_method_nome(ec, cfp, calling);
3775 }
3776}
3777
3778static inline VALUE vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling, const struct rb_callinfo *ci, bool is_lambda, VALUE block_handler);
3779
3780NOINLINE(static VALUE
3781 vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
3782 struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler));
3783
static VALUE
vm_invoke_block_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                         struct rb_calling_info *calling, const struct rb_callinfo *ci, VALUE block_handler)
{
    /* Invoke a block for Proc#call-style dispatch: drop the receiver from
     * the stack (the proc itself) and yield the remaining arguments. */
    int argc = calling->argc;

    /* remove self */
    if (argc > 0) MEMMOVE(&TOPN(argc), &TOPN(argc-1), VALUE, argc);
    DEC_SP(1);

    return vm_invoke_block(ec, reg_cfp, calling, ci, false, block_handler);
}
3796
3797static VALUE
3798vm_call_opt_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3799{
3800 RB_DEBUG_COUNTER_INC(ccf_opt_call);
3801
3802 const struct rb_callinfo *ci = calling->ci;
3803 VALUE procval = calling->recv;
3804 return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, VM_BH_FROM_PROC(procval));
3805}
3806
static VALUE
vm_call_opt_block_call(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    /* Optimized handler for calling the frame's own block parameter.
     * When Proc#call has been redefined, materialize the block into a
     * Proc and dispatch through the generic path instead. */
    RB_DEBUG_COUNTER_INC(ccf_opt_block_call);

    VALUE block_handler = VM_ENV_BLOCK_HANDLER(VM_CF_LEP(reg_cfp));
    const struct rb_callinfo *ci = calling->ci;

    if (BASIC_OP_UNREDEFINED_P(BOP_CALL, PROC_REDEFINED_OP_FLAG)) {
        return vm_invoke_block_opt_call(ec, reg_cfp, calling, ci, block_handler);
    }
    else {
        calling->recv = rb_vm_bh_to_procval(ec, block_handler);
        calling->cc = rb_vm_search_method_slowpath(ci, CLASS_OF(calling->recv));
        return vm_call_general(ec, reg_cfp, calling);
    }
}
3824
static VALUE
vm_call_opt_struct_aref0(rb_execution_context_t *ec, struct rb_calling_info *calling)
{
    /* Optimized Struct member reader: fetch the member at the index
     * recorded in the optimized method definition. */
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_AREF);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    return internal_RSTRUCT_GET(recv, off);
}
3837
3838static VALUE
3839vm_call_opt_struct_aref(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3840{
3841 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aref);
3842
3843 VALUE ret = vm_call_opt_struct_aref0(ec, calling);
3844 reg_cfp->sp -= 1;
3845 return ret;
3846}
3847
static VALUE
vm_call_opt_struct_aset0(rb_execution_context_t *ec, struct rb_calling_info *calling, VALUE val)
{
    /* Optimized Struct member writer: store val at the index recorded in
     * the optimized method definition; frozen receivers raise. */
    VALUE recv = calling->recv;

    VM_ASSERT(RB_TYPE_P(recv, T_STRUCT));
    VM_ASSERT(vm_cc_cme(calling->cc)->def->type == VM_METHOD_TYPE_OPTIMIZED);
    VM_ASSERT(vm_cc_cme(calling->cc)->def->body.optimized.type == OPTIMIZED_METHOD_TYPE_STRUCT_ASET);

    rb_check_frozen(recv);

    const unsigned int off = vm_cc_cme(calling->cc)->def->body.optimized.index;
    internal_RSTRUCT_SET(recv, off, val);

    return val;
}
3864
3865static VALUE
3866vm_call_opt_struct_aset(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
3867{
3868 RB_DEBUG_COUNTER_INC(ccf_opt_struct_aset);
3869
3870 VALUE ret = vm_call_opt_struct_aset0(ec, calling, *(reg_cfp->sp - 1));
3871 reg_cfp->sp -= 2;
3872 return ret;
3873}
3874
3875NOINLINE(static VALUE vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
3876 const struct rb_callinfo *ci, const struct rb_callcache *cc));
3877
static VALUE
vm_call_optimized(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling,
                  const struct rb_callinfo *ci, const struct rb_callcache *cc)
{
    /* Dispatch an OPTIMIZED method entry (send / Proc#call / block call /
     * Struct accessors) and install the matching fastpath handler. */
    switch (vm_cc_cme(cc)->def->body.optimized.type) {
      case OPTIMIZED_METHOD_TYPE_SEND:
        CC_SET_FASTPATH(cc, vm_call_opt_send, TRUE);
        return vm_call_opt_send(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_call, TRUE);
        return vm_call_opt_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_BLOCK_CALL:
        CC_SET_FASTPATH(cc, vm_call_opt_block_call, TRUE);
        return vm_call_opt_block_call(ec, cfp, calling);
      case OPTIMIZED_METHOD_TYPE_STRUCT_AREF:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 0, 0);
        CC_SET_FASTPATH(cc, vm_call_opt_struct_aref, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
        return vm_call_opt_struct_aref(ec, cfp, calling);

      case OPTIMIZED_METHOD_TYPE_STRUCT_ASET:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 1, 1);
        CC_SET_FASTPATH(cc, vm_call_opt_struct_aset, (vm_ci_flag(ci) & VM_CALL_ARGS_SIMPLE));
        return vm_call_opt_struct_aset(ec, cfp, calling);
      default:
        rb_bug("vm_call_method: unsupported optimized method type (%d)", vm_cc_cme(cc)->def->body.optimized.type);
    }
}
3909
/* Evaluate `func`, assigning its result to `var`.  When C_CALL/C_RETURN
 * event hooks are enabled, fire them around the call; otherwise run
 * `nohook` first (typically a fastpath installation) and call without the
 * event overhead. */
#define VM_CALL_METHOD_ATTR(var, func, nohook) \
    if (UNLIKELY(ruby_vm_event_flags & (RUBY_EVENT_C_CALL | RUBY_EVENT_C_RETURN))) { \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_CALL, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, Qundef); \
        var = func; \
        EXEC_EVENT_HOOK(ec, RUBY_EVENT_C_RETURN, calling->recv, vm_cc_cme(cc)->def->original_id, \
                        vm_ci_mid(ci), vm_cc_cme(cc)->owner, (var)); \
    } \
    else { \
        nohook; \
        var = func; \
    }
3922
static VALUE
vm_call_method_each_type(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* Dispatch a call according to the resolved method entry's definition
     * type, installing a type-specific fastpath handler where possible. */
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;
    const rb_callable_method_entry_t *cme = vm_cc_cme(cc);
    VALUE v;

    switch (cme->def->type) {
      case VM_METHOD_TYPE_ISEQ:
        CC_SET_FASTPATH(cc, vm_call_iseq_setup, TRUE);
        return vm_call_iseq_setup(ec, cfp, calling);

      case VM_METHOD_TYPE_NOTIMPLEMENTED:
      case VM_METHOD_TYPE_CFUNC:
        CC_SET_FASTPATH(cc, vm_call_cfunc, TRUE);
        return vm_call_cfunc(ec, cfp, calling);

      case VM_METHOD_TYPE_ATTRSET:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        rb_check_arity(calling->argc, 1, 1);

        /* call shapes that defeat the attrset fastpath */
        const unsigned int aset_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT | VM_CALL_KWARG);

        if (vm_cc_markable(cc)) {
            vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        else {
            /* unmarkable cc: substitute an on-stack copy so the attr index
             * can be initialized without writing to the shared cache */
            cc = &((struct rb_callcache) {
                .flags = T_IMEMO |
                    (imemo_callcache << FL_USHIFT) |
                    VM_CALLCACHE_UNMARKABLE |
                    ((VALUE)INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT) |
                    VM_CALLCACHE_ON_STACK,
                .klass = cc->klass,
                .cme_ = cc->cme_,
                .call_ = cc->call_,
                .aux_ = {
                    .attr = {
                        .value = INVALID_SHAPE_ID << SHAPE_FLAG_SHIFT,
                    }
                },
            });

            VM_CALL_METHOD_ATTR(v,
                                vm_call_attrset_direct(ec, cfp, cc, calling->recv),
                                CC_SET_FASTPATH(cc, vm_call_attrset, !(vm_ci_flag(ci) & aset_mask)));
        }
        return v;

      case VM_METHOD_TYPE_IVAR:
        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);
        rb_check_arity(calling->argc, 0, 0);
        vm_cc_attr_index_initialize(cc, INVALID_SHAPE_ID);
        const unsigned int ivar_mask = (VM_CALL_ARGS_SPLAT | VM_CALL_KW_SPLAT);
        VM_CALL_METHOD_ATTR(v,
                            vm_call_ivar(ec, cfp, calling),
                            CC_SET_FASTPATH(cc, vm_call_ivar, !(vm_ci_flag(ci) & ivar_mask)));
        return v;

      case VM_METHOD_TYPE_MISSING:
        vm_cc_method_missing_reason_set(cc, 0);
        CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
        return vm_call_method_missing(ec, cfp, calling);

      case VM_METHOD_TYPE_BMETHOD:
        CC_SET_FASTPATH(cc, vm_call_bmethod, TRUE);
        return vm_call_bmethod(ec, cfp, calling);

      case VM_METHOD_TYPE_ALIAS:
        CC_SET_FASTPATH(cc, vm_call_alias, TRUE);
        return vm_call_alias(ec, cfp, calling);

      case VM_METHOD_TYPE_OPTIMIZED:
        return vm_call_optimized(ec, cfp, calling, ci, cc);

      case VM_METHOD_TYPE_UNDEF:
        break;

      case VM_METHOD_TYPE_ZSUPER:
        return vm_call_zsuper(ec, cfp, calling, RCLASS_ORIGIN(vm_cc_cme(cc)->defined_class));

      case VM_METHOD_TYPE_REFINED:
        // CC_SET_FASTPATH(cc, vm_call_refined, TRUE);
        // should not set FASTPATH since vm_call_refined assumes cc->call is vm_call_super_method on invokesuper.
        return vm_call_refined(ec, cfp, calling);
    }

    rb_bug("vm_call_method: unsupported method type (%d)", vm_cc_cme(cc)->def->type);
}
4019
4020NORETURN(static void vm_raise_method_missing(rb_execution_context_t *ec, int argc, const VALUE *argv, VALUE obj, int call_status));
4021
static VALUE
vm_call_method_nome(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* method missing */
    const struct rb_callinfo *ci = calling->ci;
    const int stat = ci_missing_reason(ci);

    if (vm_ci_mid(ci) == idMethodMissing) {
        /* method_missing itself is missing: raise NoMethodError directly
         * (vm_raise_method_missing does not return). */
        rb_control_frame_t *reg_cfp = cfp;
        VALUE *argv = STACK_ADDR_FROM_TOP(calling->argc);
        vm_raise_method_missing(ec, calling->argc, argv, calling->recv, stat);
    }
    else {
        return vm_call_method_missing_body(ec, cfp, calling, ci, stat);
    }
}
4038
4039/* Protected method calls and super invocations need to check that the receiver
4040 * (self for super) inherits the module on which the method is defined.
4041 * In the case of refinements, it should consider the original class not the
4042 * refinement.
4043 */
4044static VALUE
4045vm_defined_class_for_protected_call(const rb_callable_method_entry_t *me)
4046{
4047 VALUE defined_class = me->defined_class;
4048 VALUE refined_class = RCLASS_REFINED_CLASS(defined_class);
4049 return NIL_P(refined_class) ? defined_class : refined_class;
4050}
4051
static inline VALUE
vm_call_method(rb_execution_context_t *ec, rb_control_frame_t *cfp, struct rb_calling_info *calling)
{
    /* Full method dispatch: enforce visibility (private/protected) against
     * the call-site flags, then dispatch by method type; missing entries
     * go to the method_missing path. */
    const struct rb_callinfo *ci = calling->ci;
    const struct rb_callcache *cc = calling->cc;

    VM_ASSERT(callable_method_entry_p(vm_cc_cme(cc)));

    if (vm_cc_cme(cc) != NULL) {
        switch (METHOD_ENTRY_VISI(vm_cc_cme(cc))) {
          case METHOD_VISI_PUBLIC: /* likely */
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PRIVATE:
            if (!(vm_ci_flag(ci) & VM_CALL_FCALL)) {
                /* private methods require a receiver-less (fcall) form */
                enum method_missing_reason stat = MISSING_PRIVATE;
                if (vm_ci_flag(ci) & VM_CALL_VCALL) stat |= MISSING_VCALL;

                vm_cc_method_missing_reason_set(cc, stat);
                CC_SET_FASTPATH(cc, vm_call_method_missing, TRUE);
                return vm_call_method_missing(ec, cfp, calling);
            }
            return vm_call_method_each_type(ec, cfp, calling);

          case METHOD_VISI_PROTECTED:
            if (!(vm_ci_flag(ci) & (VM_CALL_OPT_SEND | VM_CALL_FCALL))) {
                /* caller's self must inherit the defining class/module */
                VALUE defined_class = vm_defined_class_for_protected_call(vm_cc_cme(cc));
                if (!rb_obj_is_kind_of(cfp->self, defined_class)) {
                    vm_cc_method_missing_reason_set(cc, MISSING_PROTECTED);
                    return vm_call_method_missing(ec, cfp, calling);
                }
                else {
                    /* caching method info to dummy cc */
                    VM_ASSERT(vm_cc_cme(cc) != NULL);
                    struct rb_callcache cc_on_stack = *cc;
                    FL_SET_RAW((VALUE)&cc_on_stack, VM_CALLCACHE_UNMARKABLE);
                    calling->cc = &cc_on_stack;
                    return vm_call_method_each_type(ec, cfp, calling);
                }
            }
            return vm_call_method_each_type(ec, cfp, calling);

          default:
            rb_bug("unreachable");
        }
    }
    else {
        return vm_call_method_nome(ec, cfp, calling);
    }
}
4102
static VALUE
vm_call_general(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    /* Generic (slow-path) call handler: full dispatch via vm_call_method. */
    RB_DEBUG_COUNTER_INC(ccf_general);
    return vm_call_method(ec, reg_cfp, calling);
}
4109
void
rb_vm_cc_general(const struct rb_callcache *cc)
{
    /* Reset a call cache's handler back to the generic slow path.  The
     * const cast is deliberate: call_ is conceptually immutable but must
     * be invalidated in place here. */
    VM_ASSERT(IMEMO_TYPE_P(cc, imemo_callcache));
    VM_ASSERT(cc != vm_cc_empty());

    *(vm_call_handler *)&cc->call_ = vm_call_general;
}
4118
static VALUE
vm_call_super_method(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, struct rb_calling_info *calling)
{
    /* Call handler used for super dispatch; must remain a distinct
     * function from vm_call_general (see comments below). */
    RB_DEBUG_COUNTER_INC(ccf_super_method);

    // This line is introduced to make different from `vm_call_general` because some compilers (VC we found)
    // can merge the function and the address of the function becomes same.
    // The address of `vm_call_super_method` is used in `search_refined_method`, so it should be different.
    if (ec == NULL) rb_bug("unreachable");

    /* this check is required to distinguish with other functions. */
    VM_ASSERT(vm_cc_call(calling->cc) == vm_call_super_method);
    return vm_call_method(ec, reg_cfp, calling);
}
4133
4134/* super */
4135
static inline VALUE
vm_search_normal_superclass(VALUE klass)
{
    /* Determine the class where super dispatch continues: a refinement's
     * ICLASS is replaced by its attached refinement module first, then the
     * origin is unwrapped and its superclass returned. */
    if (BUILTIN_TYPE(klass) == T_ICLASS &&
        RB_TYPE_P(RBASIC(klass)->klass, T_MODULE) &&
        FL_TEST_RAW(RBASIC(klass)->klass, RMODULE_IS_REFINEMENT)) {
        klass = RBASIC(klass)->klass;
    }
    klass = RCLASS_ORIGIN(klass);
    return RCLASS_SUPER(klass);
}
4147
4148NORETURN(static void vm_super_outside(void));
4149
static void
vm_super_outside(void)
{
    /* Raised when `super` is executed with no enclosing method frame. */
    rb_raise(rb_eNoMethodError, "super called outside of method");
}
4155
static const struct rb_callcache *
empty_cc_for_super(void)
{
    /* Return the sentinel empty call cache used for super calls.
     * MJIT-compiled code must go through the exported accessor because the
     * static object is not visible from the MJIT header. */
#ifdef MJIT_HEADER
    return rb_vm_empty_cc_for_super();
#else
    return &vm_empty_cc_for_super;
#endif
}
4165
/* Resolve the callee for an `invokesuper` instruction: validate the
 * calling context, rewrite the call info to the original method id, and
 * produce (and cache in `cd`) a call cache for the superclass lookup.
 * NOTE(review): two rb_raise(...) call heads are not visible in this
 * excerpt (only their message-string continuation lines appear). */
static const struct rb_callcache *
vm_search_super_method(const rb_control_frame_t *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    VALUE current_defined_class;
    const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);

    if (!me) {
        vm_super_outside();
    }

    current_defined_class = vm_defined_class_for_protected_call(me);

    if (BUILTIN_TYPE(current_defined_class) != T_MODULE &&
        reg_cfp->iseq != method_entry_iseqptr(me) &&
        !rb_obj_is_kind_of(recv, current_defined_class)) {
        VALUE m = RB_TYPE_P(current_defined_class, T_ICLASS) ?
            RCLASS_INCLUDER(current_defined_class) : current_defined_class;

        if (m) { /* not bound UnboundMethod */
                 "self has wrong type to call super in this context: "
                 "%"PRIsVALUE" (expected %"PRIsVALUE")",
                 rb_obj_class(recv), m);
        }
    }

    if (me->def->type == VM_METHOD_TYPE_BMETHOD && (vm_ci_flag(cd->ci) & VM_CALL_ZSUPER)) {
                 "implicit argument passing of super from method defined"
                 " by define_method() is not supported."
                 " Specify all arguments explicitly.");
    }

    /* super always re-dispatches on the original (pre-alias) name */
    ID mid = me->def->original_id;

    // update iseq. really? (TODO)
    cd->ci = vm_ci_new_runtime(mid,
                               vm_ci_flag(cd->ci),
                               vm_ci_argc(cd->ci),
                               vm_ci_kwarg(cd->ci));

    RB_OBJ_WRITTEN(reg_cfp->iseq, Qundef, cd->ci);

    const struct rb_callcache *cc;

    VALUE klass = vm_search_normal_superclass(me->defined_class);

    if (!klass) {
        /* bound instance method of module */
        cc = vm_cc_new(klass, NULL, vm_call_method_missing);
        RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
    }
    else {
        cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, klass);
        const rb_callable_method_entry_t *cached_cme = vm_cc_cme(cc);

        // define_method can cache for different method id
        if (cached_cme == NULL) {
            // empty_cc_for_super is not markable object
            cd->cc = empty_cc_for_super();
        }
        else if (cached_cme->called_id != mid) {
            const rb_callable_method_entry_t *cme = rb_callable_method_entry(klass, mid);
            if (cme) {
                cc = vm_cc_new(klass, cme, vm_call_super_method);
                RB_OBJ_WRITE(reg_cfp->iseq, &cd->cc, cc);
            }
            else {
                cd->cc = cc = empty_cc_for_super();
            }
        }
        else {
            switch (cached_cme->def->type) {
              // vm_call_refined (search_refined_method) assumes cc->call is vm_call_super_method on invokesuper
              case VM_METHOD_TYPE_REFINED:
              // cc->klass is superclass of receiver class. Checking cc->klass is not enough to invalidate IVC for the receiver class.
              case VM_METHOD_TYPE_ATTRSET:
              case VM_METHOD_TYPE_IVAR:
                vm_cc_call_set(cc, vm_call_super_method); // invalidate fastpath
                break;
              default:
                break; // use fastpath
            }
        }
    }

    VM_ASSERT((vm_cc_cme(cc), true));

    return cc;
}
4256
4257/* yield */
4258
4259static inline int
4260block_proc_is_lambda(const VALUE procval)
4261{
4262 rb_proc_t *proc;
4263
4264 if (procval) {
4265 GetProcPtr(procval, proc);
4266 return proc->is_lambda;
4267 }
4268 else {
4269 return 0;
4270 }
4271}
4272
/* Yield argc/argv to an ifunc (C-function) block: push an IFUNC frame,
 * invoke the stored C function, pop the frame, return its result.
 * NOTE(review): the final parameter line of the signature (the `me`
 * method entry referenced below) is not visible in this excerpt. */
static VALUE
vm_yield_with_cfunc(rb_execution_context_t *ec,
                    const struct rb_captured_block *captured,
                    VALUE self, int argc, const VALUE *argv, int kw_splat, VALUE block_handler,
{
    int is_lambda = FALSE; /* TODO */
    VALUE val, arg, blockarg;
    int frame_flag;
    const struct vm_ifunc *ifunc = captured->code.ifunc;

    /* pick the single argument passed to the C function: whole array for
     * lambdas, nil for no args, first arg otherwise */
    if (is_lambda) {
        arg = rb_ary_new4(argc, argv);
    }
    else if (argc == 0) {
        arg = Qnil;
    }
    else {
        arg = argv[0];
    }

    blockarg = rb_vm_bh_to_procval(ec, block_handler);

    /* BMETHOD flag is set when a method entry accompanies the block */
    frame_flag = VM_FRAME_MAGIC_IFUNC | VM_FRAME_FLAG_CFRAME | (me ? VM_FRAME_FLAG_BMETHOD : 0);
    if (kw_splat) {
        frame_flag |= VM_FRAME_FLAG_CFRAME_KW;
    }

    vm_push_frame(ec, (const rb_iseq_t *)captured->code.ifunc,
                  frame_flag,
                  self,
                  VM_GUARDED_PREV_EP(captured->ep),
                  (VALUE)me,
                  0, ec->cfp->sp, 0, 0);
    val = (*ifunc->func)(arg, (VALUE)ifunc->data, argc, argv, blockarg);
    rb_vm_pop_frame(ec);

    return val;
}
4312
4313static VALUE
4314vm_yield_with_symbol(rb_execution_context_t *ec, VALUE symbol, int argc, const VALUE *argv, int kw_splat, VALUE block_handler)
4315{
4316 return rb_sym_proc_call(SYM2ID(symbol), argc, argv, kw_splat, rb_vm_bh_to_procval(ec, block_handler));
4317}
4318
4319static inline int
4320vm_callee_setup_block_arg_arg0_splat(rb_control_frame_t *cfp, const rb_iseq_t *iseq, VALUE *argv, VALUE ary)
4321{
4322 int i;
4323 long len = RARRAY_LEN(ary);
4324
4325 CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
4326
4327 for (i=0; i<len && i<ISEQ_BODY(iseq)->param.lead_num; i++) {
4328 argv[i] = RARRAY_AREF(ary, i);
4329 }
4330
4331 return i;
4332}
4333
/* If the single block argument is array-convertible (to_ary), return the
 * converted array, else Qnil.  Used to decide whether a one-argument
 * yield should be splatted across the block's parameters. */
static inline VALUE
vm_callee_setup_block_arg_arg0_check(VALUE *argv)
{
    VALUE ary, arg0 = argv[0];
    ary = rb_check_array_type(arg0);
#if 0
    argv[0] = arg0;
#else
    /* rb_check_array_type must not have replaced the stack slot */
    VM_ASSERT(argv[0] == arg0);
#endif
    return ary;
}
4346
/* Prepare the arguments in argv according to the block iseq's parameter
 * list.  "Simple" iseqs (leading params only) are handled inline,
 * including the implicit arg0 array splat and block-style leniency;
 * anything else falls back to setup_parameters_complex().  Returns the
 * optional-argument pc offset (always 0 on the simple path). */
static int
vm_callee_setup_block_arg(rb_execution_context_t *ec, struct rb_calling_info *calling, const struct rb_callinfo *ci, const rb_iseq_t *iseq, VALUE *argv, const enum arg_setup_type arg_setup_type)
{
    if (rb_simple_iseq_p(iseq)) {
        rb_control_frame_t *cfp = ec->cfp;
        VALUE arg0;

        CALLER_SETUP_ARG(cfp, calling, ci);
        CALLER_REMOVE_EMPTY_KW_SPLAT(cfp, calling, ci);

        /* `yield [a, b]` into `{|a, b| }` splats the array across params */
        if (arg_setup_type == arg_setup_block &&
            calling->argc == 1 &&
            ISEQ_BODY(iseq)->param.flags.has_lead &&
            !ISEQ_BODY(iseq)->param.flags.ambiguous_param0 &&
            !NIL_P(arg0 = vm_callee_setup_block_arg_arg0_check(argv))) {
            calling->argc = vm_callee_setup_block_arg_arg0_splat(cfp, iseq, argv, arg0);
        }

        if (calling->argc != ISEQ_BODY(iseq)->param.lead_num) {
            if (arg_setup_type == arg_setup_block) {
                /* blocks are lenient: pad missing args with nil, drop extras */
                if (calling->argc < ISEQ_BODY(iseq)->param.lead_num) {
                    int i;
                    CHECK_VM_STACK_OVERFLOW(cfp, ISEQ_BODY(iseq)->param.lead_num);
                    for (i=calling->argc; i<ISEQ_BODY(iseq)->param.lead_num; i++) argv[i] = Qnil;
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* fill rest parameters */
                }
                else if (calling->argc > ISEQ_BODY(iseq)->param.lead_num) {
                    calling->argc = ISEQ_BODY(iseq)->param.lead_num; /* simply truncate arguments */
                }
            }
            else {
                /* method/lambda-style setup enforces exact arity */
                argument_arity_error(ec, iseq, calling->argc, ISEQ_BODY(iseq)->param.lead_num, ISEQ_BODY(iseq)->param.lead_num);
            }
        }

        return 0;
    }
    else {
        return setup_parameters_complex(ec, iseq, calling, ci, argv, arg_setup_type);
    }
}
4388
4389static int
4390vm_yield_setup_args(rb_execution_context_t *ec, const rb_iseq_t *iseq, const int argc, VALUE *argv, int kw_splat, VALUE block_handler, enum arg_setup_type arg_setup_type)
4391{
4392 struct rb_calling_info calling_entry, *calling;
4393
4394 calling = &calling_entry;
4395 calling->argc = argc;
4396 calling->block_handler = block_handler;
4397 calling->kw_splat = kw_splat;
4398 calling->recv = Qundef;
4399 struct rb_callinfo dummy_ci = VM_CI_ON_STACK(0, (kw_splat ? VM_CALL_KW_SPLAT : 0), 0, 0);
4400
4401 return vm_callee_setup_block_arg(ec, calling, &dummy_ci, iseq, argv, arg_setup_type);
4402}
4403
4404/* ruby iseq -> ruby block */
4405
/* Invoke an iseq-defined block: the arguments already sit on the VM
 * stack; arrange them per the block's parameter list and push a BLOCK
 * frame.  Returns Qundef so the interpreter continues in that frame. */
static VALUE
vm_invoke_iseq_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                     struct rb_calling_info *calling, const struct rb_callinfo *ci,
                     bool is_lambda, VALUE block_handler)
{
    const struct rb_captured_block *captured = VM_BH_TO_ISEQ_BLOCK(block_handler);
    const rb_iseq_t *iseq = rb_iseq_check(captured->code.iseq);
    const int arg_size = ISEQ_BODY(iseq)->param.size;
    VALUE * const rsp = GET_SP() - calling->argc;
    /* lambdas use strict method-style argument checking */
    int opt_pc = vm_callee_setup_block_arg(ec, calling, ci, iseq, rsp, is_lambda ? arg_setup_method : arg_setup_block);

    SET_SP(rsp);

    vm_push_frame(ec, iseq,
                  VM_FRAME_MAGIC_BLOCK | (is_lambda ? VM_FRAME_FLAG_LAMBDA : 0),
                  captured->self,
                  VM_GUARDED_PREV_EP(captured->ep), 0,
                  ISEQ_BODY(iseq)->iseq_encoded + opt_pc,
                  rsp + arg_size,
                  ISEQ_BODY(iseq)->local_table_size - arg_size, ISEQ_BODY(iseq)->stack_max);

    return Qundef;
}
4429
/* Invoke a Symbol block handler (e.g. &:sym): the first argument on the
 * stack becomes the receiver and the symbol is sent to it. */
static VALUE
vm_invoke_symbol_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                       struct rb_calling_info *calling, const struct rb_callinfo *ci,
                       MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    if (calling->argc < 1) {
        rb_raise(rb_eArgError, "no receiver given");
    }
    else {
        VALUE symbol = VM_BH_TO_SYMBOL(block_handler);
        CALLER_SETUP_ARG(reg_cfp, calling, ci);
        /* consume the first argument as the receiver */
        calling->recv = TOPN(--calling->argc);
        return vm_call_symbol(ec, reg_cfp, calling, ci, symbol, 0);
    }
}
4445
/* Invoke an ifunc (C function) block handler: yield the stack arguments
 * through vm_yield_with_cfunc() and pop them afterwards. */
static VALUE
vm_invoke_ifunc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
                      struct rb_calling_info *calling, const struct rb_callinfo *ci,
                      MAYBE_UNUSED(bool is_lambda), VALUE block_handler)
{
    VALUE val;
    int argc;
    const struct rb_captured_block *captured = VM_BH_TO_IFUNC_BLOCK(block_handler);
    CALLER_SETUP_ARG(ec->cfp, calling, ci);
    CALLER_REMOVE_EMPTY_KW_SPLAT(ec->cfp, calling, ci);
    argc = calling->argc;
    val = vm_yield_with_cfunc(ec, captured, captured->self, argc, STACK_ADDR_FROM_TOP(argc), calling->kw_splat, calling->block_handler, NULL);
    POPN(argc); /* TODO: should put before C/yield? */
    return val;
}
4461
4462static VALUE
4463vm_proc_to_block_handler(VALUE procval)
4464{
4465 const struct rb_block *block = vm_proc_block(procval);
4466
4467 switch (vm_block_type(block)) {
4468 case block_type_iseq:
4469 return VM_BH_FROM_ISEQ_BLOCK(&block->as.captured);
4470 case block_type_ifunc:
4471 return VM_BH_FROM_IFUNC_BLOCK(&block->as.captured);
4472 case block_type_symbol:
4473 return VM_BH_FROM_SYMBOL(block->as.symbol);
4474 case block_type_proc:
4475 return VM_BH_FROM_PROC(block->as.proc);
4476 }
4477 VM_UNREACHABLE(vm_yield_with_proc);
4478 return Qundef;
4479}
4480
4481static VALUE
4482vm_invoke_proc_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4483 struct rb_calling_info *calling, const struct rb_callinfo *ci,
4484 bool is_lambda, VALUE block_handler)
4485{
4486 while (vm_block_handler_type(block_handler) == block_handler_type_proc) {
4487 VALUE proc = VM_BH_TO_PROC(block_handler);
4488 is_lambda = block_proc_is_lambda(proc);
4489 block_handler = vm_proc_to_block_handler(proc);
4490 }
4491
4492 return vm_invoke_block(ec, reg_cfp, calling, ci, is_lambda, block_handler);
4493}
4494
4495static inline VALUE
4496vm_invoke_block(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4497 struct rb_calling_info *calling, const struct rb_callinfo *ci,
4498 bool is_lambda, VALUE block_handler)
4499{
4500 VALUE (*func)(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp,
4501 struct rb_calling_info *calling, const struct rb_callinfo *ci,
4502 bool is_lambda, VALUE block_handler);
4503
4504 switch (vm_block_handler_type(block_handler)) {
4505 case block_handler_type_iseq: func = vm_invoke_iseq_block; break;
4506 case block_handler_type_ifunc: func = vm_invoke_ifunc_block; break;
4507 case block_handler_type_proc: func = vm_invoke_proc_block; break;
4508 case block_handler_type_symbol: func = vm_invoke_symbol_block; break;
4509 default: rb_bug("vm_invoke_block: unreachable");
4510 }
4511
4512 return func(ec, reg_cfp, calling, ci, is_lambda, block_handler);
4513}
4514
4515static VALUE
4516vm_make_proc_with_iseq(const rb_iseq_t *blockiseq)
4517{
4518 const rb_execution_context_t *ec = GET_EC();
4519 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
4520 struct rb_captured_block *captured;
4521
4522 if (cfp == 0) {
4523 rb_bug("vm_make_proc_with_iseq: unreachable");
4524 }
4525
4526 captured = VM_CFP_TO_CAPTURED_BLOCK(cfp);
4527 captured->code.iseq = blockiseq;
4528
4529 return rb_vm_make_proc(ec, captured, rb_cProc);
4530}
4531
4532static VALUE
4533vm_once_exec(VALUE iseq)
4534{
4535 VALUE proc = vm_make_proc_with_iseq((rb_iseq_t *)iseq);
4536 return rb_proc_call_with_block(proc, 0, 0, Qnil);
4537}
4538
4539static VALUE
4540vm_once_clear(VALUE data)
4541{
4542 union iseq_inline_storage_entry *is = (union iseq_inline_storage_entry *)data;
4543 is->once.running_thread = NULL;
4544 return Qnil;
4545}
4546
4547/* defined insn */
4548
4549static bool
4550check_respond_to_missing(VALUE obj, VALUE v)
4551{
4552 VALUE args[2];
4553 VALUE r;
4554
4555 args[0] = obj; args[1] = Qfalse;
4556 r = rb_check_funcall(v, idRespond_to_missing, 2, args);
4557 if (!UNDEF_P(r) && RTEST(r)) {
4558 return true;
4559 }
4560 else {
4561 return false;
4562 }
4563}
4564
/* Core of the `defined?` keyword: report whether the entity named by
 * `obj` (usually a Symbol) of category `op_type` is defined, using `v`
 * as the auxiliary value (receiver, constant base, etc.). */
static bool
vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
{
    VALUE klass;
    enum defined_type type = (enum defined_type)op_type;

    switch (type) {
      case DEFINED_IVAR:
        return rb_ivar_defined(GET_SELF(), SYM2ID(obj));
        break;
      case DEFINED_GVAR:
        return rb_gvar_defined(SYM2ID(obj));
        break;
      case DEFINED_CVAR: {
        const rb_cref_t *cref = vm_get_cref(GET_EP());
        klass = vm_get_cvar_base(cref, GET_CFP(), 0);
        return rb_cvar_defined(klass, SYM2ID(obj));
        break;
      }
      case DEFINED_CONST:
      case DEFINED_CONST_FROM: {
        /* DEFINED_CONST allows a nil base (lexical lookup) */
        bool allow_nil = type == DEFINED_CONST;
        klass = v;
        return vm_get_ev_const(ec, klass, SYM2ID(obj), allow_nil, true);
        break;
      }
      case DEFINED_FUNC:
        klass = CLASS_OF(v);
        return rb_ec_obj_respond_to(ec, v, SYM2ID(obj), TRUE);
        break;
      case DEFINED_METHOD:{
        VALUE klass = CLASS_OF(v);
        const rb_method_entry_t *me = rb_method_entry_with_refinements(klass, SYM2ID(obj), NULL);

        if (me) {
            switch (METHOD_ENTRY_VISI(me)) {
              case METHOD_VISI_PRIVATE:
                break;
              case METHOD_VISI_PROTECTED:
                if (!rb_obj_is_kind_of(GET_SELF(), rb_class_real(me->defined_class))) {
                    break;
                }
                /* fall through: protected method callable from this self */
              case METHOD_VISI_PUBLIC:
                return true;
                break;
              default:
                rb_bug("vm_defined: unreachable: %u", (unsigned int)METHOD_ENTRY_VISI(me));
            }
        }
        else {
            return check_respond_to_missing(obj, v);
        }
        break;
      }
      case DEFINED_YIELD:
        if (GET_BLOCK_HANDLER() != VM_BLOCK_HANDLER_NONE) {
            return true;
        }
        break;
      case DEFINED_ZSUPER:
        {
            const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(GET_CFP());

            if (me) {
                /* would a bare `super` find a method? */
                VALUE klass = vm_search_normal_superclass(me->defined_class);
                ID id = me->def->original_id;

                return rb_method_boundp(klass, id, 0);
            }
        }
        break;
      case DEFINED_REF:{
        /* back-reference / nth-match special variables ($~, $1, ...) */
        return vm_getspecial(ec, GET_LEP(), Qfalse, FIX2INT(obj)) != Qnil;
        break;
      }
      default:
        rb_bug("unimplemented defined? type (VM)");
        break;
    }

    return false;
}
4647
4648bool
4649rb_vm_defined(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, rb_num_t op_type, VALUE obj, VALUE v)
4650{
4651 return vm_defined(ec, reg_cfp, op_type, obj, v);
4652}
4653
4654static const VALUE *
4655vm_get_ep(const VALUE *const reg_ep, rb_num_t lv)
4656{
4657 rb_num_t i;
4658 const VALUE *ep = reg_ep;
4659 for (i = 0; i < lv; i++) {
4660 ep = GET_PREV_EP(ep);
4661 }
4662 return ep;
4663}
4664
/* `putspecialobject` instruction: map the operand to the frozen-core
 * object, the current cbase, or the constant-definition base. */
static VALUE
vm_get_special_object(const VALUE *const reg_ep,
                      enum vm_special_object_type type)
{
    switch (type) {
      case VM_SPECIAL_OBJECT_VMCORE:
        return rb_mRubyVMFrozenCore;
      case VM_SPECIAL_OBJECT_CBASE:
        return vm_get_cbase(reg_ep);
      case VM_SPECIAL_OBJECT_CONST_BASE:
        return vm_get_const_base(reg_ep);
      default:
        rb_bug("putspecialobject insn: unknown value_type %d", type);
    }
}
4680
4681static VALUE
4682vm_concat_array(VALUE ary1, VALUE ary2st)
4683{
4684 const VALUE ary2 = ary2st;
4685 VALUE tmp1 = rb_check_to_array(ary1);
4686 VALUE tmp2 = rb_check_to_array(ary2);
4687
4688 if (NIL_P(tmp1)) {
4689 tmp1 = rb_ary_new3(1, ary1);
4690 }
4691
4692 if (NIL_P(tmp2)) {
4693 tmp2 = rb_ary_new3(1, ary2);
4694 }
4695
4696 if (tmp1 == ary1) {
4697 tmp1 = rb_ary_dup(ary1);
4698 }
4699 return rb_ary_concat(tmp1, tmp2);
4700}
4701
4702// YJIT implementation is using the C function
4703// and needs to call a non-static function
4704VALUE
4705rb_vm_concat_array(VALUE ary1, VALUE ary2st)
4706{
4707 return vm_concat_array(ary1, ary2st);
4708}
4709
4710static VALUE
4711vm_splat_array(VALUE flag, VALUE ary)
4712{
4713 VALUE tmp = rb_check_to_array(ary);
4714 if (NIL_P(tmp)) {
4715 return rb_ary_new3(1, ary);
4716 }
4717 else if (RTEST(flag)) {
4718 return rb_ary_dup(tmp);
4719 }
4720 else {
4721 return tmp;
4722 }
4723}
4724
4725// YJIT implementation is using the C function
4726// and needs to call a non-static function
4727VALUE
4728rb_vm_splat_array(VALUE flag, VALUE ary)
4729{
4730 return vm_splat_array(flag, ary);
4731}
4732
4733static VALUE
4734vm_check_match(rb_execution_context_t *ec, VALUE target, VALUE pattern, rb_num_t flag)
4735{
4736 enum vm_check_match_type type = ((int)flag) & VM_CHECKMATCH_TYPE_MASK;
4737
4738 if (flag & VM_CHECKMATCH_ARRAY) {
4739 long i;
4740 const long n = RARRAY_LEN(pattern);
4741
4742 for (i = 0; i < n; i++) {
4743 VALUE v = RARRAY_AREF(pattern, i);
4744 VALUE c = check_match(ec, v, target, type);
4745
4746 if (RTEST(c)) {
4747 return c;
4748 }
4749 }
4750 return Qfalse;
4751 }
4752 else {
4753 return check_match(ec, pattern, target, type);
4754 }
4755}
4756
4757static VALUE
4758vm_check_keyword(lindex_t bits, lindex_t idx, const VALUE *ep)
4759{
4760 const VALUE kw_bits = *(ep - bits);
4761
4762 if (FIXNUM_P(kw_bits)) {
4763 unsigned int b = (unsigned int)FIX2ULONG(kw_bits);
4764 if ((idx < KW_SPECIFIED_BITS_MAX) && (b & (0x01 << idx)))
4765 return Qfalse;
4766 }
4767 else {
4768 VM_ASSERT(RB_TYPE_P(kw_bits, T_HASH));
4769 if (rb_hash_has_key(kw_bits, INT2FIX(idx))) return Qfalse;
4770 }
4771 return Qtrue;
4772}
4773
/* Fire the matching DTrace probe for a call/return event, if any probe
 * is enabled.  NOTE(review): the `case RUBY_EVENT_C_RETURN:` label line
 * is not visible in this excerpt (only its hook body appears). */
static void
vm_dtrace(rb_event_flag_t flag, rb_execution_context_t *ec)
{
    if (RUBY_DTRACE_METHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_METHOD_RETURN_ENABLED() ||
        RUBY_DTRACE_CMETHOD_ENTRY_ENABLED() ||
        RUBY_DTRACE_CMETHOD_RETURN_ENABLED()) {

        switch (flag) {
          case RUBY_EVENT_CALL:
            RUBY_DTRACE_METHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_C_CALL:
            RUBY_DTRACE_CMETHOD_ENTRY_HOOK(ec, 0, 0);
            return;
          case RUBY_EVENT_RETURN:
            RUBY_DTRACE_METHOD_RETURN_HOOK(ec, 0, 0);
            return;
            RUBY_DTRACE_CMETHOD_RETURN_HOOK(ec, 0, 0);
            return;
        }
    }
}
4798
4799static VALUE
4800vm_const_get_under(ID id, rb_num_t flags, VALUE cbase)
4801{
4802 if (!rb_const_defined_at(cbase, id)) {
4803 return 0;
4804 }
4805 else if (VM_DEFINECLASS_SCOPED_P(flags)) {
4806 return rb_public_const_get_at(cbase, id);
4807 }
4808 else {
4809 return rb_const_get_at(cbase, id);
4810 }
4811}
4812
/* Validate an existing constant found when reopening a class: return
 * `klass` if it is a class whose real superclass matches `super` (when a
 * superclass was given), 0 if it is not a class at all.
 * NOTE(review): the rb_raise(...) call head for the superclass-mismatch
 * error is not visible in this excerpt. */
static VALUE
vm_check_if_class(ID id, rb_num_t flags, VALUE super, VALUE klass)
{
    if (!RB_TYPE_P(klass, T_CLASS)) {
        return 0;
    }
    else if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags)) {
        VALUE tmp = rb_class_real(RCLASS_SUPER(klass));

        if (tmp != super) {
                     "superclass mismatch for class %"PRIsVALUE"",
                     rb_id2str(id));
        }
        else {
            return klass;
        }
    }
    else {
        return klass;
    }
}
4835
4836static VALUE
4837vm_check_if_module(ID id, VALUE mod)
4838{
4839 if (!RB_TYPE_P(mod, T_MODULE)) {
4840 return 0;
4841 }
4842 else {
4843 return mod;
4844 }
4845}
4846
4847static VALUE
4848declare_under(ID id, VALUE cbase, VALUE c)
4849{
4850 rb_set_class_path_string(c, cbase, rb_id2str(id));
4851 rb_const_set(cbase, id, c);
4852 return c;
4853}
4854
/* Declare a brand-new class `id` under `cbase`, using `super` when a
 * superclass was given (Object otherwise), and fire the `inherited`
 * hook on the superclass.
 * NOTE(review): one line of this function is not visible in this excerpt. */
static VALUE
vm_declare_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    /* new class declaration */
    VALUE s = VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) ? super : rb_cObject;
    VALUE c = declare_under(id, cbase, rb_define_class_id(id, s));
    rb_class_inherited(s, c);
    return c;
}
4865
4866static VALUE
4867vm_declare_module(ID id, VALUE cbase)
4868{
4869 /* new module declaration */
4870 return declare_under(id, cbase, rb_module_new());
4871}
4872
NORETURN(static void unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old));
/* Report redefining constant `id` as a different kind of thing (class vs
 * module), appending the previous definition's source location when it
 * is known.  NOTE(review): the raising statement at the end of this
 * function is not visible in this excerpt. */
static void
unmatched_redefinition(const char *type, VALUE cbase, ID id, VALUE old)
{
    VALUE name = rb_id2str(id);
    VALUE message = rb_sprintf("%"PRIsVALUE" is not a %s",
                               name, type);
    VALUE location = rb_const_source_location_at(cbase, id);
    if (!NIL_P(location)) {
        rb_str_catf(message, "\n%"PRIsVALUE":%"PRIsVALUE":"
                    " previous definition of %"PRIsVALUE" was here",
                    rb_ary_entry(location, 0), rb_ary_entry(location, 1), name);
    }
}
4888
/* `class` keyword: reopen an existing class under `cbase` (validating
 * its superclass) or declare a new one.
 * NOTE(review): the rb_raise(...) call head for the non-Class-superclass
 * error is not visible in this excerpt. */
static VALUE
vm_define_class(ID id, rb_num_t flags, VALUE cbase, VALUE super)
{
    VALUE klass;

    if (VM_DEFINECLASS_HAS_SUPERCLASS_P(flags) && !RB_TYPE_P(super, T_CLASS)) {
                 "superclass must be an instance of Class (given an instance of %"PRIsVALUE")",
                 rb_obj_class(super));
    }

    vm_check_if_namespace(cbase);

    /* find klass */
    rb_autoload_load(cbase, id);
    if ((klass = vm_const_get_under(id, flags, cbase)) != 0) {
        /* reopening: the existing constant must be a compatible class */
        if (!vm_check_if_class(id, flags, super, klass))
            unmatched_redefinition("class", cbase, id, klass);
        return klass;
    }
    else {
        return vm_declare_class(id, flags, cbase, super);
    }
}
4913
4914static VALUE
4915vm_define_module(ID id, rb_num_t flags, VALUE cbase)
4916{
4917 VALUE mod;
4918
4919 vm_check_if_namespace(cbase);
4920 if ((mod = vm_const_get_under(id, flags, cbase)) != 0) {
4921 if (!vm_check_if_module(id, mod))
4922 unmatched_redefinition("module", cbase, id, mod);
4923 return mod;
4924 }
4925 else {
4926 return vm_declare_module(id, cbase);
4927 }
4928}
4929
4930static VALUE
4931vm_find_or_create_class_by_id(ID id,
4932 rb_num_t flags,
4933 VALUE cbase,
4934 VALUE super)
4935{
4936 rb_vm_defineclass_type_t type = VM_DEFINECLASS_TYPE(flags);
4937
4938 switch (type) {
4939 case VM_DEFINECLASS_TYPE_CLASS:
4940 /* classdef returns class scope value */
4941 return vm_define_class(id, flags, cbase, super);
4942
4943 case VM_DEFINECLASS_TYPE_SINGLETON_CLASS:
4944 /* classdef returns class scope value */
4945 return rb_singleton_class(cbase);
4946
4947 case VM_DEFINECLASS_TYPE_MODULE:
4948 /* classdef returns class scope value */
4949 return vm_define_module(id, flags, cbase);
4950
4951 default:
4952 rb_bug("unknown defineclass type: %d", (int)type);
4953 }
4954}
4955
4956static rb_method_visibility_t
4957vm_scope_visibility_get(const rb_execution_context_t *ec)
4958{
4959 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
4960
4961 if (!vm_env_cref_by_cref(cfp->ep)) {
4962 return METHOD_VISI_PUBLIC;
4963 }
4964 else {
4965 return CREF_SCOPE_VISI(vm_ec_cref(ec))->method_visi;
4966 }
4967}
4968
4969static int
4970vm_scope_module_func_check(const rb_execution_context_t *ec)
4971{
4972 const rb_control_frame_t *cfp = rb_vm_get_ruby_level_next_cfp(ec, ec->cfp);
4973
4974 if (!vm_env_cref_by_cref(cfp->ep)) {
4975 return FALSE;
4976 }
4977 else {
4978 return CREF_SCOPE_VISI(vm_ec_cref(ec))->module_func;
4979 }
4980}
4981
/* `definemethod`/`definesmethod` instruction: add `iseqval` as a method
 * named `id` on the appropriate class (or singleton class of `obj`),
 * honoring the scope's default visibility and module_function state. */
static void
vm_define_method(const rb_execution_context_t *ec, VALUE obj, ID id, VALUE iseqval, int is_singleton)
{
    VALUE klass;
    rb_method_visibility_t visi;
    rb_cref_t *cref = vm_ec_cref(ec);

    if (is_singleton) {
        klass = rb_singleton_class(obj); /* class and frozen checked in this API */
        visi = METHOD_VISI_PUBLIC;
    }
    else {
        klass = CREF_CLASS_FOR_DEFINITION(cref);
        visi = vm_scope_visibility_get(ec);
    }

    if (NIL_P(klass)) {
        rb_raise(rb_eTypeError, "no class/module to add method");
    }

    rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, visi);
    // Set max_iv_count on klasses based on number of ivar sets that are in the initialize method
    if (id == rb_intern("initialize") && klass != rb_cObject && RB_TYPE_P(klass, T_CLASS) && (rb_get_alloc_func(klass) == rb_class_allocate_instance)) {

        RCLASS_EXT(klass)->max_iv_count = rb_estimate_iv_count(klass, (const rb_iseq_t *)iseqval);
    }

    if (!is_singleton && vm_scope_module_func_check(ec)) {
        /* module_function: also define a public copy on the singleton class */
        klass = rb_singleton_class(klass);
        rb_add_method_iseq(klass, id, (const rb_iseq_t *)iseqval, cref, METHOD_VISI_PUBLIC);
    }
}
5014
/* `invokeblock` instruction body: yield to the current frame's block.
 * Raises LocalJumpError (no return) when no block is given. */
static VALUE
vm_invokeblock_i(struct rb_execution_context_struct *ec,
                 struct rb_control_frame_struct *reg_cfp,
                 struct rb_calling_info *calling)
{
    const struct rb_callinfo *ci = calling->ci;
    VALUE block_handler = VM_CF_BLOCK_HANDLER(GET_CFP());

    if (block_handler == VM_BLOCK_HANDLER_NONE) {
        rb_vm_localjump_error("no block given (yield)", Qnil, 0);
    }
    else {
        return vm_invoke_block(ec, GET_CFP(), calling, ci, false, block_handler);
    }
}
5030
#ifdef MJIT_HEADER
/* In the MJIT header, vm_sendish() receives its method-search strategy
 * as a function pointer; these are the pointed-to wrappers. */
static const struct rb_callcache *
vm_search_method_wrap(const struct rb_control_frame_struct *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    return vm_search_method((VALUE)reg_cfp->iseq, cd, recv);
}

static const struct rb_callcache *
vm_search_invokeblock(const struct rb_control_frame_struct *reg_cfp, struct rb_call_data *cd, VALUE recv)
{
    /* a static, unmarkable cache whose handler always yields the block */
    static const struct rb_callcache cc = {
        .flags = T_IMEMO | (imemo_callcache << FL_USHIFT) | VM_CALLCACHE_UNMARKABLE,
        .klass = 0,
        .cme_  = 0,
        .call_ = vm_invokeblock_i,
        .aux_  = {0},
    };
    return &cc;
}

# define mexp_search_method vm_search_method_wrap
# define mexp_search_super vm_search_super_method
# define mexp_search_invokeblock vm_search_invokeblock
#else
/* In the VM proper the strategy is an enum so the switch in vm_sendish()
 * can be resolved and inlined at compile time. */
enum method_explorer_type {
    mexp_search_method,
    mexp_search_invokeblock,
    mexp_search_super,
};
#endif
5061
/* Shared implementation of the send / invokesuper / invokeblock
 * instructions: resolve the callee per `method_explorer`, invoke its
 * call handler, and either return the value directly (handler completed)
 * or, when the handler pushed a new ISeq frame (val == Qundef), continue
 * execution via jit_exec()/vm_exec(). */
static
#ifndef MJIT_HEADER
inline
#endif
VALUE
vm_sendish(
    struct rb_execution_context_struct *ec,
    struct rb_control_frame_struct *reg_cfp,
    struct rb_call_data *cd,
    VALUE block_handler,
#ifdef MJIT_HEADER
    const struct rb_callcache *(*method_explorer)(const struct rb_control_frame_struct *cfp, struct rb_call_data *cd, VALUE recv)
#else
    enum method_explorer_type method_explorer
#endif
) {
    VALUE val = Qundef;
    const struct rb_callinfo *ci = cd->ci;
    const struct rb_callcache *cc;
    int argc = vm_ci_argc(ci);
    VALUE recv = TOPN(argc);
    struct rb_calling_info calling = {
        .block_handler = block_handler,
        .kw_splat = IS_ARGS_KW_SPLAT(ci) > 0,
        .recv = recv,
        .argc = argc,
        .ci = ci,
    };

// The enum-based branch and inlining are faster in VM, but function pointers without inlining are faster in JIT.
#ifdef MJIT_HEADER
    calling.cc = cc = method_explorer(GET_CFP(), cd, recv);
    val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
#else
    switch (method_explorer) {
      case mexp_search_method:
        calling.cc = cc = vm_search_method_fastpath((VALUE)reg_cfp->iseq, cd, CLASS_OF(recv));
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_super:
        calling.cc = cc = vm_search_super_method(reg_cfp, cd, recv);
        calling.ci = cd->ci; // TODO: does it safe?
        val = vm_cc_call(cc)(ec, GET_CFP(), &calling);
        break;
      case mexp_search_invokeblock:
        val = vm_invokeblock_i(ec, GET_CFP(), &calling);
        break;
    }
#endif

    if (!UNDEF_P(val)) {
        return val; /* CFUNC normal return */
    }
    else {
        RESTORE_REGS(); /* CFP pushed in cc->call() */
    }

#ifdef MJIT_HEADER
    /* When calling ISeq which may catch an exception from JIT-ed
       code, we should not call jit_exec directly to prevent the
       caller frame from being canceled. That's because the caller
       frame may have stack values in the local variables and the
       cancelling the caller frame will purge them. But directly
       calling jit_exec is faster... */
    if (ISEQ_BODY(GET_ISEQ())->catch_except_p) {
        VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
        return vm_exec(ec, true);
    }
    else if (UNDEF_P(val = jit_exec(ec))) {
        VM_ENV_FLAGS_SET(GET_EP(), VM_FRAME_FLAG_FINISH);
        return vm_exec(ec, false);
    }
    else {
        return val;
    }
#else
    /* When calling from VM, longjmp in the callee won't purge any
       JIT-ed caller frames. So it's safe to directly call jit_exec. */
    return jit_exec(ec);
#endif
}
5143
5144/* object.c */
5145VALUE rb_nil_to_s(VALUE);
5146VALUE rb_true_to_s(VALUE);
5147VALUE rb_false_to_s(VALUE);
5148/* numeric.c */
5149VALUE rb_int_to_s(int argc, VALUE *argv, VALUE x);
5150VALUE rb_fix_to_s(VALUE);
5151/* variable.c */
5152VALUE rb_mod_to_s(VALUE);
5154
5155static VALUE
5156vm_objtostring(const rb_iseq_t *iseq, VALUE recv, CALL_DATA cd)
5157{
5158 int type = TYPE(recv);
5159 if (type == T_STRING) {
5160 return recv;
5161 }
5162
5163 const struct rb_callcache *cc = vm_search_method((VALUE)iseq, cd, recv);
5164
5165 switch (type) {
5166 case T_SYMBOL:
5167 if (check_cfunc(vm_cc_cme(cc), rb_sym_to_s)) {
5168 // rb_sym_to_s() allocates a mutable string, but since we are only
5169 // going to use this string for interpolation, it's fine to use the
5170 // frozen string.
5171 return rb_sym2str(recv);
5172 }
5173 break;
5174 case T_MODULE:
5175 case T_CLASS:
5176 if (check_cfunc(vm_cc_cme(cc), rb_mod_to_s)) {
5177 // rb_mod_to_s() allocates a mutable string, but since we are only
5178 // going to use this string for interpolation, it's fine to use the
5179 // frozen string.
5180 VALUE val = rb_mod_name(recv);
5181 if (NIL_P(val)) {
5182 val = rb_mod_to_s(recv);
5183 }
5184 return val;
5185 }
5186 break;
5187 case T_NIL:
5188 if (check_cfunc(vm_cc_cme(cc), rb_nil_to_s)) {
5189 return rb_nil_to_s(recv);
5190 }
5191 break;
5192 case T_TRUE:
5193 if (check_cfunc(vm_cc_cme(cc), rb_true_to_s)) {
5194 return rb_true_to_s(recv);
5195 }
5196 break;
5197 case T_FALSE:
5198 if (check_cfunc(vm_cc_cme(cc), rb_false_to_s)) {
5199 return rb_false_to_s(recv);
5200 }
5201 break;
5202 case T_FIXNUM:
5203 if (check_cfunc(vm_cc_cme(cc), rb_int_to_s)) {
5204 return rb_fix_to_s(recv);
5205 }
5206 break;
5207 }
5208 return Qundef;
5209}
5210
5211static VALUE
5212vm_opt_str_freeze(VALUE str, int bop, ID id)
5213{
5214 if (BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5215 return str;
5216 }
5217 else {
5218 return Qundef;
5219 }
5220}
5221
5222/* this macro is mandatory to use OPTIMIZED_CMP. What a design! */
5223#define id_cmp idCmp
5224
5225static VALUE
5226vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
5227{
5228 if (BASIC_OP_UNREDEFINED_P(BOP_MAX, ARRAY_REDEFINED_OP_FLAG)) {
5229 if (num == 0) {
5230 return Qnil;
5231 }
5232 else {
5233 VALUE result = *ptr;
5234 rb_snum_t i = num - 1;
5235 while (i-- > 0) {
5236 const VALUE v = *++ptr;
5237 if (OPTIMIZED_CMP(v, result) > 0) {
5238 result = v;
5239 }
5240 }
5241 return result;
5242 }
5243 }
5244 else {
5245 return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMax, 0, NULL, RB_NO_KEYWORDS);
5246 }
5247}
5248
5249VALUE
5250rb_vm_opt_newarray_max(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
5251{
5252 return vm_opt_newarray_max(ec, num, ptr);
5253}
5254
5255static VALUE
5256vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
5257{
5258 if (BASIC_OP_UNREDEFINED_P(BOP_MIN, ARRAY_REDEFINED_OP_FLAG)) {
5259 if (num == 0) {
5260 return Qnil;
5261 }
5262 else {
5263 VALUE result = *ptr;
5264 rb_snum_t i = num - 1;
5265 while (i-- > 0) {
5266 const VALUE v = *++ptr;
5267 if (OPTIMIZED_CMP(v, result) < 0) {
5268 result = v;
5269 }
5270 }
5271 return result;
5272 }
5273 }
5274 else {
5275 return rb_vm_call_with_refinements(ec, rb_ary_new4(num, ptr), idMin, 0, NULL, RB_NO_KEYWORDS);
5276 }
5277}
5278
/* Exported entry point (used by the JITs): delegates to vm_opt_newarray_min(). */
VALUE
rb_vm_opt_newarray_min(rb_execution_context_t *ec, rb_num_t num, const VALUE *ptr)
{
    return vm_opt_newarray_min(ec, num, ptr);
}
5284
5285#undef id_cmp
5286
5287#define IMEMO_CONST_CACHE_SHAREABLE IMEMO_FL_USER0
5288
5289static void
5290vm_track_constant_cache(ID id, void *ic)
5291{
5292 struct rb_id_table *const_cache = GET_VM()->constant_cache;
5293 VALUE lookup_result;
5294 st_table *ics;
5295
5296 if (rb_id_table_lookup(const_cache, id, &lookup_result)) {
5297 ics = (st_table *)lookup_result;
5298 }
5299 else {
5300 ics = st_init_numtable();
5301 rb_id_table_insert(const_cache, id, (VALUE)ics);
5302 }
5303
5304 st_insert(ics, (st_data_t) ic, (st_data_t) Qtrue);
5305}
5306
5307static void
5308vm_ic_track_const_chain(rb_control_frame_t *cfp, IC ic, const ID *segments)
5309{
5310 RB_VM_LOCK_ENTER();
5311
5312 for (int i = 0; segments[i]; i++) {
5313 ID id = segments[i];
5314 if (id == idNULL) continue;
5315 vm_track_constant_cache(id, ic);
5316 }
5317
5318 RB_VM_LOCK_LEAVE();
5319}
5320
5321// For MJIT inlining
5322static inline bool
5323vm_inlined_ic_hit_p(VALUE flags, VALUE value, const rb_cref_t *ic_cref, const VALUE *reg_ep)
5324{
5325 if ((flags & IMEMO_CONST_CACHE_SHAREABLE) || rb_ractor_main_p()) {
5326 VM_ASSERT(ractor_incidental_shareable_p(flags & IMEMO_CONST_CACHE_SHAREABLE, value));
5327
5328 return (ic_cref == NULL || // no need to check CREF
5329 ic_cref == vm_get_cref(reg_ep));
5330 }
5331 return false;
5332}
5333
/* True when cached constant entry `ice` is still valid for the scope given
 * by `reg_ep`. */
static bool
vm_ic_hit_p(const struct iseq_inline_constant_cache_entry *ice, const VALUE *reg_ep)
{
    VM_ASSERT(IMEMO_TYPE_P(ice, imemo_constcache));
    return vm_inlined_ic_hit_p(ice->flags, ice->value, ice->ic_cref, reg_ep);
}
5340
// YJIT needs this function to never allocate and never raise
/* Public wrapper: true when `ic` holds an entry valid for the current scope. */
bool
rb_vm_ic_hit_p(IC ic, const VALUE *reg_ep)
{
    return ic->entry && vm_ic_hit_p(ic->entry, reg_ep);
}
5347
/* Fill inline constant cache `ic` with `val` after a successful lookup.
 * Records the cref used for the lookup so later hits can validate it,
 * flags the entry when `val` is ractor-shareable, and notifies both JITs
 * so compiled code can pick up / invalidate the cache. */
static void
vm_ic_update(const rb_iseq_t *iseq, IC ic, VALUE val, const VALUE *reg_ep, const VALUE *pc)
{
    if (ruby_vm_const_missing_count > 0) {
        /* const_missing ran during the lookup: result is not cacheable,
         * so reset the counter and leave the cache cold. */
        ruby_vm_const_missing_count = 0;
        ic->entry = NULL;
        return;
    }

    struct iseq_inline_constant_cache_entry *ice = (struct iseq_inline_constant_cache_entry *)rb_imemo_new(imemo_constcache, 0, 0, 0, 0);
    RB_OBJ_WRITE(ice, &ice->value, val); /* write barrier: ice -> val */
    ice->ic_cref = vm_get_const_key_cref(reg_ep);
    if (rb_ractor_shareable_p(val)) ice->flags |= IMEMO_CONST_CACHE_SHAREABLE;
    RB_OBJ_WRITE(iseq, &ic->entry, ice); /* write barrier: iseq -> ice */

    RUBY_ASSERT(pc >= ISEQ_BODY(iseq)->iseq_encoded);
    unsigned pos = (unsigned)(pc - ISEQ_BODY(iseq)->iseq_encoded);
    rb_yjit_constant_ic_update(iseq, ic, pos);
    rb_mjit_constant_ic_update(iseq, ic, pos);
}
5368
5369static VALUE
5370vm_once_dispatch(rb_execution_context_t *ec, ISEQ iseq, ISE is)
5371{
5372 rb_thread_t *th = rb_ec_thread_ptr(ec);
5373 rb_thread_t *const RUNNING_THREAD_ONCE_DONE = (rb_thread_t *)(0x1);
5374
5375 again:
5376 if (is->once.running_thread == RUNNING_THREAD_ONCE_DONE) {
5377 return is->once.value;
5378 }
5379 else if (is->once.running_thread == NULL) {
5380 VALUE val;
5381 is->once.running_thread = th;
5382 val = rb_ensure(vm_once_exec, (VALUE)iseq, vm_once_clear, (VALUE)is);
5383 RB_OBJ_WRITE(ec->cfp->iseq, &is->once.value, val);
5384 /* is->once.running_thread is cleared by vm_once_clear() */
5385 is->once.running_thread = RUNNING_THREAD_ONCE_DONE; /* success */
5386 return val;
5387 }
5388 else if (is->once.running_thread == th) {
5389 /* recursive once */
5390 return vm_once_exec((VALUE)iseq);
5391 }
5392 else {
5393 /* waiting for finish */
5394 RUBY_VM_CHECK_INTS(ec);
5396 goto again;
5397 }
5398}
5399
/* Implements opt_case_dispatch: look `key` up in the compile-time CDHASH
 * and return the branch offset to take. Returns `else_offset` when the key
 * is absent, and 0 when the fast dispatch cannot be used at all (type not
 * covered, or #=== redefined on a relevant class), in which case the
 * interpreter falls back to sequential `when` tests. */
static OFFSET
vm_case_dispatch(CDHASH hash, OFFSET else_offset, VALUE key)
{
    switch (OBJ_BUILTIN_TYPE(key)) {
      case -1: /* special constants (nil, true, false, Fixnum, ...) */
      case T_FLOAT:
      case T_SYMBOL:
      case T_BIGNUM:
      case T_STRING:
        if (BASIC_OP_UNREDEFINED_P(BOP_EQQ,
                                   SYMBOL_REDEFINED_OP_FLAG |
                                   INTEGER_REDEFINED_OP_FLAG |
                                   FLOAT_REDEFINED_OP_FLAG |
                                   NIL_REDEFINED_OP_FLAG |
                                   TRUE_REDEFINED_OP_FLAG |
                                   FALSE_REDEFINED_OP_FLAG |
                                   STRING_REDEFINED_OP_FLAG)) {
            st_data_t val;
            if (RB_FLOAT_TYPE_P(key)) {
                double kval = RFLOAT_VALUE(key);
                /* Integral, finite floats compare #== with the equal
                 * Integer, so normalize to Integer before the lookup. */
                if (!isinf(kval) && modf(kval, &kval) == 0.0) {
                    key = FIXABLE(kval) ? LONG2FIX((long)kval) : rb_dbl2big(kval);
                }
            }
            if (rb_hash_stlike_lookup(hash, key, &val)) {
                return FIX2LONG((VALUE)val);
            }
            else {
                return else_offset;
            }
        }
    }
    return 0;
}
5434
5435NORETURN(static void
5436 vm_stack_consistency_error(const rb_execution_context_t *ec,
5437 const rb_control_frame_t *,
5438 const VALUE *));
5439static void
5440vm_stack_consistency_error(const rb_execution_context_t *ec,
5441 const rb_control_frame_t *cfp,
5442 const VALUE *bp)
5443{
5444 const ptrdiff_t nsp = VM_SP_CNT(ec, cfp->sp);
5445 const ptrdiff_t nbp = VM_SP_CNT(ec, bp);
5446 static const char stack_consistency_error[] =
5447 "Stack consistency error (sp: %"PRIdPTRDIFF", bp: %"PRIdPTRDIFF")";
5448#if defined RUBY_DEVEL
5449 VALUE mesg = rb_sprintf(stack_consistency_error, nsp, nbp);
5450 rb_str_cat_cstr(mesg, "\n");
5451 rb_str_append(mesg, rb_iseq_disasm(cfp->iseq));
5453#else
5454 rb_bug(stack_consistency_error, nsp, nbp);
5455#endif
5456}
5457
5458static VALUE
5459vm_opt_plus(VALUE recv, VALUE obj)
5460{
5461 if (FIXNUM_2_P(recv, obj) &&
5462 BASIC_OP_UNREDEFINED_P(BOP_PLUS, INTEGER_REDEFINED_OP_FLAG)) {
5463 return rb_fix_plus_fix(recv, obj);
5464 }
5465 else if (FLONUM_2_P(recv, obj) &&
5466 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
5467 return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
5468 }
5469 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5470 return Qundef;
5471 }
5472 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5473 RBASIC_CLASS(obj) == rb_cFloat &&
5474 BASIC_OP_UNREDEFINED_P(BOP_PLUS, FLOAT_REDEFINED_OP_FLAG)) {
5475 return DBL2NUM(RFLOAT_VALUE(recv) + RFLOAT_VALUE(obj));
5476 }
5477 else if (RBASIC_CLASS(recv) == rb_cString &&
5478 RBASIC_CLASS(obj) == rb_cString &&
5479 BASIC_OP_UNREDEFINED_P(BOP_PLUS, STRING_REDEFINED_OP_FLAG)) {
5480 return rb_str_opt_plus(recv, obj);
5481 }
5482 else if (RBASIC_CLASS(recv) == rb_cArray &&
5483 RBASIC_CLASS(obj) == rb_cArray &&
5484 BASIC_OP_UNREDEFINED_P(BOP_PLUS, ARRAY_REDEFINED_OP_FLAG)) {
5485 return rb_ary_plus(recv, obj);
5486 }
5487 else {
5488 return Qundef;
5489 }
5490}
5491
5492static VALUE
5493vm_opt_minus(VALUE recv, VALUE obj)
5494{
5495 if (FIXNUM_2_P(recv, obj) &&
5496 BASIC_OP_UNREDEFINED_P(BOP_MINUS, INTEGER_REDEFINED_OP_FLAG)) {
5497 return rb_fix_minus_fix(recv, obj);
5498 }
5499 else if (FLONUM_2_P(recv, obj) &&
5500 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
5501 return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
5502 }
5503 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5504 return Qundef;
5505 }
5506 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5507 RBASIC_CLASS(obj) == rb_cFloat &&
5508 BASIC_OP_UNREDEFINED_P(BOP_MINUS, FLOAT_REDEFINED_OP_FLAG)) {
5509 return DBL2NUM(RFLOAT_VALUE(recv) - RFLOAT_VALUE(obj));
5510 }
5511 else {
5512 return Qundef;
5513 }
5514}
5515
5516static VALUE
5517vm_opt_mult(VALUE recv, VALUE obj)
5518{
5519 if (FIXNUM_2_P(recv, obj) &&
5520 BASIC_OP_UNREDEFINED_P(BOP_MULT, INTEGER_REDEFINED_OP_FLAG)) {
5521 return rb_fix_mul_fix(recv, obj);
5522 }
5523 else if (FLONUM_2_P(recv, obj) &&
5524 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
5525 return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
5526 }
5527 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5528 return Qundef;
5529 }
5530 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5531 RBASIC_CLASS(obj) == rb_cFloat &&
5532 BASIC_OP_UNREDEFINED_P(BOP_MULT, FLOAT_REDEFINED_OP_FLAG)) {
5533 return DBL2NUM(RFLOAT_VALUE(recv) * RFLOAT_VALUE(obj));
5534 }
5535 else {
5536 return Qundef;
5537 }
5538}
5539
5540static VALUE
5541vm_opt_div(VALUE recv, VALUE obj)
5542{
5543 if (FIXNUM_2_P(recv, obj) &&
5544 BASIC_OP_UNREDEFINED_P(BOP_DIV, INTEGER_REDEFINED_OP_FLAG)) {
5545 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_div_fix(recv, obj);
5546 }
5547 else if (FLONUM_2_P(recv, obj) &&
5548 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
5549 return rb_flo_div_flo(recv, obj);
5550 }
5551 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5552 return Qundef;
5553 }
5554 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5555 RBASIC_CLASS(obj) == rb_cFloat &&
5556 BASIC_OP_UNREDEFINED_P(BOP_DIV, FLOAT_REDEFINED_OP_FLAG)) {
5557 return rb_flo_div_flo(recv, obj);
5558 }
5559 else {
5560 return Qundef;
5561 }
5562}
5563
5564static VALUE
5565vm_opt_mod(VALUE recv, VALUE obj)
5566{
5567 if (FIXNUM_2_P(recv, obj) &&
5568 BASIC_OP_UNREDEFINED_P(BOP_MOD, INTEGER_REDEFINED_OP_FLAG)) {
5569 return (FIX2LONG(obj) == 0) ? Qundef : rb_fix_mod_fix(recv, obj);
5570 }
5571 else if (FLONUM_2_P(recv, obj) &&
5572 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
5573 return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
5574 }
5575 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5576 return Qundef;
5577 }
5578 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5579 RBASIC_CLASS(obj) == rb_cFloat &&
5580 BASIC_OP_UNREDEFINED_P(BOP_MOD, FLOAT_REDEFINED_OP_FLAG)) {
5581 return DBL2NUM(ruby_float_mod(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj)));
5582 }
5583 else {
5584 return Qundef;
5585 }
5586}
5587
5588static VALUE
5589vm_opt_neq(const rb_iseq_t *iseq, CALL_DATA cd, CALL_DATA cd_eq, VALUE recv, VALUE obj)
5590{
5591 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not_equal)) {
5592 VALUE val = opt_equality(iseq, recv, obj, cd_eq);
5593
5594 if (!UNDEF_P(val)) {
5595 return RBOOL(!RTEST(val));
5596 }
5597 }
5598
5599 return Qundef;
5600}
5601
5602static VALUE
5603vm_opt_lt(VALUE recv, VALUE obj)
5604{
5605 if (FIXNUM_2_P(recv, obj) &&
5606 BASIC_OP_UNREDEFINED_P(BOP_LT, INTEGER_REDEFINED_OP_FLAG)) {
5607 return RBOOL((SIGNED_VALUE)recv < (SIGNED_VALUE)obj);
5608 }
5609 else if (FLONUM_2_P(recv, obj) &&
5610 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
5611 return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
5612 }
5613 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5614 return Qundef;
5615 }
5616 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5617 RBASIC_CLASS(obj) == rb_cFloat &&
5618 BASIC_OP_UNREDEFINED_P(BOP_LT, FLOAT_REDEFINED_OP_FLAG)) {
5619 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
5620 return RBOOL(RFLOAT_VALUE(recv) < RFLOAT_VALUE(obj));
5621 }
5622 else {
5623 return Qundef;
5624 }
5625}
5626
5627static VALUE
5628vm_opt_le(VALUE recv, VALUE obj)
5629{
5630 if (FIXNUM_2_P(recv, obj) &&
5631 BASIC_OP_UNREDEFINED_P(BOP_LE, INTEGER_REDEFINED_OP_FLAG)) {
5632 return RBOOL((SIGNED_VALUE)recv <= (SIGNED_VALUE)obj);
5633 }
5634 else if (FLONUM_2_P(recv, obj) &&
5635 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
5636 return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
5637 }
5638 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5639 return Qundef;
5640 }
5641 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5642 RBASIC_CLASS(obj) == rb_cFloat &&
5643 BASIC_OP_UNREDEFINED_P(BOP_LE, FLOAT_REDEFINED_OP_FLAG)) {
5644 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
5645 return RBOOL(RFLOAT_VALUE(recv) <= RFLOAT_VALUE(obj));
5646 }
5647 else {
5648 return Qundef;
5649 }
5650}
5651
5652static VALUE
5653vm_opt_gt(VALUE recv, VALUE obj)
5654{
5655 if (FIXNUM_2_P(recv, obj) &&
5656 BASIC_OP_UNREDEFINED_P(BOP_GT, INTEGER_REDEFINED_OP_FLAG)) {
5657 return RBOOL((SIGNED_VALUE)recv > (SIGNED_VALUE)obj);
5658 }
5659 else if (FLONUM_2_P(recv, obj) &&
5660 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5661 return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
5662 }
5663 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5664 return Qundef;
5665 }
5666 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5667 RBASIC_CLASS(obj) == rb_cFloat &&
5668 BASIC_OP_UNREDEFINED_P(BOP_GT, FLOAT_REDEFINED_OP_FLAG)) {
5669 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
5670 return RBOOL(RFLOAT_VALUE(recv) > RFLOAT_VALUE(obj));
5671 }
5672 else {
5673 return Qundef;
5674 }
5675}
5676
5677static VALUE
5678vm_opt_ge(VALUE recv, VALUE obj)
5679{
5680 if (FIXNUM_2_P(recv, obj) &&
5681 BASIC_OP_UNREDEFINED_P(BOP_GE, INTEGER_REDEFINED_OP_FLAG)) {
5682 return RBOOL((SIGNED_VALUE)recv >= (SIGNED_VALUE)obj);
5683 }
5684 else if (FLONUM_2_P(recv, obj) &&
5685 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5686 return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
5687 }
5688 else if (SPECIAL_CONST_P(recv) || SPECIAL_CONST_P(obj)) {
5689 return Qundef;
5690 }
5691 else if (RBASIC_CLASS(recv) == rb_cFloat &&
5692 RBASIC_CLASS(obj) == rb_cFloat &&
5693 BASIC_OP_UNREDEFINED_P(BOP_GE, FLOAT_REDEFINED_OP_FLAG)) {
5694 CHECK_CMP_NAN(RFLOAT_VALUE(recv), RFLOAT_VALUE(obj));
5695 return RBOOL(RFLOAT_VALUE(recv) >= RFLOAT_VALUE(obj));
5696 }
5697 else {
5698 return Qundef;
5699 }
5700}
5701
5702
5703static VALUE
5704vm_opt_ltlt(VALUE recv, VALUE obj)
5705{
5706 if (SPECIAL_CONST_P(recv)) {
5707 return Qundef;
5708 }
5709 else if (RBASIC_CLASS(recv) == rb_cString &&
5710 BASIC_OP_UNREDEFINED_P(BOP_LTLT, STRING_REDEFINED_OP_FLAG)) {
5711 if (LIKELY(RB_TYPE_P(obj, T_STRING))) {
5712 return rb_str_buf_append(recv, obj);
5713 }
5714 else {
5715 return rb_str_concat(recv, obj);
5716 }
5717 }
5718 else if (RBASIC_CLASS(recv) == rb_cArray &&
5719 BASIC_OP_UNREDEFINED_P(BOP_LTLT, ARRAY_REDEFINED_OP_FLAG)) {
5720 return rb_ary_push(recv, obj);
5721 }
5722 else {
5723 return Qundef;
5724 }
5725}
5726
5727static VALUE
5728vm_opt_and(VALUE recv, VALUE obj)
5729{
5730 // If recv and obj are both fixnums, then the bottom tag bit
5731 // will be 1 on both. 1 & 1 == 1, so the result value will also
5732 // be a fixnum. If either side is *not* a fixnum, then the tag bit
5733 // will be 0, and we return Qundef.
5734 VALUE ret = ((SIGNED_VALUE) recv) & ((SIGNED_VALUE) obj);
5735
5736 if (FIXNUM_P(ret) &&
5737 BASIC_OP_UNREDEFINED_P(BOP_AND, INTEGER_REDEFINED_OP_FLAG)) {
5738 return ret;
5739 }
5740 else {
5741 return Qundef;
5742 }
5743}
5744
5745static VALUE
5746vm_opt_or(VALUE recv, VALUE obj)
5747{
5748 if (FIXNUM_2_P(recv, obj) &&
5749 BASIC_OP_UNREDEFINED_P(BOP_OR, INTEGER_REDEFINED_OP_FLAG)) {
5750 return recv | obj;
5751 }
5752 else {
5753 return Qundef;
5754 }
5755}
5756
5757static VALUE
5758vm_opt_aref(VALUE recv, VALUE obj)
5759{
5760 if (SPECIAL_CONST_P(recv)) {
5761 if (FIXNUM_2_P(recv, obj) &&
5762 BASIC_OP_UNREDEFINED_P(BOP_AREF, INTEGER_REDEFINED_OP_FLAG)) {
5763 return rb_fix_aref(recv, obj);
5764 }
5765 return Qundef;
5766 }
5767 else if (RBASIC_CLASS(recv) == rb_cArray &&
5768 BASIC_OP_UNREDEFINED_P(BOP_AREF, ARRAY_REDEFINED_OP_FLAG)) {
5769 if (FIXNUM_P(obj)) {
5770 return rb_ary_entry_internal(recv, FIX2LONG(obj));
5771 }
5772 else {
5773 return rb_ary_aref1(recv, obj);
5774 }
5775 }
5776 else if (RBASIC_CLASS(recv) == rb_cHash &&
5777 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG)) {
5778 return rb_hash_aref(recv, obj);
5779 }
5780 else {
5781 return Qundef;
5782 }
5783}
5784
5785static VALUE
5786vm_opt_aset(VALUE recv, VALUE obj, VALUE set)
5787{
5788 if (SPECIAL_CONST_P(recv)) {
5789 return Qundef;
5790 }
5791 else if (RBASIC_CLASS(recv) == rb_cArray &&
5792 BASIC_OP_UNREDEFINED_P(BOP_ASET, ARRAY_REDEFINED_OP_FLAG) &&
5793 FIXNUM_P(obj)) {
5794 rb_ary_store(recv, FIX2LONG(obj), set);
5795 return set;
5796 }
5797 else if (RBASIC_CLASS(recv) == rb_cHash &&
5798 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG)) {
5799 rb_hash_aset(recv, obj, set);
5800 return set;
5801 }
5802 else {
5803 return Qundef;
5804 }
5805}
5806
5807static VALUE
5808vm_opt_aref_with(VALUE recv, VALUE key)
5809{
5810 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
5811 BASIC_OP_UNREDEFINED_P(BOP_AREF, HASH_REDEFINED_OP_FLAG) &&
5812 rb_hash_compare_by_id_p(recv) == Qfalse &&
5813 !FL_TEST(recv, RHASH_PROC_DEFAULT)) {
5814 return rb_hash_aref(recv, key);
5815 }
5816 else {
5817 return Qundef;
5818 }
5819}
5820
5821static VALUE
5822vm_opt_aset_with(VALUE recv, VALUE key, VALUE val)
5823{
5824 if (!SPECIAL_CONST_P(recv) && RBASIC_CLASS(recv) == rb_cHash &&
5825 BASIC_OP_UNREDEFINED_P(BOP_ASET, HASH_REDEFINED_OP_FLAG) &&
5826 rb_hash_compare_by_id_p(recv) == Qfalse) {
5827 return rb_hash_aset(recv, key, val);
5828 }
5829 else {
5830 return Qundef;
5831 }
5832}
5833
5834static VALUE
5835vm_opt_length(VALUE recv, int bop)
5836{
5837 if (SPECIAL_CONST_P(recv)) {
5838 return Qundef;
5839 }
5840 else if (RBASIC_CLASS(recv) == rb_cString &&
5841 BASIC_OP_UNREDEFINED_P(bop, STRING_REDEFINED_OP_FLAG)) {
5842 if (bop == BOP_EMPTY_P) {
5843 return LONG2NUM(RSTRING_LEN(recv));
5844 }
5845 else {
5846 return rb_str_length(recv);
5847 }
5848 }
5849 else if (RBASIC_CLASS(recv) == rb_cArray &&
5850 BASIC_OP_UNREDEFINED_P(bop, ARRAY_REDEFINED_OP_FLAG)) {
5851 return LONG2NUM(RARRAY_LEN(recv));
5852 }
5853 else if (RBASIC_CLASS(recv) == rb_cHash &&
5854 BASIC_OP_UNREDEFINED_P(bop, HASH_REDEFINED_OP_FLAG)) {
5855 return INT2FIX(RHASH_SIZE(recv));
5856 }
5857 else {
5858 return Qundef;
5859 }
5860}
5861
5862static VALUE
5863vm_opt_empty_p(VALUE recv)
5864{
5865 switch (vm_opt_length(recv, BOP_EMPTY_P)) {
5866 case Qundef: return Qundef;
5867 case INT2FIX(0): return Qtrue;
5868 default: return Qfalse;
5869 }
5870}
5871
5872VALUE rb_false(VALUE obj);
5873
5874static VALUE
5875vm_opt_nil_p(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
5876{
5877 if (NIL_P(recv) &&
5878 BASIC_OP_UNREDEFINED_P(BOP_NIL_P, NIL_REDEFINED_OP_FLAG)) {
5879 return Qtrue;
5880 }
5881 else if (vm_method_cfunc_is(iseq, cd, recv, rb_false)) {
5882 return Qfalse;
5883 }
5884 else {
5885 return Qundef;
5886 }
5887}
5888
/* Compute `x.succ` for a Fixnum-tagged VALUE without untagging it.
 * The two boundary encodings are special-cased; the default arm adds 2
 * directly to the tagged word (derivation in the comment below). */
static VALUE
fix_succ(VALUE x)
{
    switch (x) {
      case ~0UL:
        /* 0xFFFF_FFFF == INT2FIX(-1)
         * `-1.succ` is of course 0. */
        return INT2FIX(0);
      case RSHIFT(~0UL, 1):
        /* 0x7FFF_FFFF == LONG2FIX(0x3FFF_FFFF)
         * 0x3FFF_FFFF + 1 == 0x4000_0000, which is a Bignum. */
        return rb_uint2big(1UL << (SIZEOF_LONG * CHAR_BIT - 2));
      default:
        /* LONG2FIX(FIX2LONG(x)+FIX2LONG(y))
         * == ((lx*2+1)/2 + (ly*2+1)/2)*2+1
         * == lx*2 + ly*2 + 1
         * == (lx*2+1) + (ly*2+1) - 1
         * == x + y - 1
         *
         * Here, if we put y := INT2FIX(1):
         *
         * == x + INT2FIX(1) - 1
         * == x + 2 .
         */
        return x + 2;
    }
}
5916
5917static VALUE
5918vm_opt_succ(VALUE recv)
5919{
5920 if (FIXNUM_P(recv) &&
5921 BASIC_OP_UNREDEFINED_P(BOP_SUCC, INTEGER_REDEFINED_OP_FLAG)) {
5922 return fix_succ(recv);
5923 }
5924 else if (SPECIAL_CONST_P(recv)) {
5925 return Qundef;
5926 }
5927 else if (RBASIC_CLASS(recv) == rb_cString &&
5928 BASIC_OP_UNREDEFINED_P(BOP_SUCC, STRING_REDEFINED_OP_FLAG)) {
5929 return rb_str_succ(recv);
5930 }
5931 else {
5932 return Qundef;
5933 }
5934}
5935
5936static VALUE
5937vm_opt_not(const rb_iseq_t *iseq, CALL_DATA cd, VALUE recv)
5938{
5939 if (vm_method_cfunc_is(iseq, cd, recv, rb_obj_not)) {
5940 return RBOOL(!RTEST(recv));
5941 }
5942 else {
5943 return Qundef;
5944 }
5945}
5946
5947static VALUE
5948vm_opt_regexpmatch2(VALUE recv, VALUE obj)
5949{
5950 if (SPECIAL_CONST_P(recv)) {
5951 return Qundef;
5952 }
5953 else if (RBASIC_CLASS(recv) == rb_cString &&
5954 CLASS_OF(obj) == rb_cRegexp &&
5955 BASIC_OP_UNREDEFINED_P(BOP_MATCH, STRING_REDEFINED_OP_FLAG)) {
5956 return rb_reg_match(obj, recv);
5957 }
5958 else if (RBASIC_CLASS(recv) == rb_cRegexp &&
5959 BASIC_OP_UNREDEFINED_P(BOP_MATCH, REGEXP_REDEFINED_OP_FLAG)) {
5960 return rb_reg_match(recv, obj);
5961 }
5962 else {
5963 return Qundef;
5964 }
5965}
5966
5967rb_event_flag_t rb_iseq_event_flags(const rb_iseq_t *iseq, size_t pos);
5968
5969NOINLINE(static void vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp));
5970
/* Fire a single trace event (`target_event`, already known to be enabled)
 * for both the ractor-global hook list and the given local hook list.
 * `local_hooks_ptr` is double-indirect because running the global hooks
 * may add or free the local hook list. */
static inline void
vm_trace_hook(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const VALUE *pc,
              rb_event_flag_t pc_events, rb_event_flag_t target_event,
              rb_hook_list_t *global_hooks, rb_hook_list_t *const *local_hooks_ptr, VALUE val)
{
    rb_event_flag_t event = pc_events & target_event;
    VALUE self = GET_SELF();

    /* exactly one event bit must be set at this point */
    VM_ASSERT(rb_popcount64((uint64_t)event) == 1);

    if (event & global_hooks->events) {
        /* increment PC because source line is calculated with PC-1 */
        reg_cfp->pc++;
        vm_dtrace(event, ec);
        rb_exec_event_hook_orig(ec, global_hooks, event, self, 0, 0, 0 , val, 0);
        reg_cfp->pc--;
    }

    // Load here since global hook above can add and free local hooks
    rb_hook_list_t *local_hooks = *local_hooks_ptr;
    if (local_hooks != NULL) {
        if (event & local_hooks->events) {
            /* increment PC because source line is calculated with PC-1 */
            reg_cfp->pc++;
            rb_exec_event_hook_orig(ec, local_hooks, event, self, 0, 0, 0 , val, 0);
            reg_cfp->pc--;
        }
    }
}
6000
// Return true if given cc has cfunc which is NOT handled by opt_send_without_block.
/* Used by the JITs to decide whether an optimized instruction's fallback
 * cfunc is one of the specially handled defaults. */
bool
rb_vm_opt_cfunc_p(CALL_CACHE cc, int insn)
{
    switch (insn) {
      case BIN(opt_eq):
        return check_cfunc(vm_cc_cme(cc), rb_obj_equal);
      case BIN(opt_nil_p):
        return check_cfunc(vm_cc_cme(cc), rb_false);
      case BIN(opt_not):
        return check_cfunc(vm_cc_cme(cc), rb_obj_not);
      default:
        return false;
    }
}
6016
/* Fire `target_event` via vm_trace_hook() iff it is both present at the
 * current PC and enabled by some hook; `val` is only evaluated when the
 * event actually fires (it is an argument to the guarded call). */
#define VM_TRACE_HOOK(target_event, val) do { \
    if ((pc_events & (target_event)) & enabled_flags) { \
        vm_trace_hook(ec, reg_cfp, pc, pc_events, (target_event), global_hooks, local_hooks_ptr, (val)); \
    } \
} while (0)
6022
6023static void
6024vm_trace(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp)
6025{
6026 const VALUE *pc = reg_cfp->pc;
6027 rb_event_flag_t enabled_flags = ruby_vm_event_flags & ISEQ_TRACE_EVENTS;
6028 rb_event_flag_t global_events = enabled_flags;
6029
6030 if (enabled_flags == 0 && ruby_vm_event_local_num == 0) {
6031 return;
6032 }
6033 else {
6034 const rb_iseq_t *iseq = reg_cfp->iseq;
6035 VALUE iseq_val = (VALUE)iseq;
6036 size_t pos = pc - ISEQ_BODY(iseq)->iseq_encoded;
6037 rb_event_flag_t pc_events = rb_iseq_event_flags(iseq, pos);
6038 rb_hook_list_t *local_hooks = iseq->aux.exec.local_hooks;
6039 rb_hook_list_t *const *local_hooks_ptr = &iseq->aux.exec.local_hooks;
6040 rb_event_flag_t iseq_local_events = local_hooks != NULL ? local_hooks->events : 0;
6041 rb_hook_list_t *bmethod_local_hooks = NULL;
6042 rb_hook_list_t **bmethod_local_hooks_ptr = NULL;
6043 rb_event_flag_t bmethod_local_events = 0;
6044 const bool bmethod_frame = VM_FRAME_BMETHOD_P(reg_cfp);
6045 enabled_flags |= iseq_local_events;
6046
6047 VM_ASSERT((iseq_local_events & ~ISEQ_TRACE_EVENTS) == 0);
6048
6049 if (bmethod_frame) {
6050 const rb_callable_method_entry_t *me = rb_vm_frame_method_entry(reg_cfp);
6051 VM_ASSERT(me->def->type == VM_METHOD_TYPE_BMETHOD);
6052 bmethod_local_hooks = me->def->body.bmethod.hooks;
6053 bmethod_local_hooks_ptr = &me->def->body.bmethod.hooks;
6054 if (bmethod_local_hooks) {
6055 bmethod_local_events = bmethod_local_hooks->events;
6056 }
6057 }
6058
6059
6060 if ((pc_events & enabled_flags) == 0 && !bmethod_frame) {
6061#if 0
6062 /* disable trace */
6063 /* TODO: incomplete */
6064 rb_iseq_trace_set(iseq, vm_event_flags & ISEQ_TRACE_EVENTS);
6065#else
6066 /* do not disable trace because of performance problem
6067 * (re-enable overhead)
6068 */
6069#endif
6070 return;
6071 }
6072 else if (ec->trace_arg != NULL) {
6073 /* already tracing */
6074 return;
6075 }
6076 else {
6077 rb_hook_list_t *global_hooks = rb_ec_ractor_hooks(ec);
6078 /* Note, not considering iseq local events here since the same
6079 * iseq could be used in multiple bmethods. */
6080 rb_event_flag_t bmethod_events = global_events | bmethod_local_events;
6081
6082 if (0) {
6083 ruby_debug_printf("vm_trace>>%4d (%4x) - %s:%d %s\n",
6084 (int)pos,
6085 (int)pc_events,
6086 RSTRING_PTR(rb_iseq_path(iseq)),
6087 (int)rb_iseq_line_no(iseq, pos),
6088 RSTRING_PTR(rb_iseq_label(iseq)));
6089 }
6090 VM_ASSERT(reg_cfp->pc == pc);
6091 VM_ASSERT(pc_events != 0);
6092
6093 /* check traces */
6094 if ((pc_events & RUBY_EVENT_B_CALL) && bmethod_frame && (bmethod_events & RUBY_EVENT_CALL)) {
6095 /* b_call instruction running as a method. Fire call event. */
6096 vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_CALL, RUBY_EVENT_CALL, global_hooks, bmethod_local_hooks_ptr, Qundef);
6097 }
6099 VM_TRACE_HOOK(RUBY_EVENT_LINE, Qundef);
6100 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_LINE, Qundef);
6101 VM_TRACE_HOOK(RUBY_EVENT_COVERAGE_BRANCH, Qundef);
6102 VM_TRACE_HOOK(RUBY_EVENT_END | RUBY_EVENT_RETURN | RUBY_EVENT_B_RETURN, TOPN(0));
6103 if ((pc_events & RUBY_EVENT_B_RETURN) && bmethod_frame && (bmethod_events & RUBY_EVENT_RETURN)) {
6104 /* b_return instruction running as a method. Fire return event. */
6105 vm_trace_hook(ec, reg_cfp, pc, RUBY_EVENT_RETURN, RUBY_EVENT_RETURN, global_hooks, bmethod_local_hooks_ptr, TOPN(0));
6106 }
6107
6108 // Pin the iseq since `local_hooks_ptr` points inside the iseq's slot on the GC heap.
6109 // We need the pointer to stay valid in case compaction happens in a trace hook.
6110 //
6111 // Similar treatment is unnecessary for `bmethod_local_hooks_ptr` since
6112 // storage for `rb_method_definition_t` is not on the GC heap.
6113 RB_GC_GUARD(iseq_val);
6114 }
6115 }
6116}
6117#undef VM_TRACE_HOOK
6118
6119#if VM_CHECK_MODE > 0
6120NORETURN( NOINLINE( COLDFUNC
6121void rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)));
6122
/* Seed the stack-canary value with random bytes. The low bit is forced on
 * so the canary is a valid Fixnum VALUE and never mistaken for a pointer. */
void
Init_vm_stack_canary(void)
{
    /* This has to be called _after_ our PRNG is properly set up. */
    int n = ruby_fill_random_bytes(&vm_stack_canary, sizeof vm_stack_canary, false);
    vm_stack_canary |= 0x01; // valid VALUE (Fixnum)

    vm_stack_canary_was_born = true;
    VM_ASSERT(n == 0);
}
6133
#ifndef MJIT_HEADER
/* Abort with diagnostics when a stack canary was overwritten: report the
 * instruction name and the inspected clobbering value. */
MJIT_FUNC_EXPORTED void
rb_vm_canary_is_found_dead(enum ruby_vminsn_type i, VALUE c)
{
    /* Because a method has already been called, why not call
     * another one. */
    const char *insn = rb_insns_name(i);
    VALUE inspection = rb_inspect(c);
    const char *str = StringValueCStr(inspection);

    rb_bug("dead canary found at %s: %s", insn, str);
}
#endif
6147
6148#else
6149void Init_vm_stack_canary(void) { /* nothing to do */ }
6150#endif
6151
6152
6153/* a part of the following code is generated by this ruby script:
6154
615516.times{|i|
6156 typedef_args = (0...i).map{|j| "VALUE v#{j+1}"}.join(", ")
6157 typedef_args.prepend(", ") if i != 0
6158 call_args = (0...i).map{|j| "argv[#{j}]"}.join(", ")
6159 call_args.prepend(", ") if i != 0
6160 puts %Q{
6161static VALUE
6162builtin_invoker#{i}(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6163{
6164 typedef VALUE (*rb_invoke_funcptr#{i}_t)(rb_execution_context_t *ec, VALUE self#{typedef_args});
6165 return (*(rb_invoke_funcptr#{i}_t)funcptr)(ec, self#{call_args});
6166}}
6167}
6168
6169puts
6170puts "static VALUE (* const cfunc_invokers[])(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr) = {"
617116.times{|i|
6172 puts " builtin_invoker#{i},"
6173}
6174puts "};"
6175*/
6176
/* builtin_invokerN: trampolines that cast the generic rb_insn_func_t
 * pointer to an N-positional-argument signature and invoke it with
 * argv[0..N-1]. Generated by the Ruby script in the comment above —
 * regenerate rather than editing individual invokers. */
static VALUE
builtin_invoker0(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr0_t)(rb_execution_context_t *ec, VALUE self);
    return (*(rb_invoke_funcptr0_t)funcptr)(ec, self);
}

static VALUE
builtin_invoker1(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr1_t)(rb_execution_context_t *ec, VALUE self, VALUE v1);
    return (*(rb_invoke_funcptr1_t)funcptr)(ec, self, argv[0]);
}

static VALUE
builtin_invoker2(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr2_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2);
    return (*(rb_invoke_funcptr2_t)funcptr)(ec, self, argv[0], argv[1]);
}

static VALUE
builtin_invoker3(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr3_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3);
    return (*(rb_invoke_funcptr3_t)funcptr)(ec, self, argv[0], argv[1], argv[2]);
}

static VALUE
builtin_invoker4(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr4_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4);
    return (*(rb_invoke_funcptr4_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3]);
}

static VALUE
builtin_invoker5(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr5_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5);
    return (*(rb_invoke_funcptr5_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4]);
}

static VALUE
builtin_invoker6(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr6_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6);
    return (*(rb_invoke_funcptr6_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5]);
}

static VALUE
builtin_invoker7(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr7_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7);
    return (*(rb_invoke_funcptr7_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6]);
}

static VALUE
builtin_invoker8(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr8_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8);
    return (*(rb_invoke_funcptr8_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7]);
}

static VALUE
builtin_invoker9(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr9_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9);
    return (*(rb_invoke_funcptr9_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8]);
}

static VALUE
builtin_invoker10(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr10_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10);
    return (*(rb_invoke_funcptr10_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9]);
}

static VALUE
builtin_invoker11(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr11_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11);
    return (*(rb_invoke_funcptr11_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10]);
}

static VALUE
builtin_invoker12(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr12_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12);
    return (*(rb_invoke_funcptr12_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11]);
}

static VALUE
builtin_invoker13(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr13_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13);
    return (*(rb_invoke_funcptr13_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12]);
}

static VALUE
builtin_invoker14(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
{
    typedef VALUE (*rb_invoke_funcptr14_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14);
    return (*(rb_invoke_funcptr14_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13]);
}
6281
6282static VALUE
6283builtin_invoker15(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr)
6284{
6285 typedef VALUE (*rb_invoke_funcptr15_t)(rb_execution_context_t *ec, VALUE self, VALUE v1, VALUE v2, VALUE v3, VALUE v4, VALUE v5, VALUE v6, VALUE v7, VALUE v8, VALUE v9, VALUE v10, VALUE v11, VALUE v12, VALUE v13, VALUE v14, VALUE v15);
6286 return (*(rb_invoke_funcptr15_t)funcptr)(ec, self, argv[0], argv[1], argv[2], argv[3], argv[4], argv[5], argv[6], argv[7], argv[8], argv[9], argv[10], argv[11], argv[12], argv[13], argv[14]);
6287}
6288
6289typedef VALUE (*builtin_invoker)(rb_execution_context_t *ec, VALUE self, const VALUE *argv, rb_insn_func_t funcptr);
6290
6291static builtin_invoker
6292lookup_builtin_invoker(int argc)
6293{
6294 static const builtin_invoker invokers[] = {
6295 builtin_invoker0,
6296 builtin_invoker1,
6297 builtin_invoker2,
6298 builtin_invoker3,
6299 builtin_invoker4,
6300 builtin_invoker5,
6301 builtin_invoker6,
6302 builtin_invoker7,
6303 builtin_invoker8,
6304 builtin_invoker9,
6305 builtin_invoker10,
6306 builtin_invoker11,
6307 builtin_invoker12,
6308 builtin_invoker13,
6309 builtin_invoker14,
6310 builtin_invoker15,
6311 };
6312
6313 return invokers[argc];
6314}
6315
6316static inline VALUE
6317invoke_bf(rb_execution_context_t *ec, rb_control_frame_t *reg_cfp, const struct rb_builtin_function* bf, const VALUE *argv)
6318{
6319 const bool canary_p = ISEQ_BODY(reg_cfp->iseq)->builtin_inline_p; // Verify an assumption of `Primitive.attr! 'inline'`
6320 SETUP_CANARY(canary_p);
6321 VALUE ret = (*lookup_builtin_invoker(bf->argc))(ec, reg_cfp->self, argv, (rb_insn_func_t)bf->func_ptr);
6322 CHECK_CANARY(canary_p, BIN(invokebuiltin));
6323 return ret;
6324}
6325
6326static VALUE
6327vm_invoke_builtin(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function* bf, const VALUE *argv)
6328{
6329 return invoke_bf(ec, cfp, bf, argv);
6330}
6331
6332static VALUE
6333vm_invoke_builtin_delegate(rb_execution_context_t *ec, rb_control_frame_t *cfp, const struct rb_builtin_function *bf, unsigned int start_index)
6334{
6335 if (0) { // debug print
6336 fputs("vm_invoke_builtin_delegate: passing -> ", stderr);
6337 for (int i=0; i<bf->argc; i++) {
6338 ruby_debug_printf(":%s ", rb_id2name(ISEQ_BODY(cfp->iseq)->local_table[i+start_index]));
6339 }
6340 ruby_debug_printf("\n" "%s %s(%d):%p\n", RUBY_FUNCTION_NAME_STRING, bf->name, bf->argc, bf->func_ptr);
6341 }
6342
6343 if (bf->argc == 0) {
6344 return invoke_bf(ec, cfp, bf, NULL);
6345 }
6346 else {
6347 const VALUE *argv = cfp->ep - ISEQ_BODY(cfp->iseq)->local_table_size - VM_ENV_DATA_SIZE + 1 + start_index;
6348 return invoke_bf(ec, cfp, bf, argv);
6349 }
6350}
6351
6352// for __builtin_inline!()
6353
6354VALUE
6355rb_vm_lvar_exposed(rb_execution_context_t *ec, int index)
6356{
6357 const rb_control_frame_t *cfp = ec->cfp;
6358 return cfp->ep[index];
6359}
#define RUBY_ASSERT(expr)
Asserts that the given expression is truthy if and only if RUBY_DEBUG is truthy.
Definition assert.h:177
#define RUBY_EVENT_END
Encountered an end of a class clause.
Definition event.h:36
#define RUBY_EVENT_C_CALL
A method, written in C, is called.
Definition event.h:39
#define RUBY_EVENT_B_RETURN
Encountered a next statement.
Definition event.h:52
#define RUBY_EVENT_CLASS
Encountered a new class.
Definition event.h:35
#define RUBY_EVENT_LINE
Encountered a new line.
Definition event.h:34
#define RUBY_EVENT_RETURN
Encountered a return statement.
Definition event.h:38
#define RUBY_EVENT_C_RETURN
Return from a method, written in C.
Definition event.h:40
#define RUBY_EVENT_B_CALL
Encountered a yield statement.
Definition event.h:51
uint32_t rb_event_flag_t
Represents event(s).
Definition event.h:103
#define RUBY_EVENT_CALL
A method, written in Ruby, is called.
Definition event.h:37
VALUE rb_singleton_class(VALUE obj)
Finds or creates the singleton class of the passed object.
Definition class.c:2241
VALUE rb_module_new(void)
Creates a new, anonymous module.
Definition class.c:1019
VALUE rb_class_inherited(VALUE super, VALUE klass)
Calls Class#inherited.
Definition class.c:914
VALUE rb_define_class_id(ID id, VALUE super)
This is a very badly designed API that creates an anonymous class.
Definition class.c:893
#define TYPE(_)
Old name of rb_type.
Definition value_type.h:107
#define FL_SINGLETON
Old name of RUBY_FL_SINGLETON.
Definition fl_type.h:58
#define FL_EXIVAR
Old name of RUBY_FL_EXIVAR.
Definition fl_type.h:67
#define REALLOC_N
Old name of RB_REALLOC_N.
Definition memory.h:397
#define ALLOC
Old name of RB_ALLOC.
Definition memory.h:394
#define RFLOAT_VALUE
Old name of rb_float_value.
Definition double.h:28
#define T_STRING
Old name of RUBY_T_STRING.
Definition value_type.h:78
#define Qundef
Old name of RUBY_Qundef.
#define INT2FIX
Old name of RB_INT2FIX.
Definition long.h:48
#define T_NIL
Old name of RUBY_T_NIL.
Definition value_type.h:72
#define T_FLOAT
Old name of RUBY_T_FLOAT.
Definition value_type.h:64
#define T_IMEMO
Old name of RUBY_T_IMEMO.
Definition value_type.h:67
#define ID2SYM
Old name of RB_ID2SYM.
Definition symbol.h:44
#define T_BIGNUM
Old name of RUBY_T_BIGNUM.
Definition value_type.h:57
#define SPECIAL_CONST_P
Old name of RB_SPECIAL_CONST_P.
#define T_STRUCT
Old name of RUBY_T_STRUCT.
Definition value_type.h:79
#define T_FIXNUM
Old name of RUBY_T_FIXNUM.
Definition value_type.h:63
#define SYM2ID
Old name of RB_SYM2ID.
Definition symbol.h:45
#define CLASS_OF
Old name of rb_class_of.
Definition globals.h:203
#define rb_ary_new4
Old name of rb_ary_new_from_values.
Definition array.h:653
#define FIXABLE
Old name of RB_FIXABLE.
Definition fixnum.h:25
#define LONG2FIX
Old name of RB_INT2FIX.
Definition long.h:49
#define FIX2INT
Old name of RB_FIX2INT.
Definition int.h:41
#define T_MODULE
Old name of RUBY_T_MODULE.
Definition value_type.h:70
#define STATIC_SYM_P
Old name of RB_STATIC_SYM_P.
#define ASSUME
Old name of RBIMPL_ASSUME.
Definition assume.h:27
#define FIX2ULONG
Old name of RB_FIX2ULONG.
Definition long.h:47
#define T_TRUE
Old name of RUBY_T_TRUE.
Definition value_type.h:81
#define T_ICLASS
Old name of RUBY_T_ICLASS.
Definition value_type.h:66
#define T_HASH
Old name of RUBY_T_HASH.
Definition value_type.h:65
#define ALLOC_N
Old name of RB_ALLOC_N.
Definition memory.h:393
#define FL_TEST_RAW
Old name of RB_FL_TEST_RAW.
Definition fl_type.h:140
#define rb_ary_new3
Old name of rb_ary_new_from_args.
Definition array.h:652
#define LONG2NUM
Old name of RB_LONG2NUM.
Definition long.h:50
#define rb_exc_new3
Old name of rb_exc_new_str.
Definition error.h:38
#define T_FALSE
Old name of RUBY_T_FALSE.
Definition value_type.h:61
#define Qtrue
Old name of RUBY_Qtrue.
#define Qnil
Old name of RUBY_Qnil.
#define Qfalse
Old name of RUBY_Qfalse.
#define FIX2LONG
Old name of RB_FIX2LONG.
Definition long.h:46
#define T_ARRAY
Old name of RUBY_T_ARRAY.
Definition value_type.h:56
#define T_OBJECT
Old name of RUBY_T_OBJECT.
Definition value_type.h:75
#define NIL_P
Old name of RB_NIL_P.
#define T_SYMBOL
Old name of RUBY_T_SYMBOL.
Definition value_type.h:80
#define DBL2NUM
Old name of rb_float_new.
Definition double.h:29
#define T_CLASS
Old name of RUBY_T_CLASS.
Definition value_type.h:58
#define BUILTIN_TYPE
Old name of RB_BUILTIN_TYPE.
Definition value_type.h:85
#define FL_TEST
Old name of RB_FL_TEST.
Definition fl_type.h:139
#define FIXNUM_P
Old name of RB_FIXNUM_P.
#define FL_USHIFT
Old name of RUBY_FL_USHIFT.
Definition fl_type.h:70
#define FL_SET_RAW
Old name of RB_FL_SET_RAW.
Definition fl_type.h:138
#define SYMBOL_P
Old name of RB_SYMBOL_P.
Definition value_type.h:88
void rb_notimplement(void)
Definition error.c:3193
void rb_raise(VALUE exc, const char *fmt,...)
Exception entry point.
Definition error.c:3150
void rb_exc_raise(VALUE mesg)
Raises an exception in the current thread.
Definition eval.c:688
void rb_bug(const char *fmt,...)
Interpreter panic switch.
Definition error.c:794
VALUE rb_eTypeError
TypeError exception.
Definition error.c:1091
VALUE rb_eFatal
fatal exception.
Definition error.c:1087
VALUE rb_eNoMethodError
NoMethodError exception.
Definition error.c:1099
void rb_exc_fatal(VALUE mesg)
Raises a fatal error in the current thread.
Definition eval.c:701
VALUE rb_eRuntimeError
RuntimeError exception.
Definition error.c:1089
void rb_warn(const char *fmt,...)
Identical to rb_warning(), except it reports always regardless of runtime -W flag.
Definition error.c:411
void rb_error_frozen_object(VALUE frozen_obj)
Identical to rb_error_frozen(), except it takes arbitrary Ruby object instead of C's string.
Definition error.c:3498
VALUE rb_exc_new_str(VALUE etype, VALUE str)
Identical to rb_exc_new_cstr(), except it takes a Ruby's string instead of C's.
Definition error.c:1142
VALUE rb_eArgError
ArgumentError exception.
Definition error.c:1092
VALUE rb_cClass
Class class.
Definition object.c:54
VALUE rb_cArray
Array class.
Definition array.c:40
VALUE rb_obj_alloc(VALUE klass)
Allocates an instance of the given class.
Definition object.c:1940
VALUE rb_cRegexp
Regexp class.
Definition re.c:2544
VALUE rb_obj_frozen_p(VALUE obj)
Just calls RB_OBJ_FROZEN() inside.
Definition object.c:1195
VALUE rb_cHash
Hash class.
Definition hash.c:94
VALUE rb_obj_class(VALUE obj)
Queries the class of an object.
Definition object.c:191
VALUE rb_inspect(VALUE obj)
Generates a human-readable textual representation of the given object.
Definition object.c:601
VALUE rb_cBasicObject
BasicObject class.
Definition object.c:50
VALUE rb_cModule
Module class.
Definition object.c:53
VALUE rb_class_real(VALUE klass)
Finds a "real" class.
Definition object.c:181
VALUE rb_obj_is_kind_of(VALUE obj, VALUE klass)
Queries if the given object is an instance (of possibly descendants) of the given class.
Definition object.c:788
VALUE rb_cFloat
Float class.
Definition numeric.c:191
VALUE rb_cProc
Proc class.
Definition proc.c:52
VALUE rb_cString
String class.
Definition string.c:79
#define RB_OBJ_WRITTEN(old, oldv, young)
Identical to RB_OBJ_WRITE(), except it doesn't write any values, but only a WB declaration.
Definition rgengc.h:232
#define RB_OBJ_WRITE(old, slot, young)
Declaration of a "back" pointer.
Definition rgengc.h:220
#define UNLIMITED_ARGUMENTS
This macro is used in conjunction with rb_check_arity().
Definition error.h:35
#define rb_check_frozen
Just another name of rb_check_frozen.
Definition error.h:264
static int rb_check_arity(int argc, int min, int max)
Ensures that the passed integer is in the passed range.
Definition error.h:280
#define rb_check_frozen_internal(obj)
Definition error.h:247
VALUE rb_proc_call_with_block(VALUE recv, int argc, const VALUE *argv, VALUE proc)
Identical to rb_proc_call(), except you can additionally pass another proc object,...
Definition proc.c:1027
VALUE rb_reg_last_match(VALUE md)
This just returns the argument, stringified.
Definition re.c:1886
VALUE rb_reg_match(VALUE re, VALUE str)
This is the match operator.
Definition re.c:3601
VALUE rb_reg_nth_match(int n, VALUE md)
Queries the nth captured substring.
Definition re.c:1861
VALUE rb_reg_match_post(VALUE md)
The portion of the original string after the given match.
Definition re.c:1943
VALUE rb_reg_match_pre(VALUE md)
The portion of the original string before the given match.
Definition re.c:1910
VALUE rb_reg_match_last(VALUE md)
The portion of the original string that captured at the very last.
Definition re.c:1960
VALUE rb_str_append(VALUE dst, VALUE src)
Identical to rb_str_buf_append(), except it converts the right hand side before concatenating.
Definition string.c:3353
VALUE rb_sym_to_s(VALUE sym)
This is an rb_sym2str() + rb_str_dup() combo.
Definition string.c:11597
VALUE rb_str_succ(VALUE orig)
Searches for the "successor" of a string.
Definition string.c:4828
VALUE rb_str_buf_append(VALUE dst, VALUE src)
Identical to rb_str_cat_cstr(), except it takes Ruby's string instead of C's.
Definition string.c:3319
VALUE rb_str_concat(VALUE dst, VALUE src)
Identical to rb_str_append(), except it also accepts an integer as a codepoint.
Definition string.c:3453
#define rb_str_cat_cstr(buf, str)
Identical to rb_str_cat(), except it assumes the passed pointer is a pointer to a C string.
Definition string.h:1656
VALUE rb_str_length(VALUE)
Identical to rb_str_strlen(), except it returns the value in rb_cInteger.
Definition string.c:2163
VALUE rb_str_intern(VALUE str)
Identical to rb_to_symbol(), except it assumes the receiver being an instance of RString.
Definition symbol.c:851
void rb_thread_schedule(void)
Tries to switch to another thread.
Definition thread.c:1488
VALUE rb_const_get(VALUE space, ID name)
Identical to rb_const_defined(), except it returns the actual defined value.
Definition variable.c:2896
VALUE rb_attr_get(VALUE obj, ID name)
Identical to rb_ivar_get()
Definition variable.c:1226
VALUE rb_ivar_set(VALUE obj, ID name, VALUE val)
Identical to rb_iv_set(), except it accepts the name as an ID instead of a C string.
Definition variable.c:1606
void rb_cvar_set(VALUE klass, ID name, VALUE val)
Assigns a value to a class variable.
Definition variable.c:3677
VALUE rb_cvar_find(VALUE klass, ID name, VALUE *front)
Identical to rb_cvar_get(), except it takes additional "front" pointer.
Definition variable.c:3732
VALUE rb_ivar_get(VALUE obj, ID name)
Identical to rb_iv_get(), except it accepts the name as an ID instead of a C string.
Definition variable.c:1218
void rb_const_set(VALUE space, ID name, VALUE val)
Names a constant.
Definition variable.c:3346
VALUE rb_autoload_load(VALUE space, ID name)
Kicks the autoload procedure as if it was "touched".
Definition variable.c:2731
VALUE rb_mod_name(VALUE mod)
Queries the name of a module.
Definition variable.c:137
VALUE rb_const_get_at(VALUE space, ID name)
Identical to rb_const_defined_at(), except it returns the actual defined value.
Definition variable.c:2902
void rb_set_class_path_string(VALUE klass, VALUE space, VALUE name)
Identical to rb_set_class_path(), except it accepts the name as Ruby's string instead of C's.
Definition variable.c:231
VALUE rb_ivar_defined(VALUE obj, ID name)
Queries if the instance variable is defined at the object.
Definition variable.c:1623
int rb_const_defined_at(VALUE space, ID name)
Identical to rb_const_defined(), except it doesn't look for parent classes.
Definition variable.c:3210
VALUE rb_cvar_defined(VALUE klass, ID name)
Queries if the given class has the given class variable.
Definition variable.c:3754
VALUE rb_class_path(VALUE mod)
Identical to rb_mod_name(), except it returns #<Class: ...> style inspection for anonymous modules.
Definition variable.c:188
int rb_const_defined(VALUE space, ID name)
Queries if the constant is defined at the namespace.
Definition variable.c:3204
VALUE rb_check_funcall(VALUE recv, ID mid, int argc, const VALUE *argv)
Identical to rb_funcallv(), except it returns RUBY_Qundef instead of raising rb_eNoMethodError.
Definition vm_eval.c:664
rb_alloc_func_t rb_get_alloc_func(VALUE klass)
Queries the allocator function of a class.
Definition vm_method.c:1148
void rb_define_alloc_func(VALUE klass, rb_alloc_func_t func)
Sets the allocator function of a class.
int rb_method_boundp(VALUE klass, ID id, int ex)
Queries if the klass has this method.
Definition vm_method.c:1681
ID rb_check_id(volatile VALUE *namep)
Detects if the given name is already interned or not.
Definition symbol.c:1085
ID rb_intern(const char *name)
Finds or creates a symbol of the given name.
Definition symbol.c:796
VALUE rb_sym2str(VALUE id)
Identical to rb_id2str(), except it takes an instance of rb_cSymbol rather than an ID.
Definition symbol.c:943
const char * rb_id2name(ID id)
Retrieves the name mapped to the given id.
Definition symbol.c:960
static bool rb_ractor_shareable_p(VALUE obj)
Queries if multiple Ractors can share the passed object or not.
Definition ractor.h:249
#define RB_OBJ_SHAREABLE_P(obj)
Queries if the passed object has previously classified as shareable or not.
Definition ractor.h:235
VALUE rb_sprintf(const char *fmt,...)
Ruby's extended sprintf(3).
Definition sprintf.c:1219
VALUE rb_str_catf(VALUE dst, const char *fmt,...)
Identical to rb_sprintf(), except it renders the output to the specified object rather than creating ...
Definition sprintf.c:1242
#define MEMCPY(p1, p2, type, n)
Handy macro to call memcpy.
Definition memory.h:366
#define ALLOCA_N(type, n)
Definition memory.h:286
#define RB_GC_GUARD(v)
Prevents premature destruction of local objects.
Definition memory.h:161
#define MEMMOVE(p1, p2, type, n)
Handy macro to call memmove.
Definition memory.h:378
VALUE type(ANYARGS)
ANYARGS-ed function type.
VALUE rb_ensure(type *q, VALUE w, type *e, VALUE r)
An equivalent of ensure clause.
#define RARRAY_LEN
Just another name of rb_array_len.
Definition rarray.h:68
#define RARRAY_CONST_PTR_TRANSIENT
Just another name of rb_array_const_ptr_transient.
Definition rarray.h:70
#define RARRAY_AREF(a, i)
Definition rarray.h:583
static VALUE RBASIC_CLASS(VALUE obj)
Queries the class of an object.
Definition rbasic.h:152
#define RBASIC(obj)
Convenient casting macro.
Definition rbasic.h:40
#define RCLASS_SUPER
Just another name of rb_class_get_superclass.
Definition rclass.h:44
#define RHASH_SIZE(h)
Queries the size of the hash.
Definition rhash.h:82
#define RHASH_EMPTY_P(h)
Checks if the hash is empty.
Definition rhash.h:92
static VALUE * ROBJECT_IVPTR(VALUE obj)
Queries the instance variables.
Definition robject.h:162
static long RSTRING_LEN(VALUE str)
Queries the length of the string.
Definition rstring.h:484
static char * RSTRING_PTR(VALUE str)
Queries the contents pointer of the string.
Definition rstring.h:498
#define StringValueCStr(v)
Identical to StringValuePtr, except it additionally checks for the contents for viability as a C stri...
Definition rstring.h:95
#define RB_NO_KEYWORDS
Do not pass keywords.
Definition scan_args.h:69
static bool RB_SPECIAL_CONST_P(VALUE obj)
Checks if the given object is of enum ruby_special_consts.
#define RTEST
This is an old name of RB_TEST.
#define ANYARGS
Functions declared using this macro take arbitrary arguments, including void.
Definition stdarg.h:64
Definition hash.h:43
Definition iseq.h:263
Definition vm_core.h:247
Definition vm_core.h:281
Definition vm_core.h:276
Definition method.h:62
Definition constant.h:33
CREF (Class REFerence)
Definition method.h:44
Definition class.h:32
Definition method.h:54
rb_cref_t * cref
class reference, should be marked
Definition method.h:136
const rb_iseq_t * iseqptr
iseq pointer, should be separated from iseqval
Definition method.h:135
Definition st.h:79
IFUNC (Internal FUNCtion)
Definition imemo.h:84
SVAR (Special VARiable)
Definition imemo.h:53
const VALUE cref_or_me
class reference or rb_method_entry_t
Definition imemo.h:55
THROW_DATA.
Definition imemo.h:62
Definition vm_core.h:285
intptr_t SIGNED_VALUE
A signed integer type that has the same width with VALUE.
Definition value.h:63
uintptr_t ID
Type that represents a Ruby identifier such as a variable name.
Definition value.h:52
uintptr_t VALUE
Type that represents a Ruby object.
Definition value.h:40
static enum ruby_value_type RB_BUILTIN_TYPE(VALUE obj)
Queries the type of the object.
Definition value_type.h:181
static bool RB_FLOAT_TYPE_P(VALUE obj)
Queries if the object is an instance of rb_cFloat.
Definition value_type.h:263
static bool RB_TYPE_P(VALUE obj, enum ruby_value_type t)
Queries if the given object is of given type.
Definition value_type.h:375