@@ -110,6 +110,8 @@ jit_peek_at_self(jitstate_t *jit, ctx_t *ctx)
110110 return jit -> ec -> cfp -> self ;
111111}
112112
113+ static bool jit_guard_known_klass (jitstate_t * jit , ctx_t * ctx , VALUE known_klass , insn_opnd_t insn_opnd , const int max_chain_depth , uint8_t * side_exit );
114+
113115// Save YJIT registers prior to a C call
114116static void
115117yjit_save_regs (codeblock_t * cb )
@@ -371,6 +373,9 @@ yjit_gen_block(ctx_t *ctx, block_t *block, rb_execution_context_t *ec)
371373 // If we can't compile this instruction
372374 // exit to the interpreter and stop compiling
373375 if (status == YJIT_CANT_COMPILE ) {
376+ // TODO: if the codegen function makes changes to ctx and then returns YJIT_CANT_COMPILE,
377+ // the exit this generates would be wrong. We could save a copy of the entry context
378+ // and assert that ctx is the same here.
374379 yjit_gen_exit (& jit , ctx , cb );
375380 break ;
376381 }
@@ -716,67 +721,58 @@ enum {
716721 OSWB_MAX_DEPTH = 5 , // up to 5 different classes
717722};
718723
724+ // Codegen for getting an instance variable.
725+ // Preconditions:
726+ // - receiver is in REG0
727+ // - receiver has the same class as CLASS_OF(comptime_receiver)
728+ // - no stack push or pops to ctx since the entry to the codegen of the instruction being compiled
719729static codegen_status_t
720- gen_getinstancevariable (jitstate_t * jit , ctx_t * ctx )
730+ gen_get_ivar (jitstate_t * jit , ctx_t * ctx , const int max_chain_depth , VALUE comptime_receiver , ID ivar_name , insn_opnd_t reg0_opnd , uint8_t * side_exit )
721731{
722- // Defer compilation so we can specialize a runtime `self`
723- if (!jit_at_current_insn (jit )) {
724- defer_compilation (jit -> block , jit -> insn_idx , ctx );
725- return YJIT_END_BLOCK ;
726- }
727-
728- // Specialize base on the compile time self
729- VALUE self_val = jit_peek_at_self (jit , ctx );
730- VALUE self_klass = rb_class_of (self_val );
731-
732- // Create a size-exit to fall back to the interpreter
733- uint8_t * side_exit = yjit_side_exit (jit , ctx );
732+ VALUE comptime_val_klass = CLASS_OF (comptime_receiver );
733+ const ctx_t starting_context = * ctx ; // make a copy for use with jit_chain_guard
734734
735735 // If the class uses the default allocator, instances should all be T_OBJECT
736736 // NOTE: This assumes nobody changes the allocator of the class after allocation.
737737 // Eventually, we can encode whether an object is T_OBJECT or not
738738 // inside object shapes.
739- if (rb_get_alloc_func (self_klass ) != rb_class_allocate_instance ) {
739+ if (rb_get_alloc_func (comptime_val_klass ) != rb_class_allocate_instance ) {
740+ GEN_COUNTER_INC (cb , getivar_not_object );
740741 return YJIT_CANT_COMPILE ;
741742 }
742- RUBY_ASSERT (BUILTIN_TYPE (self_val ) == T_OBJECT ); // because we checked the allocator
743+ RUBY_ASSERT (BUILTIN_TYPE (comptime_receiver ) == T_OBJECT ); // because we checked the allocator
743744
744- ID id = (ID )jit_get_arg (jit , 0 );
745+ // ID for the name of the ivar
746+ ID id = ivar_name ;
745747 struct rb_iv_index_tbl_entry * ent ;
746- struct st_table * iv_index_tbl = ROBJECT_IV_INDEX_TBL (self_val );
748+ struct st_table * iv_index_tbl = ROBJECT_IV_INDEX_TBL (comptime_receiver );
747749
748750 // Lookup index for the ivar the instruction loads
749751 if (iv_index_tbl && rb_iv_index_tbl_lookup (iv_index_tbl , id , & ent )) {
750752 uint32_t ivar_index = ent -> index ;
751753
752- // Load self from CFP
753- mov (cb , REG0 , member_opnd (REG_CFP , rb_control_frame_t , self ));
754-
755- guard_self_is_heap (cb , REG0 , COUNTED_EXIT (side_exit , getivar_se_self_not_heap ), ctx );
756-
757- // Guard that self has a known class
758- x86opnd_t klass_opnd = mem_opnd (64 , REG0 , offsetof(struct RBasic , klass ));
759- mov (cb , REG1 , klass_opnd );
760- x86opnd_t serial_opnd = mem_opnd (64 , REG1 , offsetof(struct RClass , class_serial ));
761- cmp (cb , serial_opnd , imm_opnd (RCLASS_SERIAL (self_klass )));
762- jit_chain_guard (JCC_JNE , jit , ctx , GETIVAR_MAX_DEPTH , side_exit );
754+ // Pop receiver if it's on the temp stack
755+ if (!reg0_opnd .is_self ) {
756+ (void )ctx_stack_pop (ctx , 1 );
757+ }
763758
764- // Compile time self is embedded and the ivar index is within the object
765- if (RB_FL_TEST_RAW (self_val , ROBJECT_EMBED ) && ivar_index < ROBJECT_EMBED_LEN_MAX ) {
759+ // Compile time self is embedded and the ivar index lands within the object
760+ if (RB_FL_TEST_RAW (comptime_receiver , ROBJECT_EMBED ) && ivar_index < ROBJECT_EMBED_LEN_MAX ) {
766761 // See ROBJECT_IVPTR() from include/ruby/internal/core/robject.h
767762
768763 // Guard that self is embedded
769764 // TODO: BT and JC is shorter
770765 ADD_COMMENT (cb , "guard embedded getivar" );
771766 x86opnd_t flags_opnd = member_opnd (REG0 , struct RBasic , flags );
772767 test (cb , flags_opnd , imm_opnd (ROBJECT_EMBED ));
773- jit_chain_guard (JCC_JZ , jit , ctx , GETIVAR_MAX_DEPTH , side_exit );
768+ jit_chain_guard (JCC_JZ , jit , & starting_context , max_chain_depth , side_exit );
774769
775770 // Load the variable
776771 x86opnd_t ivar_opnd = mem_opnd (64 , REG0 , offsetof(struct RObject , as .ary ) + ivar_index * SIZEOF_VALUE );
777772 mov (cb , REG1 , ivar_opnd );
778773
779774 // Guard that the variable is not Qundef
775+ // TODO: use cmov to push Qnil in this case
780776 cmp (cb , REG1 , imm_opnd (Qundef ));
781777 je_ptr (cb , COUNTED_EXIT (side_exit , getivar_undef ));
782778
@@ -785,14 +781,14 @@ gen_getinstancevariable(jitstate_t* jit, ctx_t* ctx)
785781 mov (cb , out_opnd , REG1 );
786782 }
787783 else {
788- // Compile time self is *not* embeded.
784+ // Compile time value is *not* embedded.
789785
790- // Guard that self is *not* embedded
786+ // Guard that value is *not* embedded
791787 // See ROBJECT_IVPTR() from include/ruby/internal/core/robject.h
792788 ADD_COMMENT (cb , "guard extended getivar" );
793789 x86opnd_t flags_opnd = member_opnd (REG0 , struct RBasic , flags );
794790 test (cb , flags_opnd , imm_opnd (ROBJECT_EMBED ));
795- jit_chain_guard (JCC_JNZ , jit , ctx , GETIVAR_MAX_DEPTH , side_exit );
791+ jit_chain_guard (JCC_JNZ , jit , & starting_context , max_chain_depth , side_exit );
796792
797793 // check that the extended table is big enough
798794 if (ivar_index >= ROBJECT_EMBED_LEN_MAX + 1 ) {
@@ -824,9 +820,36 @@ gen_getinstancevariable(jitstate_t* jit, ctx_t* ctx)
824820 return YJIT_END_BLOCK ;
825821 }
826822
823+ GEN_COUNTER_INC (cb , getivar_name_not_mapped );
827824 return YJIT_CANT_COMPILE ;
828825}
829826
827+ static codegen_status_t
828+ gen_getinstancevariable (jitstate_t * jit , ctx_t * ctx )
829+ {
830+ // Defer compilation so we can specialize on a runtime `self`
831+ if (!jit_at_current_insn (jit )) {
832+ defer_compilation (jit -> block , jit -> insn_idx , ctx );
833+ return YJIT_END_BLOCK ;
834+ }
835+
836+ ID ivar_name = (ID )jit_get_arg (jit , 0 );
837+
838+ VALUE comptime_val = jit_peek_at_self (jit , ctx );
839+ VALUE comptime_val_klass = CLASS_OF (comptime_val );
840+
841+ // Generate a side exit
842+ uint8_t * side_exit = yjit_side_exit (jit , ctx );
843+
844+ // Guard that the receiver has the same class as the one from compile time.
845+ mov (cb , REG0 , member_opnd (REG_CFP , rb_control_frame_t , self ));
846+ guard_self_is_heap (cb , REG0 , side_exit , ctx );
847+
848+ jit_guard_known_klass (jit , ctx , comptime_val_klass , OPND_SELF , GETIVAR_MAX_DEPTH , side_exit );
849+
850+ return gen_get_ivar (jit , ctx , GETIVAR_MAX_DEPTH , comptime_val , ivar_name , OPND_SELF , side_exit );
851+ }
852+
830853static codegen_status_t
831854gen_setinstancevariable (jitstate_t * jit , ctx_t * ctx )
832855{
@@ -1412,7 +1435,8 @@ jit_guard_known_klass(jitstate_t *jit, ctx_t* ctx, VALUE known_klass, insn_opnd_
14121435 // Pointer to the klass field of the receiver &(recv->klass)
14131436 x86opnd_t klass_opnd = mem_opnd (64 , REG0 , offsetof(struct RBasic , klass ));
14141437
1415- // Bail if receiver class is different from compile-time call cache class
1438+ // Bail if receiver class is different from known_klass
1439+ // TODO: jit_mov_gc_ptr keeps a strong reference, which leaks the class.
14161440 ADD_COMMENT (cb , "guard known class" );
14171441 jit_mov_gc_ptr (jit , cb , REG1 , known_klass );
14181442 cmp (cb , klass_opnd , REG1 );
@@ -1817,8 +1841,9 @@ gen_opt_send_without_block(jitstate_t* jit, ctx_t* ctx)
18171841
18181842 // Points to the receiver operand on the stack
18191843 x86opnd_t recv = ctx_stack_opnd (ctx , argc );
1844+ insn_opnd_t recv_opnd = OPND_STACK (argc );
18201845 mov (cb , REG0 , recv );
1821- if (!jit_guard_known_klass (jit , ctx , comptime_recv_klass , OPND_STACK ( argc ) , OSWB_MAX_DEPTH , side_exit )) {
1846+ if (!jit_guard_known_klass (jit , ctx , comptime_recv_klass , recv_opnd , OSWB_MAX_DEPTH , side_exit )) {
18221847 return YJIT_CANT_COMPILE ;
18231848 }
18241849
@@ -1860,15 +1885,24 @@ gen_opt_send_without_block(jitstate_t* jit, ctx_t* ctx)
18601885 return gen_oswb_iseq (jit , ctx , ci , cme , argc );
18611886 case VM_METHOD_TYPE_CFUNC :
18621887 return gen_oswb_cfunc (jit , ctx , ci , cme , argc );
1888+ case VM_METHOD_TYPE_IVAR :
1889+ if (argc != 0 ) {
1890+ // Argument count mismatch. Getters take no arguments.
1891+ GEN_COUNTER_INC (cb , oswb_getter_arity );
1892+ return YJIT_CANT_COMPILE ;
1893+ }
1894+ else {
1895+ mov (cb , REG0 , recv );
1896+
1897+ ID ivar_name = cme -> def -> body .attr .id ;
1898+ return gen_get_ivar (jit , ctx , OSWB_MAX_DEPTH , comptime_recv , ivar_name , recv_opnd , side_exit );
1899+ }
18631900 case VM_METHOD_TYPE_ATTRSET :
18641901 GEN_COUNTER_INC (cb , oswb_ivar_set_method );
18651902 return YJIT_CANT_COMPILE ;
18661903 case VM_METHOD_TYPE_BMETHOD :
18671904 GEN_COUNTER_INC (cb , oswb_bmethod );
18681905 return YJIT_CANT_COMPILE ;
1869- case VM_METHOD_TYPE_IVAR :
1870- GEN_COUNTER_INC (cb , oswb_ivar_get_method );
1871- return YJIT_CANT_COMPILE ;
18721906 case VM_METHOD_TYPE_ZSUPER :
18731907 GEN_COUNTER_INC (cb , oswb_zsuper_method );
18741908 return YJIT_CANT_COMPILE ;
0 commit comments