1 /**********************************************************************
6 created at: 04/01/01 19:41:38 JST
8 Copyright (C) 2004-2007 Koichi Sasada
10 **********************************************************************/
12 #ifndef RUBY_VM_CORE_H
13 #define RUBY_VM_CORE_H
15 #define RUBY_VM_THREAD_MODEL 2
17 #include "ruby/ruby.h"
26 #include "thread_win32.h"
27 #elif defined(HAVE_PTHREAD_H)
28 #include "thread_pthread.h"
30 #error "unsupported thread type"
37 # define NSIG (_SIGMAX + 1) /* For QNX */
40 #define RUBY_NSIG NSIG
42 #ifdef HAVE_STDARG_PROTOTYPES
44 #define va_init_list(a,b) va_start(a,b)
47 #define va_init_list(a,b) va_start(a)
55 #if defined(__GNUC__) && __GNUC__ >= 2
57 #if OPT_TOKEN_THREADED_CODE
58 #if OPT_DIRECT_THREADED_CODE
59 #undef OPT_DIRECT_THREADED_CODE
63 #else /* defined(__GNUC__) && __GNUC__ >= 2 */
65 /* disable threaded code options */
66 #if OPT_DIRECT_THREADED_CODE
67 #undef OPT_DIRECT_THREADED_CODE
69 #if OPT_TOKEN_THREADED_CODE
70 #undef OPT_TOKEN_THREADED_CODE
74 /* call threaded code */
75 #if OPT_CALL_THREADED_CODE
76 #if OPT_DIRECT_THREADED_CODE
77 #undef OPT_DIRECT_THREADED_CODE
78 #endif /* OPT_DIRECT_THREADED_CODE */
80 #undef OPT_STACK_CACHING
81 #endif /* OPT_STACK_CACHING */
82 #endif /* OPT_CALL_THREADED_CODE */
86 #define LIKELY(x) (__builtin_expect((x), 1))
87 #define UNLIKELY(x) (__builtin_expect((x), 0))
88 #else /* __GNUC__ >= 3 */
90 #define UNLIKELY(x) (x)
91 #endif /* __GNUC__ >= 3 */
93 typedef unsigned long rb_num_t;
95 struct iseq_compile_data_ensure_node_stack;
97 typedef struct rb_compile_option_struct {
/* Per-compilation option set: each field is a boolean toggle selecting
 * one iseq compiler optimization/feature.
 * NOTE(review): some fields are not visible in this view of the file. */
98 int inline_const_cache;
99 int peephole_optimization;
100 int tailcall_optimization;
101 int specialized_instruction;
102 int operands_unification;
103 int instructions_unification;
105 int trace_instruction; /* emit trace instructions (used by event hooks) */
107 } rb_compile_option_t;
110 #define GetCoreDataFromValue(obj, type, ptr) do { \
111 ptr = (type*)DATA_PTR(obj); \
114 #define GetCoreDataFromValue(obj, type, ptr) Data_Get_Struct(obj, type, ptr)
117 #define GetISeqPtr(obj, ptr) \
118 GetCoreDataFromValue(obj, rb_iseq_t, ptr)
120 struct rb_iseq_struct;
122 struct rb_iseq_struct {
/* Instruction sequence (iseq): the compiled form of a piece of Ruby
 * code plus the metadata the VM needs to execute, inspect, and GC-mark
 * it.  NOTE(review): several fields are not visible in this view. */
127 VALUE type; /* instruction sequence type */
128 VALUE name; /* String: iseq name */
129 VALUE filename; /* file this sequence comes from */
130 VALUE *iseq; /* iseq (insn numbers and operands) */
131 VALUE *iseq_encoded; /* encoded iseq */
132 unsigned long iseq_size;
133 VALUE mark_ary; /* Array: includes operands which should be GC marked */
134 VALUE coverage; /* coverage array */
136 /* insn info, must be freed */
137 struct iseq_insn_info_entry *insn_info_table;
138 unsigned long insn_info_size;
140 ID *local_table; /* must free */
141 int local_table_size;
143 /* method, class frame: sizeof(vars) + 1, block frame: sizeof(vars) */
147 * argument information
149 * def m(a1, a2, ..., aM, # mandatory
150 * b1=(...), b2=(...), ..., bN=(...), # optional
152 * d1, d2, ..., dO, # post
157 * arg_rest = M+N+1 // or -1 if no rest arg
159 * arg_opts_tbl = [ (N entries) ]
160 * arg_post_len = O // 0 if no post arguments
161 * arg_post_start = M+N+2
162 * arg_block = M+N + 1 + O + 1 // -1 if no block arg
163 * arg_simple = 0 if not simple arguments.
164 * = 1 if no opt, rest, post, block.
165 * = 2 if ambiguous block parameter ({|a|}).
166 * arg_size = argument size.
177 VALUE *arg_opt_table;
179 int stack_max; /* for stack overflow check */
182 struct iseq_catch_table_entry *catch_table;
183 int catch_table_size;
186 struct rb_iseq_struct *parent_iseq;
187 struct rb_iseq_struct *local_iseq;
194 VALUE orig; /* non-NULL if its data has an origin */
199 * void *special_block_builder;
200 * void *cached_special_block_builder;
201 * VALUE cached_special_block;
204 /* klass/module nest information stack (cref) */
209 ID defined_method_id; /* for define_method */
211 /* used at compile time */
212 struct iseq_compile_data *compile_data;
215 enum ruby_special_exceptions {
219 ruby_special_error_count
222 typedef struct rb_iseq_struct rb_iseq_t;
224 #define GetVMPtr(obj, ptr) \
225 GetCoreDataFromValue(obj, rb_vm_t, ptr)
227 typedef struct rb_vm_struct {
/* Process-global VM state: thread registry, signal traps, event hooks,
 * and interpreter-wide flags.
 * NOTE(review): several fields are not visible in this view. */
230 rb_thread_lock_t global_vm_lock; /* global VM lock (GVL) */
232 struct rb_thread_struct *main_thread;
233 struct rb_thread_struct *running_thread; /* set by rb_thread_set_current() */
235 st_table *living_threads;
236 VALUE thgroup_default;
239 int thread_abort_on_exception;
240 unsigned long trace_flag;
241 volatile int sleeper;
243 /* object management */
244 VALUE mark_object_ary;
246 VALUE special_exceptions[ruby_special_error_count]; /* pre-built exceptions, e.g. sysstack_error */
251 VALUE loaded_features;
252 struct st_table *loading_table;
258 } trap_list[RUBY_NSIG]; /* one slot per signal number */
261 rb_event_hook_t *event_hooks; /* VM-wide hooks (see EXEC_EVENT_HOOK) */
263 int src_encoding_index;
265 VALUE verbose, debug, progname;
268 #if defined(ENABLE_VM_OBJSPACE) && ENABLE_VM_OBJSPACE
269 struct rb_objspace *objspace; /* per-VM object space when enabled */
/* NOTE(review): rb_control_frame_t field list (the typedef opener is not
 * visible in this view).  The cfp[n] comments give each field's slot
 * index in the frame; the block[n] comments mark the tail slots that are
 * aliased in place as an rb_block_t (see RUBY_VM_GET_BLOCK_PTR_IN_CFP). */
274 VALUE *pc; /* cfp[0] */
275 VALUE *sp; /* cfp[1] */
276 VALUE *bp; /* cfp[2] */
277 rb_iseq_t *iseq; /* cfp[3] */
278 VALUE flag; /* cfp[4] */
279 VALUE self; /* cfp[5] / block[0] */
280 VALUE *lfp; /* cfp[6] / block[1] */
281 VALUE *dfp; /* cfp[7] / block[2] */
282 rb_iseq_t *block_iseq; /* cfp[8] / block[3] */
283 VALUE proc; /* cfp[9] / block[4] */
284 ID method_id; /* cfp[10] saved in special case */
285 VALUE method_class; /* cfp[11] saved in special case */
286 } rb_control_frame_t;
288 typedef struct rb_block_struct {
/* A block: this layout deliberately matches the self/lfp/dfp tail of a
 * control frame so a block can be aliased in place within the frame
 * (see RUBY_VM_GET_BLOCK_PTR_IN_CFP / RUBY_VM_GET_CFP_FROM_BLOCK_PTR). */
289 VALUE self; /* share with method frame if it's only block */
290 VALUE *lfp; /* share with method frame if it's only block */
291 VALUE *dfp; /* share with method frame if it's only block */
296 #define GetThreadPtr(obj, ptr) \
297 GetCoreDataFromValue(obj, rb_thread_t, ptr)
299 enum rb_thread_status {
303 THREAD_STOPPED_FOREVER,
307 typedef RUBY_JMP_BUF rb_jmpbuf_t;
313 struct rb_vm_tag *prev;
316 struct rb_vm_trap_tag {
317 struct rb_vm_trap_tag *prev;
320 #define RUBY_VM_VALUE_CACHE_SIZE 0x1000
321 #define USE_VALUE_CACHE 0
323 struct rb_unblock_callback {
324 rb_unblock_function_t *func;
328 struct rb_mutex_struct;
330 typedef struct rb_thread_struct
/* Per-thread state: the VM stack, current control frame, interrupt and
 * locking bookkeeping, and machine-stack bounds.
 * NOTE(review): many fields are not visible in this view. */
335 /* execution information */
336 VALUE *stack; /* must free, must mark */
337 unsigned long stack_size;
338 rb_control_frame_t *cfp; /* current control frame */
341 VALUE last_status; /* $? */
347 rb_block_t *passed_block;
354 rb_block_t *base_block;
360 rb_thread_id_t thread_id;
361 enum rb_thread_status status;
365 native_thread_data_t native_thread_data;
366 void *blocking_region_buffer;
372 VALUE thrown_errinfo;
376 rb_thread_lock_t interrupt_lock; /* guards interrupt state / unblock */
377 struct rb_unblock_callback unblock;
379 struct rb_mutex_struct *keeping_mutexes;
380 int transition_for_lock;
382 struct rb_vm_tag *tag;
383 struct rb_vm_trap_tag *trap_tag;
386 int mild_compile_error;
389 st_table *local_storage;
391 VALUE value_cache[RUBY_VM_VALUE_CACHE_SIZE + 1];
392 VALUE *value_cache_ptr;
395 struct rb_thread_struct *join_list_next;
396 struct rb_thread_struct *join_list_head;
400 VALUE (*first_func)(ANYARGS);
403 VALUE *machine_stack_start;
404 VALUE *machine_stack_end;
405 size_t machine_stack_maxsize;
407 VALUE *machine_register_stack_start;
408 VALUE *machine_register_stack_end;
409 size_t machine_register_stack_maxsize;
411 jmp_buf machine_regs;
414 /* statistics data for profiler */
415 VALUE stat_insn_usage;
418 rb_event_hook_t *event_hooks; /* per-thread hooks; VM-wide hooks are in vm->event_hooks */
419 rb_event_flag_t event_flags;
425 rb_jmpbuf_t root_jmpbuf;
428 int method_missing_reason;
429 int abort_on_exception;
/* iseq construction / compilation / disassembly API */
433 VALUE rb_iseq_new(NODE*, VALUE, VALUE, VALUE, VALUE);
434 VALUE rb_iseq_new_top(NODE *node, VALUE name, VALUE filename, VALUE parent);
435 VALUE rb_iseq_new_main(NODE *node, VALUE filename);
436 VALUE rb_iseq_new_with_bopt(NODE*, VALUE, VALUE, VALUE, VALUE, VALUE);
437 VALUE rb_iseq_new_with_opt(NODE*, VALUE, VALUE, VALUE, VALUE, const rb_compile_option_t*);
438 VALUE rb_iseq_compile(VALUE src, VALUE file, VALUE line);
439 VALUE ruby_iseq_disasm(VALUE self);
440 VALUE ruby_iseq_disasm_insn(VALUE str, VALUE *iseqval, int pos, rb_iseq_t *iseq, VALUE child);
441 const char *ruby_node_name(int node);
442 int rb_iseq_first_lineno(rb_iseq_t *iseq);
/* classes/modules defined by the VM */
444 RUBY_EXTERN VALUE rb_cISeq;
445 RUBY_EXTERN VALUE rb_cRubyVM;
446 RUBY_EXTERN VALUE rb_cEnv;
447 RUBY_EXTERN VALUE rb_mRubyVMFrozenCore;
449 /* each thread's stack size: 128KB */
450 #define RUBY_VM_THREAD_STACK_SIZE (128 * 1024)
452 struct global_entry {
453 struct global_variable *var;
457 #define GetProcPtr(obj, ptr) \
458 GetCoreDataFromValue(obj, rb_proc_t, ptr)
463 VALUE envval; /* for GC mark */
470 #define GetEnvPtr(obj, ptr) \
471 GetCoreDataFromValue(obj, rb_env_t, ptr)
477 VALUE prev_envval; /* for GC mark */
481 #define GetBindingPtr(obj, ptr) \
482 GetCoreDataFromValue(obj, rb_binding_t, ptr)
489 /* used by compile time and send insn */
/* call-info flag bits; the shifts start at 1, so bit 0 is unused here */
490 #define VM_CALL_ARGS_SPLAT_BIT (0x01 << 1)
491 #define VM_CALL_ARGS_BLOCKARG_BIT (0x01 << 2)
492 #define VM_CALL_FCALL_BIT (0x01 << 3)
493 #define VM_CALL_VCALL_BIT (0x01 << 4)
494 #define VM_CALL_TAILCALL_BIT (0x01 << 5)
495 #define VM_CALL_TAILRECURSION_BIT (0x01 << 6)
496 #define VM_CALL_SUPER_BIT (0x01 << 7)
497 #define VM_CALL_SEND_BIT (0x01 << 8)
499 #define VM_SPECIAL_OBJECT_VMCORE 0x01
500 #define VM_SPECIAL_OBJECT_CBASE 0x02
/* frame-type magic values stored in cfp->flag; the low
 * VM_FRAME_MAGIC_MASK_BITS bits identify the frame type */
502 #define VM_FRAME_MAGIC_METHOD 0x11
503 #define VM_FRAME_MAGIC_BLOCK 0x21
504 #define VM_FRAME_MAGIC_CLASS 0x31
505 #define VM_FRAME_MAGIC_TOP 0x41
506 #define VM_FRAME_MAGIC_FINISH 0x51
507 #define VM_FRAME_MAGIC_CFUNC 0x61
508 #define VM_FRAME_MAGIC_PROC 0x71
509 #define VM_FRAME_MAGIC_IFUNC 0x81
510 #define VM_FRAME_MAGIC_EVAL 0x91
511 #define VM_FRAME_MAGIC_LAMBDA 0xa1
512 #define VM_FRAME_MAGIC_MASK_BITS 8
/* NOTE(review): `~0 << n` left-shifts a negative value, which is
 * undefined behavior in ISO C; `~(~0UL << n)` would be portable. */
513 #define VM_FRAME_MAGIC_MASK (~(~0<<VM_FRAME_MAGIC_MASK_BITS))
515 #define VM_FRAME_TYPE(cfp) ((cfp)->flag & VM_FRAME_MAGIC_MASK)
517 /* other frame flag */
518 #define VM_FRAME_FLAG_PASSED 0x0100
521 #define RUBYVM_CFUNC_FRAME_P(cfp) \
522 (VM_FRAME_TYPE(cfp) == VM_FRAME_MAGIC_CFUNC)
525 /* inline (method|const) cache */
526 #define NEW_INLINE_CACHE_ENTRY() NEW_WHILE(Qundef, 0, 0)
527 #define ic_class u1.value
528 #define ic_method u2.node
529 #define ic_value u2.value
530 #define ic_vmstat u3.cnt
533 void rb_vm_change_state(void);
535 typedef VALUE CDHASH;
537 #ifndef FUNC_FASTCALL
538 #define FUNC_FASTCALL(x) x
541 typedef rb_control_frame_t *
542 (FUNC_FASTCALL(*rb_insn_func_t))(rb_thread_t *, rb_control_frame_t *);
/* GC-guarded pointer tagging: bit 0 marks a guarded pointer. */
544 #define GC_GUARDED_PTR(p) ((VALUE)((VALUE)(p) | 0x01))
/* clears bits 0-1; bit 1 is a separate tag (see RUBY_VM_CLASS_SPECIAL_P) */
545 #define GC_GUARDED_PTR_REF(p) ((void *)(((VALUE)p) & ~0x03))
546 #define GC_GUARDED_PTR_P(p) (((VALUE)p) & 0x01)
/* control frames grow downward: the previous (older) frame lives at a
 * higher address, and the end sentinel sits at the top of th->stack */
548 #define RUBY_VM_PREVIOUS_CONTROL_FRAME(cfp) (cfp+1)
549 #define RUBY_VM_NEXT_CONTROL_FRAME(cfp) (cfp-1)
550 #define RUBY_VM_END_CONTROL_FRAME(th) \
551 ((rb_control_frame_t *)((th)->stack + (th)->stack_size))
552 #define RUBY_VM_VALID_CONTROL_FRAME_P(cfp, ecfp) \
553 ((void *)(ecfp) > (void *)(cfp))
554 #define RUBY_VM_CONTROL_FRAME_STACK_OVERFLOW_P(th, cfp) \
555 (!RUBY_VM_VALID_CONTROL_FRAME_P((cfp), RUBY_VM_END_CONTROL_FRAME(th)))
557 #define RUBY_VM_IFUNC_P(ptr) (BUILTIN_TYPE(ptr) == T_NODE)
558 #define RUBY_VM_NORMAL_ISEQ_P(ptr) \
559 (ptr && !RUBY_VM_IFUNC_P(ptr))
561 #define RUBY_VM_CLASS_SPECIAL_P(ptr) (((VALUE)(ptr)) & 0x02)
/* the rb_block_t overlays cfp[5..9], so the frame base is 5 VALUE slots
 * below the block pointer */
563 #define RUBY_VM_GET_BLOCK_PTR_IN_CFP(cfp) ((rb_block_t *)(&(cfp)->self))
564 #define RUBY_VM_GET_CFP_FROM_BLOCK_PTR(b) \
565 ((rb_control_frame_t *)((VALUE *)(b) - 5))
567 /* VM related object allocate functions */
568 VALUE rb_thread_alloc(VALUE klass);
569 VALUE rb_proc_alloc(VALUE klass);
572 extern void vm_stack_dump_raw(rb_thread_t *, rb_control_frame_t *);
573 #define SDR() vm_stack_dump_raw(GET_THREAD(), GET_THREAD()->cfp)
574 #define SDR2(cfp) vm_stack_dump_raw(GET_THREAD(), (cfp))
575 void rb_vm_bugreport(void);
578 /* functions about thread/vm execution */
580 VALUE rb_iseq_eval(VALUE iseqval);
581 VALUE rb_iseq_eval_main(VALUE iseqval);
582 void rb_enable_interrupt(void);
583 void rb_disable_interrupt(void);
584 int rb_thread_method_id_and_class(rb_thread_t *th, ID *idp, VALUE *klassp);
586 VALUE vm_invoke_proc(rb_thread_t *th, rb_proc_t *proc, VALUE self,
587 int argc, const VALUE *argv, rb_block_t *blockptr);
588 VALUE vm_make_proc(rb_thread_t *th, const rb_block_t *block, VALUE klass);
589 VALUE vm_make_env_object(rb_thread_t *th, rb_control_frame_t *cfp);
591 NOINLINE(void rb_gc_save_machine_context(rb_thread_t *));
593 #define sysstack_error GET_VM()->special_exceptions[ruby_error_sysstack]
597 #if RUBY_VM_THREAD_MODEL == 2
598 RUBY_EXTERN rb_thread_t *ruby_current_thread;
599 extern rb_vm_t *ruby_current_vm;
601 #define GET_VM() ruby_current_vm
602 #define GET_THREAD() ruby_current_thread
603 #define rb_thread_set_current_raw(th) (void)(ruby_current_thread = (th))
604 #define rb_thread_set_current(th) do { \
605 rb_thread_set_current_raw(th); \
606 th->vm->running_thread = th; \
610 #error "unsupported thread model"
/* th->interrupt_flag bits: 0x01 timer, 0x02 plain interrupt, 0x04 finalizer */
613 #define RUBY_VM_SET_INTERRUPT(th) ((th)->interrupt_flag |= 0x02)
614 #define RUBY_VM_SET_TIMER_INTERRUPT(th) ((th)->interrupt_flag |= 0x01)
615 #define RUBY_VM_SET_FINALIZER_INTERRUPT(th) ((th)->interrupt_flag |= 0x04)
/* tests only the plain-interrupt bit, not timer/finalizer */
616 #define RUBY_VM_INTERRUPTED(th) ((th)->interrupt_flag & 0x02)
618 void rb_thread_execute_interrupts(rb_thread_t *);
/* service any pending interrupts for the given thread */
620 #define RUBY_VM_CHECK_INTS_TH(th) do { \
621 if (UNLIKELY(th->interrupt_flag)) { \
622 rb_thread_execute_interrupts(th); \
626 #define RUBY_VM_CHECK_INTS() \
627 RUBY_VM_CHECK_INTS_TH(GET_THREAD())
631 exec_event_hooks(rb_event_hook_t *hook, rb_event_flag_t flag, VALUE self, ID id, VALUE klass)
633 if (self == rb_mRubyVMFrozenCore) return;
635 if (flag & hook->flag) {
636 (*hook->func)(flag, hook->data, self, id, klass);
642 #define EXEC_EVENT_HOOK(th, flag, self, id, klass) do { \
643 rb_event_flag_t wait_event__ = th->event_flags; \
644 if (UNLIKELY(wait_event__)) { \
645 if (wait_event__ & (flag | RUBY_EVENT_VM)) { \
646 VALUE self__ = (self), klass__ = (klass); \
648 if (wait_event__ & flag) { \
649 exec_event_hooks(th->event_hooks, flag, self__, id__, klass__); \
651 if (wait_event__ & RUBY_EVENT_VM) { \
652 exec_event_hooks(th->vm->event_hooks, flag, self__, id__, klass__); \
658 #endif /* RUBY_VM_CORE_H */