From: Armin R. <ar...@us...> - 2003-09-03 19:58:02
|
Update of /cvsroot/psyco/psyco/c/ivm In directory sc8-pr-cvs1:/tmp/cvs-serv20170/c/ivm Modified Files: iencoding.h iinitialize.h Added Files: idispatcher.c idispatcher.h iencoding.c iprocessor.c ipyencoding.c ipyencoding.h itiming.h ivm-insns.c ivm-insns.h Log Message: the virtual machine is up and running! --- NEW FILE: idispatcher.c --- #include "idispatcher.h" #include "../dispatcher.h" #include "../codemanager.h" #include "ipyencoding.h" /***************************************************************/ /*** the hard processor-dependent part of dispatching: ***/ /*** Unification. ***/ #define RUNTIME_STACK(v) getstack((v)->source) #define RUNTIME_STACK_NONE RunTime_StackNone struct dmove_s { PsycoObject* po; int original_stack_depth; char* usages; /* buffer: array of vinfo_t*, see ORIGINAL_VINFO() below */ int usages_size; code_t* code_origin; code_t* code_limit; code_t* code; /* only used by data_update_stack() */ CodeBufferObject* private_codebuf; }; static code_t* data_new_buffer(code_t* code, struct dmove_s* dm) { /* creates a new buffer containing a copy of the already-written code */ CodeBufferObject* codebuf; int codesize; if (dm->private_codebuf != NULL) { /* overflowing the regular (large) code buffer */ psyco_emergency_enlarge_buffer(&code, &dm->code_limit); return code; } else { /* overflowing the small buffer, start a new (regular) one */ codebuf = psyco_new_code_buffer(NULL, NULL, &dm->code_limit); codebuf->snapshot.fz_stuff.fz_stack_depth = dm->original_stack_depth; /* the new buffer should be at least as large as the old one */ codesize = code - dm->code_origin; if ((code_t*) codebuf->codestart + codesize > dm->code_limit) Py_FatalError("psyco: unexpected unify buffer overflow"); /* copy old code to new buffer */ memcpy(codebuf->codestart, dm->code_origin, codesize+POST_CODEBUFFER_SIZE); dm->private_codebuf = codebuf; #if PSYCO_DEBUG dm->code_origin = (code_t*) 0xCDCDCDCD; #endif return ((code_t*) codebuf->codestart) + codesize; } } #define 
ORIGINAL_VINFO(spos) (*(vinfo_t**)(dm->usages + ( \ extra_assert(0 <= (spos) && (spos) < dm->usages_size), \ (spos)))) static void data_original_table(vinfo_t* a, RunTimeSource bsource, struct dmove_s* dm) { /* called on each run-time vinfo_t in the FrozenPsycoObject. Record in the array dm->usages which vinfo_t is found at what position in the stack. Ignore the ones after dm->usages_size: they correspond to stack positions which will soon be deleted (because the stack will shrink). */ if (RUNTIME_STACK(a) < dm->usages_size) ORIGINAL_VINFO(RUNTIME_STACK(a)) = a; } static void data_update_stack(vinfo_t* a, RunTimeSource bsource, struct dmove_s* dm) { PsycoObject* po = dm->po; code_t* code = dm->code; long dststack = getstack(bsource); long srcstack = getstack(a->source); vinfo_t* overridden; RunTimeSource osrc; /* check for values passing from no-reference to reference */ if ((bsource & RunTime_NoRef) == 0) { /* destination has ref */ if ((a->source & RunTime_NoRef) == 0) /* source has ref too */ { /* remove the reference from 'a' because it now belongs to 'b' ('b->source' itself is in the frozen snapshot and must not be modified!) */ a->source = remove_rtref(a->source); } else { /* create a new reference for 'b'. Note that if the same 'a' is copied to several 'b's during data_update_stack() as is allowed by graph quotient detection in psyco_compatible(), then only the first copy will get the original reference owned by 'a' (if any) and for the following copies the following increfing code is executed as well. */ INSN_rt_push(a->source); INSN_incref(); } } /* 'a' must no longer own a reference at this point. The case of 'b' wanting no reference but 'a' having one is forbidden by psyco_compatible() because decrefing 'a' would potentially leave a freed pointer in 'b'. */ extra_assert(!has_rtref(a->source)); /* The operation below is: copy the value currently held by 'a' into the stack position 'dststack'. 
*/ if (dststack == RUNTIME_STACK_NONE || dststack == srcstack) ; /* nothing to do */ else { /* is there already a pending value at 'dststack'? */ overridden = ORIGINAL_VINFO(dststack); if (overridden == NULL || RUNTIME_STACK(overridden) != dststack) goto can_save_only; /* no -- just save the new value to 'dststack'. The case RUNTIME_STACK(overridden) != dststack corresponds to a vinfo_t which has been moved elsewhere in the mean time. */ /* yes -- careful! We have to save the current value of 'dststack' before we can overwrite it. */ osrc = overridden->source; INSN_rt_push(osrc); osrc = set_rtstack_to_none(osrc); INSNPUSHED(1); overridden->source = set_rtstack_to(osrc, po->stack_depth); can_save_only: /* copy 'a' to 'dststack' */ INSN_rt_push(a->source); INSNPUSHED(1); INSN_rt_pop(bsource); INSNPOPPED(1); /* Now 'a' is at 'dststack' */ a->source = RunTime_New1(dststack, false, false); ORIGINAL_VINFO(dststack) = a; /* 'a' is now there */ if (code > dm->code_limit) /* oops, buffer overflow. Start a new buffer */ code = data_new_buffer(code, dm); } dm->code = code; } static code_t* data_free_unused(code_t* code, struct dmove_s* dm, vinfo_array_t* aa) { /* decref any object that would be present in 'po' but not at all in the snapshot. Note that it is uncommon that this function actually finds any unused object at all. */ int i = aa->count; while (i--) { vinfo_t* a = aa->items[i]; if (a != NULL) { if (has_rtref(a->source)) { PsycoObject* po = dm->po; code_t* saved_code; a->source = remove_rtref(a->source); saved_code = po->code; po->code = code; psyco_decref_rt(po, a); code = po->code; po->code = saved_code; if (code > dm->code_limit) /* oops, buffer overflow. Start a new buffer */ code = data_new_buffer(code, dm); } if (a->array != NullArray) code = data_free_unused(code, dm, a->array); } } return code; } DEFINEFN code_t* psyco_unify(PsycoObject* po, vcompatible_t* lastmatch, CodeBufferObject** target) { /* Update 'po' to match 'lastmatch', then jump to 'lastmatch'. 
*/ struct dmove_s dm; code_t* code = po->code; CodeBufferObject* target_codebuf = lastmatch->matching; int sdepth = get_stack_depth(&target_codebuf->snapshot); extra_assert(lastmatch->diff == NullArray); /* unify with exact match only */ psyco_assert_coherent(po); dm.usages_size = sdepth + sizeof(vinfo_t**); dm.usages = (char*) PyMem_MALLOC(dm.usages_size); if (dm.usages == NULL) OUT_OF_MEMORY(); memset(dm.usages, 0, dm.usages_size); /* set to all NULL */ fz_find_runtimes(&po->vlocals, &target_codebuf->snapshot, (fz_find_fn) &data_original_table, &dm, false); dm.po = po; dm.original_stack_depth = po->stack_depth; dm.code_origin = code; dm.code_limit = po->codelimit == NULL ? code : po->codelimit; dm.private_codebuf = NULL; if (sdepth > po->stack_depth) { /* more items in the target stack (uncommon case). Let the stack grow. */ STACK_CORRECTION(sdepth - po->stack_depth); po->stack_depth = sdepth; } /* update the stack */ dm.code = code; fz_find_runtimes(&po->vlocals, &target_codebuf->snapshot, (fz_find_fn) &data_update_stack, &dm, true); code = dm.code; /* decref any object that would be present in 'po' but not at all in the snapshot (data_update_stack() has removed the 'ref' tag of all vinfo_ts it actually used from 'po') */ code = data_free_unused(code, &dm, &po->vlocals); /* done */ STACK_CORRECTION(sdepth - po->stack_depth); if (code > dm.code_limit) /* start a new buffer if we wrote past the end */ code = data_new_buffer(code, &dm); JUMP_TO((code_t*) target_codebuf->codestart); /* start a new buffer if the last JUMP_TO overflowed, but not if we had no room at all in the first place. 
*/ if (code > dm.code_limit && po->codelimit != NULL) { /* Note that the JMP instruction emitted by JUMP_TO() is position-independent (a property of the vm) */ code = data_new_buffer(code, &dm); psyco_assert(code <= dm.code_limit); } PyMem_FREE(dm.usages); if (dm.private_codebuf == NULL) { Py_INCREF(target_codebuf); /* no new buffer created */ *target = target_codebuf; } else { SHRINK_CODE_BUFFER(dm.private_codebuf, code, "unify"); *target = dm.private_codebuf; /* add a jump from the original code buffer to the new one */ code = po->code; JUMP_TO((code_t*) dm.private_codebuf->codestart); dump_code_buffers(); } PsycoObject_Delete(po); return code; } --- NEW FILE: idispatcher.h --- /***************************************************************/ /*** Processor-specific routines for dispatcher.c ***/ /***************************************************************/ #ifndef _IDISPATCHER_H #define _IDISPATCHER_H #include "../vcompiler.h" #include "../codegen.h" #include "iencoding.h" #include "ivm-insns.h" #define NEED_STACK_FRAME_HACK 0 /***************************************************************/ /*** Unification ***/ /* idispatcher.c implements psyco_unify(), whose header is given in dispatcher.h. 
Conversely, dispatcher.c implements the following function which is declared here because it is really internal: */ typedef void (*fz_find_fn) (vinfo_t* a, RunTimeSource bsource, void* extra); EXTERNFN void fz_find_runtimes(vinfo_array_t* aa, FrozenPsycoObject* fpo, fz_find_fn callback, void* extra, bool clear); /***************************************************************/ /*** Promotion ***/ #define PROMOTION_FAST_COMMON_CASE 0 /* not implemented for ivm */ #define INTERNAL_PROMOTION_FIELDS /* nothing */ inline code_t* fix_fast_common_case(void* fs, long value, code_t* codeptr) { return codeptr; } inline void* ipromotion_finish(PsycoObject* po, vinfo_t* fix, void* do_promotion) { return psyco_call_code_builder(po, do_promotion, 0, fix->source); } /***************************************************************/ /*** Misc. ***/ inline void* conditional_jump_to(PsycoObject* po, code_t* target, condition_code_t condition) { word_t* arg = NULL; BEGIN_CODE switch (condition) { case CC_ALWAYS_FALSE: /* never jumps */ break; case CC_ALWAYS_TRUE: INSN_jumpfar(&arg); /* always jumps */ *arg = (word_t) target; break; default: INSN_rtcc_push(condition); INSN_jcondfar(&arg); *arg = (word_t) target; } END_CODE return arg; } inline void change_cond_jump_target(void* tag, code_t* newtarget) { word_t* arg = (word_t*)tag; *arg = (word_t) newtarget; } /* reserve a small buffer of code behind po->code in which conditional code can be stored. That code should only be executed if 'condition'. */ inline void* setup_conditional_code_bounds(PsycoObject* po, PsycoObject* po2, condition_code_t condition) { code_t* forward_distance_ptr; BEGIN_CODE INSN_rtcc_push(INVERT_CC(condition)); INSN_jcondnear(&forward_distance_ptr); po2->code = code; po2->codelimit = code + 255; END_CODE return forward_distance_ptr; } /* Backpatch the distance over which to skip the conditional code. 
*/ inline void make_code_conditional(PsycoObject* po, code_t* codeend, condition_code_t condition, void* extra) { code_t* forward_distance_ptr = (code_t*) extra; code_t* code = codeend; int distance = INSN_CODE_LABEL() - po->code; po->code = code; extra_assert(0 <= distance && distance <= 255); *forward_distance_ptr = (code_t) distance; } #endif /* _IDISPATCHER_H */ --- NEW FILE: iencoding.c --- #include "iencoding.h" #include "ivm-insns.h" #include "../vcompiler.h" #include "../dispatcher.h" #include "../codegen.h" #include "../codemanager.h" #include "../Python/frames.h" /* building run-time values meaning "the top of stack" */ #define RunTime_TOS() RunTime_TOSF(false, false) #define RunTime_TOSF(ref, nonneg) RunTime_NewStack(po->stack_depth, ref, nonneg) /* building run-time values meaning "the nth item in the stack", where n=0 is the top */ #define RunTime_STACK(n) RunTime_STACKF(n, false, false) #define RunTime_STACKF(n, ref, nonneg) \ RunTime_NewStack(po->stack_depth - (n)*sizeof(long), ref, nonneg) DEFINEFN void* psyco_call_code_builder(PsycoObject* po, void* fn, int restore, RunTimeSource extraarg) { word_t* arg; code_t* code = po->code; /* the INSN_cbuildX() instructions call the given function and then jump to whatever address the function has returned. 
*/ if (extraarg != SOURCE_DUMMY) { INSN_rt_push(extraarg); INSN_cbuild2(&arg); } else { INSN_cbuild1(&arg); } *arg = (word_t) fn; #if PSYCO_DEBUG /* add a zero to seperate code from data for ivmextract.py */ *code++ = 0; #endif /* make 'fs' point just after the end of the code, aligned */ ALIGN_NO_FILL(); return code; } DEFINEFN vinfo_t* psyco_call_psyco(PsycoObject* po, CodeBufferObject* codebuf, Source argsources[], int argcount, struct stack_frame_info_s* finfo) { word_t* arg; int i, initial_depth; Source* p; BEGIN_CODE ABOUT_TO_CALL_SUBFUNCTION(finfo); initial_depth = po->stack_depth; p = argsources; for (i=argcount; i--; p++) { INSN_rt_push(*p); INSNPUSHED(1); } INSN_vmcall(&arg); *arg = (word_t) codebuf->codestart; po->stack_depth = initial_depth; /* callee removes arguments */ RETURNED_FROM_SUBFUNCTION(); INSN_pushretval(); INSNPUSHED(1); END_CODE META_assertdepth(po->stack_depth); return generic_call_check(po, CfReturnRef|CfPyErrIfNull, bfunction_result(po, true)); } /***************************************************************/ /*** Memory reads and writes ***/ static void mem_access(PsycoObject* po, vinfo_t* nv_ptr, long offset, vinfo_t* rt_vindex, int size2) { BEGIN_CODE if (is_runtime(nv_ptr->source)) { INSN_rt_push(nv_ptr->source); if (offset) { INSN_immed(offset); INSN_addo(); INSN_pop(); } } else { offset += CompileTime_Get(nv_ptr->source)->value; INSN_immed(offset); } INSNPUSHED(1); if (rt_vindex != NULL) { INSN_rt_push(rt_vindex->source); if (size2 > 0) { INSN_immed(size2); INSN_lshift(); } INSN_addo(); INSN_pop(); } END_CODE } DEFINEFN vinfo_t* psyco_memory_read(PsycoObject* po, vinfo_t* nv_ptr, long offset, vinfo_t* rt_vindex, int size2, bool nonsigned) { mem_access(po, nv_ptr, offset, rt_vindex, size2); BEGIN_CODE switch (size2) { case 0: /* load 1 byte */ if (nonsigned) INSN_load1u(); else INSN_load1(); break; case 1: /* load 2 bytes */ if (nonsigned) INSN_load2u(); else INSN_load2(); break; default: /* load 4 bytes */ INSN_load4(); } 
END_CODE return vinfo_new(RunTime_TOS()); } DEFINEFN bool psyco_memory_write(PsycoObject* po, vinfo_t* nv_ptr, long offset, vinfo_t* rt_vindex, int size2, vinfo_t* value) { if (!compute_vinfo(value, po)) return false; mem_access(po, nv_ptr, offset, rt_vindex, size2); BEGIN_CODE INSN_nv_push(value->source); switch (size2) { case 0: /* store 1 byte */ INSN_store1(); break; case 1: /* store 2 bytes */ INSN_store2(); break; default: /* store 4 bytes */ INSN_store4(); } INSNPOPPED(1); END_CODE return true; } /*****************************************************************/ /*** Emit common instructions ***/ DEFINEFN condition_code_t bininstrcmp(PsycoObject* po, int base_py_op, vinfo_t* v1, vinfo_t* v2) { condition_code_t result = 0; vinfo_t* tmp; BEGIN_CODE /* the only operation with have is '<', so exchange v1 and v2 as needed */ switch (base_py_op & COMPARE_BASE_MASK) { case Py_NE: result = 1; /* fall through */ case Py_EQ: INSN_rt_push(v1->source); INSNPUSHED(1); INSN_rt_push(v2->source); INSN_cmpeq(); goto done; case Py_LE: result = 1; /* fall through */ case Py_GT: tmp=v1; v1=v2; v2=tmp; break; case Py_GE: result = 1; break; default: ; } INSN_rt_push(v1->source); INSNPUSHED(1); INSN_rt_push(v2->source); if (base_py_op & COMPARE_UNSIGNED) INSN_cmpltu(); else INSN_cmplt(); done: END_CODE return (condition_code_t) (po->stack_depth | result); } DEFINEFN vinfo_t* bininstrcond(PsycoObject* po, condition_code_t cc, long immed_true, long immed_false) { code_t* arg; BEGIN_CODE INSN_immed(immed_true); INSNPUSHED(1); INSN_rtcc_push(cc); INSN_jcondnear(&arg); INSN_pop(); INSN_immed(immed_false); *arg = INSN_CODE_LABEL() - (arg+sizeof(code_t)); END_CODE return vinfo_new(RunTime_TOSF(false, immed_true >= 0 && immed_false >= 0)); } #define DEFINE_BINARY_INSTRO(insn) \ DEFINEFN vinfo_t* bininstr##insn(PsycoObject* po, bool ovf, bool nonneg, \ vinfo_t* v1, vinfo_t* v2) { \ BEGIN_CODE \ INSN_nv_push(v1->source); INSNPUSHED(1); \ INSN_nv_push(v2->source); \ INSN_##insn##o(); \ 
END_CODE \ if (ovf) { \ INSNPUSHED(1); \ if (runtime_condition_f(po, po->stack_depth)) \ return NULL; /* if overflow */ \ INSNPOPPED(1); \ } \ BEGIN_CODE \ INSN_pop(); \ END_CODE \ return vinfo_new(RunTime_TOSF(false, nonneg)); \ } #define DEFINE_UNARY_INSTRO(insn) \ DEFINEFN vinfo_t* unaryinstr##insn(PsycoObject* po, bool ovf, bool nonneg, \ vinfo_t* v1) { \ BEGIN_CODE \ INSN_rt_push(v1->source); INSNPUSHED(1); \ INSN_##insn##o(); \ END_CODE \ if (ovf) { \ INSNPUSHED(1); \ if (runtime_condition_f(po, po->stack_depth)) \ return NULL; /* if overflow */ \ INSNPOPPED(1); \ } \ BEGIN_CODE \ INSN_pop(); \ END_CODE \ return vinfo_new(RunTime_TOSF(false, nonneg)); \ } #define DEFINE_BINARY_INSTR(insn) \ DEFINEFN vinfo_t* bininstr##insn(PsycoObject* po, bool nonneg, \ vinfo_t* v1, vinfo_t* v2) { \ BEGIN_CODE \ INSN_nv_push(v1->source); INSNPUSHED(1); \ INSN_nv_push(v2->source); \ INSN_##insn(); \ END_CODE \ return vinfo_new(RunTime_TOSF(false, nonneg)); \ } #define DEFINE_UNARY_INSTR(insn) \ DEFINEFN vinfo_t* unaryinstr##insn(PsycoObject* po, bool nonneg, \ vinfo_t* v1) { \ BEGIN_CODE \ INSN_rt_push(v1->source); INSNPUSHED(1); \ INSN_##insn(); \ END_CODE \ return vinfo_new(RunTime_TOSF(false, nonneg)); \ } DEFINE_BINARY_INSTRO(add) DEFINE_BINARY_INSTR(or ) DEFINE_BINARY_INSTR(and) DEFINE_BINARY_INSTRO(sub) DEFINE_BINARY_INSTR(xor) DEFINE_BINARY_INSTRO(mul) DEFINE_BINARY_INSTR(lshift) DEFINE_BINARY_INSTR(rshift) DEFINE_UNARY_INSTR(inv) DEFINE_UNARY_INSTRO(neg) DEFINE_UNARY_INSTRO(abs) DEFINEFN vinfo_t* bint_add_i(PsycoObject* po, vinfo_t* rt1, long value2, bool unsafe) { BEGIN_CODE NEED_CC(); INSN_rt_push(rt1->source); INSNPUSHED(1); INSN_immed(value2); INSN_addo(); INSN_pop(); END_CODE return vinfo_new(RunTime_TOSF(false, unsafe && value2>=0 && is_rtnonneg(rt1->source))); } DEFINEFN vinfo_t* bint_mul_i(PsycoObject* po, vinfo_t* v1, long value2, bool ovf) { BEGIN_CODE INSN_rt_push(v1->source); INSNPUSHED(1); INSN_immed(value2); INSN_mulo(); END_CODE if (ovf) { 
INSNPUSHED(1); if (runtime_condition_f(po, po->stack_depth)) return NULL; INSNPOPPED(1); } BEGIN_CODE INSN_pop(); END_CODE return vinfo_new(RunTime_TOSF(false, ovf && value2>=0 && is_rtnonneg(v1->source))); } #define GENERIC_SHIFT_BY(Insn, nonneg) \ { \ extra_assert(0 < counter && counter < LONG_BIT); \ BEGIN_CODE \ INSN_rt_push(v1->source); INSNPUSHED(1); \ INSN_immed(counter); \ Insn (); \ END_CODE \ return vinfo_new(RunTime_TOSF(false, nonneg)); \ } DEFINEFN vinfo_t* bint_lshift_i(PsycoObject* po, vinfo_t* v1, int counter) GENERIC_SHIFT_BY(INSN_lshift, false) DEFINEFN vinfo_t* bint_rshift_i(PsycoObject* po, vinfo_t* v1, int counter) GENERIC_SHIFT_BY(INSN_rshift, is_nonneg(v1->source)) DEFINEFN vinfo_t* bint_urshift_i(PsycoObject* po, vinfo_t* v1, int counter) GENERIC_SHIFT_BY(INSN_urshift, true) DEFINEFN condition_code_t bint_cmp_i(PsycoObject* po, int base_py_op, vinfo_t* rt1, long immed2) { condition_code_t result = 0; BEGIN_CODE /* the only operation with have is '<', so exchange v1 and v2 as needed */ switch (base_py_op & COMPARE_BASE_MASK) { case Py_NE: result = 1; /* fall through */ case Py_EQ: INSN_rt_push(rt1->source); INSNPUSHED(1); INSN_immed(immed2); INSN_cmpeq(); break; case Py_LE: result = 1; /* fall through */ case Py_GT: INSN_immed(immed2); INSNPUSHED(1); /* reversed arguments */ INSN_rt_push(rt1->source); if (base_py_op & COMPARE_UNSIGNED) INSN_cmpltu(); else INSN_cmplt(); break; case Py_GE: result = 1; /* fall through */ default: INSN_rt_push(rt1->source); INSNPUSHED(1); INSN_immed(immed2); if (base_py_op & COMPARE_UNSIGNED) INSN_cmpltu(); else INSN_cmplt(); } END_CODE return (condition_code_t) (po->stack_depth | result); } DEFINEFN vinfo_t* bfunction_result(PsycoObject* po, bool ref) { return vinfo_new(RunTime_TOSF(ref, false)); } DEFINEFN vinfo_t* make_runtime_copy(PsycoObject* po, vinfo_t* v) { if (!compute_vinfo(v, po)) return NULL; BEGIN_CODE INSN_nv_push(v->source); INSNPUSHED(1); END_CODE return vinfo_new(RunTime_TOSF(false, 
is_nonneg(v->source))); } --- NEW FILE: iprocessor.c --- #include "../processor.h" #include "../codemanager.h" #include "../cstruct.h" #include "../blockalloc.h" #include "../Python/frames.h" #include "ivm-insns.h" /* We distinguish between different types of interpreters: * - the most compatible one is switch()-based * - a GCC extension allows absolute threaded jumps * - a more recent GCC extension allows relative threaded jumps * (not sure that it is better than the previous one, though; * will need some tests) */ #ifdef __GNUC__ # define VM_THREADED_INTERPRETER 1 # define VM_RELATIVE_JUMPS 0 /* XXX check if this is really better */ //# define VM_RELATIVE_JUMPS (__GNUC__>3||(__GNUC__==3&&__GNUC_MINOR__>=2)) #else # define VM_THREADED_INTERPRETER 0 #endif /***************************************************************/ /*** Stack of the virtual machine ***/ /* Note that the stack grows downwards. */ /* See ivm-insns.h for customizable parameters. */ /* XXX loafy stack overflow checking ahead */ #define FINFO_STOP ((struct stack_frame_info_s*) 1) typedef struct vmstackframe_s vmstackframe_t; struct vmstackframe_s { struct stack_frame_info_s* finfo; /* describes the *called* (i.e. 
next frame's) function (this is for compatibility with the hacks needed for real machine code */ char* limit; char* sp; /* stack pointer: limit<=sp<=origin */ char* origin; vmstackframe_t* prevframe; /* the previous frame */ vmstackframe_t* nextframe; /* the next more recent frame */ }; BLOCKALLOC_STATIC(vmstackframe, vmstackframe_t, 256) typedef struct { PyCStruct_HEAD vmstackframe_t* topframe; /* most recent stack frame */ } PyVMStack; inline PyVMStack* vm_get_stack(PyObject* tdict) { PyVMStack* st = (PyVMStack*) PyDict_GetItem(tdict, Py_None); if (st == NULL) { st = PyCStruct_NEW(PyVMStack, NULL); st->topframe = NULL; if (PyDict_SetItem(tdict, Py_None, (PyObject*) st)) OUT_OF_MEMORY(); } return st; } /***************************************************************/ /*** Virtual machine interpreter ***/ #define bytecode_nextopcode() (*nextip++) #define bytecode_nextcode_t() (*nextip++) #define bytecode_nextword_t() (tmp = *(word_t*) nextip, \ nextip += sizeof(word_t), \ tmp) #define bytecode_nextchar() ((char)(*nextip++)) #define bytecode_next(T) bytecode_next##T() #define stack_nth(N) sp[N] #define stack_shift(N) (sp += (N), \ extra_assert((char*)sp >= frame->limit)) #define stack_shift_pos(N) (sp += (N)) #define stack_savesp() (frame->sp = (char*) sp) #define macro_args /* nothing */ #define macro_noarg () /* macro call with no argument */ inline long abso(long a) { return a < 0 ? 
-a : a; } #define ovf_checkabso(a) (a == LONG_MIN) #define ovf_checknego(a) (a == LONG_MIN) #define ovf_checkaddo(a, b) (((a+b)^a) < 0 && (a^b) >= 0) #define ovf_checksubo(a, b) (((a-b)^a) < 0 && ((a-b)^b) >= 0) #define ovf_checkmulo(a, b) psyco_int_mul_ovf(a, b) #define ovf_check(INSN, ARGS) ovf_check##INSN ARGS #define impl_stackgrow(sz) if ((char*)sp - frame->limit < \ (sz) + VM_STACK_SIZE_MARGIN) \ sp = vm_stackgrow(frame, sp) #define impl_jcond(test, newip) if (test) nextip = (code_t*) newip #define impl_jump(newip) nextip = (code_t*) newip typedef code_t* (*cbuild1_fn) (char*); typedef code_t* (*cbuild2_fn) (char*, word_t extra); #define impl_cbuild1(fn) stack_savesp(); \ nextip = ((cbuild1_fn) fn) ( \ (char*)((((long)nextip) + PSYCO_DEBUG + \ ALIGN_CODE_MASK)&~ALIGN_CODE_MASK)) #define impl_cbuild2(fn, extra) stack_savesp(); \ nextip = ((cbuild2_fn) fn) ( \ (char*)((((long)nextip) + PSYCO_DEBUG + \ ALIGN_CODE_MASK)&~ALIGN_CODE_MASK), \ extra) #define impl_incref(o) Py_INCREF((PyObject*) o) #define impl_decref(o) stack_savesp(); Py_DECREF((PyObject*) o) #define impl_decrefnz(o) ((PyObject*) o)->ob_refcnt-- /* implemented in pycompiler.c */ EXTERNFN void cimpl_finalize_frame_locals(PyObject*, PyObject*, PyObject*); #define impl_exitframe(tb, val, exc) stack_savesp(); \ if (exc) cimpl_finalize_frame_locals( \ (PyObject*) exc, \ (PyObject*) val, \ (PyObject*) tb) #define impl_pyenter(finfo) frame = vm_pyenter(vmst, frame, finfo, sp) #define impl_pyleave frame = vm_pyleave(vmst, frame, sp); \ sp = (word_t*) frame->sp; /* XXX hack! We abuse the fact that frame->sp is not completely in sync with the local sp (this is the case for optimization purposes). When impl_vmcall() is called, frame->sp still has the value it had at the last impl_pyenter() (see iencoding.c:psyco_call_psyco()). 
*/ #define impl_vmcall(target) (tmp=(word_t) nextip, \ nextip=(code_t*) target, \ frame->origin = frame->sp, \ tmp) #define impl_ret(retaddr) if (retaddr == 0) { \ return retval; \ } else { \ nextip = (code_t*) retaddr; \ } /* XXX divide the stack in separately-growable blocks across INSN_vmcall() */ typedef word_t (*ccalled_fn_t_0) (void); typedef word_t (*ccalled_fn_t_1) (word_t); typedef word_t (*ccalled_fn_t_2) (word_t,word_t); typedef word_t (*ccalled_fn_t_3) (word_t,word_t,word_t); typedef word_t (*ccalled_fn_t_4) (word_t,word_t,word_t,word_t); typedef word_t (*ccalled_fn_t_5) (word_t,word_t,word_t,word_t,word_t); typedef word_t (*ccalled_fn_t_6) (word_t,word_t,word_t,word_t,word_t,word_t); typedef word_t (*ccalled_fn_t_7) (word_t,word_t,word_t,word_t,word_t,word_t,word_t); #define impl_ccall(nbargs, fn, args) (stack_savesp(), \ (((ccalled_fn_t_##nbargs)(fn)) args)) #define impl_checkdict(dict, key, result, target, index) do { \ PyDictObject* d = (PyDictObject*) dict; \ if (d->ma_mask < (unsigned)index || \ d->ma_table[index].me_key != (PyObject*)key || \ d->ma_table[index].me_value != (PyObject*)result) \ nextip = (code_t*) target; \ } while (0) inline vmstackframe_t* vm_pyenter(PyVMStack* vmst, vmstackframe_t* frame, word_t finfo, word_t* currentsp) { vmstackframe_t* top = psyco_llalloc_vmstackframe(); top->finfo = FINFO_STOP; top->limit = frame->limit; top->sp = frame->sp = (char*) currentsp; top->origin = frame->origin; top->prevframe = frame; top->nextframe = NULL; frame->finfo = (struct stack_frame_info_s*) finfo; frame->nextframe = top; vmst->topframe = top; return top; } inline vmstackframe_t* vm_pyleave(PyVMStack* vmst, vmstackframe_t* top, word_t* currentsp) { vmstackframe_t* prevtop = top->prevframe; prevtop->nextframe = NULL; if (prevtop->limit != top->limit) { /* only when leaving an non-inlined subfunction. Then prevtop->sp already contains the equivalent pointer to restore. 
This only works if the pyleave instruction is immediately after vmcall, because it assumes that the stack has the same depth as when pyenter was called (which is false when pyleave is used for an inlined subfunction). */ PyMem_Free(top->limit); } else { prevtop->sp = (char*) currentsp; } vmst->topframe = prevtop; psyco_llfree_vmstackframe(top); return prevtop; } static word_t* vm_stackgrow(vmstackframe_t* frame, word_t* currentsp) { /* enlarge the stack of the topmost frame 'frame' and all the previous frames which share exactly the same stack */ vmstackframe_t* f; char* currentlimit = frame->limit; char* currentorigin = frame->origin; char* newsp; size_t cursize = currentorigin - (char*)currentsp; size_t newsize = cursize + VM_EXTRA_STACK_SIZE+2*VM_STACK_SIZE_MARGIN-1; newsize &= -VM_STACK_SIZE_MARGIN; frame->limit = PyMem_Malloc(newsize); if (frame->limit == NULL) OUT_OF_MEMORY(); frame->origin = frame->limit + newsize; newsp = frame->origin - cursize; memcpy(newsp, currentsp, cursize); for (f = frame->prevframe; f != NULL && f->limit == currentlimit; f = f->prevframe) { if (f->origin == currentorigin) { /* exactly the same stack, fix it too */ f->limit = frame->limit; f->origin = frame->origin; f->sp = newsp; } else { /* previous frame has a larger stack starting from the same position, which means it is from a parent function -- the child function's stack is smaller because it does not contain everything past the input arguments. In this case there is no old stack to free because the old stack is still in use. */ return (word_t*) newsp; } } /* free old stack */ PyMem_Free(currentlimit); return (word_t*) newsp; } /* on register-limited architectures it may help a little bit to force these local variables in registers, as the compiler may think it would be better not to. 
*/ #if defined(__GNUC__) && !PSYCO_DEBUG # ifdef __i386__ # define F_SPREG asm("esi") # define F_NEXTIPREG asm("edi") # endif #endif #ifndef F_ACCUMREG # define F_ACCUMREG /* nothing */ #endif #ifndef F_SPREG # define F_SPREG /* nothing */ #endif #ifndef F_NEXTIPREG # define F_NEXTIPREG /* nothing */ #endif static word_t vm_interpreter_main_loop(PyVMStack* vmst) { /* virtual machine "registers" */ register word_t accum F_ACCUMREG; /* 1st stack item, for optimization */ register word_t* sp F_SPREG; /* stack pointer */ register code_t* nextip F_NEXTIPREG; /* next instruction pointer */ word_t retval = 0; /* return value, when known */ word_t tmp; vmstackframe_t* frame = vmst->topframe; /* initialization */ nextip = (code_t*) frame->limit; /* hack from psyco_processor_run() */ sp = (word_t*) frame->sp; accum = 0xCDCDCDCD; /* unused */ /* Let's loop! */ # if VM_THREADED_INTERPRETER { # if VM_RELATIVE_JUMPS # include "prolog/insns-threaded-rel.i" # else # include "prolog/insns-threaded.i" # endif } # else while (1) { switch (bytecode_nextopcode()) { # include "prolog/insns-switch.i" default: psyco_fatal_msg("invalid vm opcode"); } } # endif } /***************************************************************/ /*** Virtual machine entry point ***/ #define VM_ENOUGH_STACK \ (top->origin - top->limit >= 4*sizeof(long)*argc + \ VM_INITIAL_MINIMAL_STACK_SIZE + VM_STACK_SIZE_MARGIN) DEFINEFN PyObject* psyco_processor_run(CodeBufferObject* codebuf, long initial_stack[], struct stack_frame_info_s*** finfo, PyObject* tdict) { PyObject* result; PyVMStack* vmst = vm_get_stack(tdict); vmstackframe_t* prevtop = vmst->topframe; vmstackframe_t* top = psyco_llalloc_vmstackframe(); *finfo = &top->finfo; top->finfo = FINFO_STOP; top->prevframe = prevtop; top->nextframe = NULL; if (prevtop) { top->limit = prevtop->limit; top->origin = prevtop->sp; /* start using the stack from the prevtop's current sp */ } else { top->limit = NULL; top->origin = NULL; } /* to store the incoming arguments on 
the stack, the "cleanest" solution seems to be to build a temporary pseudo-code. The not-so-clean hack is to abuse the stack to write this code. */ { int argc = RUN_ARGC(codebuf); word_t* arg; code_t* code; if (!VM_ENOUGH_STACK) { top->limit = PyMem_Malloc(VM_STACK_BLOCK); if (!top->limit) OUT_OF_MEMORY(); top->origin = top->limit + VM_STACK_BLOCK; extra_assert(VM_ENOUGH_STACK); } top->sp = top->origin; code = (code_t*) top->limit; INIT_CODE_EMISSION(code); while (argc) { /* incoming arguments */ long argvalue = initial_stack[--argc]; INSN_immed(argvalue); } INSN_immed(0); /* return address. Special value 0 means "leave the interpreter main loop" */ INSN_jumpfar(&arg); *arg = (word_t) codebuf->codestart; } vmst->topframe = top; result = (PyObject*) vm_interpreter_main_loop(vmst); vmst->topframe = prevtop; /* restore the stack */ if (prevtop == NULL || top->limit != prevtop->limit) { PyMem_Free(top->limit); } psyco_llfree_vmstackframe(top); return result; } /***************************************************************/ /*** Misc ***/ static struct stack_frame_info_s* finfo_stop = FINFO_STOP; DEFINEFN struct stack_frame_info_s** psyco_next_stack_frame(struct stack_frame_info_s** finfo) { vmstackframe_t* frame = (vmstackframe_t*) finfo; extra_assert(finfo == &frame->finfo); if (frame->nextframe == NULL) return &finfo_stop; else return &frame->nextframe->finfo; } /* check for signed integer multiplication overflow */ /* code shamelessly ripped off Python's intobject.c */ static char python_style_mul_ovf(long a, long b) { long longprod; /* a*b in native long arithmetic */ double doubled_longprod; /* (double)longprod */ double doubleprod; /* (double)a * (double)b */ longprod = a * b; doubleprod = (double)a * (double)b; doubled_longprod = (double)longprod; /* Fast path for normal case: small multiplicands, and no info is lost in either method. */ if (doubled_longprod == doubleprod) return false; /* no overflow */ /* Somebody somewhere lost info. 
Close enough, or way off?  Note that a != 0 and b != 0 (else
	   doubled_longprod == doubleprod == 0).  The difference either is
	   or isn't significant compared to the true value (of which
	   doubleprod is a good approximation). */
	{
		const double diff = doubled_longprod - doubleprod;
		const double absdiff = diff >= 0.0 ? diff : -diff;
		const double absprod = doubleprod >= 0.0 ? doubleprod : -doubleprod;
		/* absdiff/absprod <= 1/32 iff
		   32 * absdiff <= absprod -- 5 good bits is "close enough" */
		return !(32.0 * absdiff <= absprod);
	}
}
DEFINEVAR char (*psyco_int_mul_ovf) (long a, long b) = &python_style_mul_ovf;

/* don't look */
/* Generic C call dispatcher: invokes 'c_func' with 'argcount' word-sized
   arguments taken from the 'arguments' array.  Supports at most 7
   arguments; anything more is a fatal error. */
static long hacky_call_var(void* c_func, int argcount, long arguments[])
{
	switch (argcount) {
	case 0:
		return ((ccalled_fn_t_0) c_func) ();
	case 1:
		return ((ccalled_fn_t_1) c_func) (arguments[0]);
	case 2:
		return ((ccalled_fn_t_2) c_func) (arguments[0], arguments[1]);
	case 3:
		return ((ccalled_fn_t_3) c_func) (arguments[0], arguments[1],
		                                  arguments[2]);
	case 4:
		return ((ccalled_fn_t_4) c_func) (arguments[0], arguments[1],
		                                  arguments[2], arguments[3]);
	case 5:
		return ((ccalled_fn_t_5) c_func) (arguments[0], arguments[1],
		                                  arguments[2], arguments[3],
		                                  arguments[4]);
	case 6:
		return ((ccalled_fn_t_6) c_func) (arguments[0], arguments[1],
		                                  arguments[2], arguments[3],
		                                  arguments[4], arguments[5]);
	case 7:
		return ((ccalled_fn_t_7) c_func) (arguments[0], arguments[1],
		                                  arguments[2], arguments[3],
		                                  arguments[4], arguments[5],
		                                  arguments[6]);
	default:
		psyco_fatal_msg("too many arguments to C function call");
	}
	return 0;
}
DEFINEVAR long (*psyco_call_var) (void* c_func, int argcount,
                                  long arguments[]) = &hacky_call_var;
--- NEW FILE: ipyencoding.c ---
#include "ipyencoding.h"
#include "../pycodegen.h"

/* Emit code that creates a new reference to 'w': a run-time
   Py_INCREF is always generated. */
DEFINEFN
void decref_create_new_ref(PsycoObject* po, vinfo_t* w)
{
	psyco_incref_nv(po, w);
}

/* Like decref_create_new_ref(), but if the reference held by 'w' can
   be consumed ("eaten"), no code is emitted at all.  Returns whether
   the reference could be eaten. */
DEFINEFN
bool decref_create_new_lastref(PsycoObject* po, vinfo_t* w)
{
	bool could_eat = eat_reference(w);
	if (!could_eat) {
		/* in this case we must Py_INCREF() the object */
psyco_incref_nv(po, w);
	}
	return could_eat;
}
--- NEW FILE: ipyencoding.h ---
/***************************************************************/
/***   Processor- and language-dependent code producers      ***/
/***************************************************************/

#ifndef _IPYENCODING_H
#define _IPYENCODING_H

#include "iencoding.h"
#include "ivm-insns.h"

/* See comments in i386/ipyencoding.h about these functions */

/* Emit a 'checkdict' vm instruction that verifies at run-time that the
   dict entry 'ep' of 'dict' still holds the same key/value pair; on
   change, the vm jumps to 'onchange_target'. */
inline void dictitem_check_change(PsycoObject* po,
                                  code_t* onchange_target,
                                  PyDictObject* dict, PyDictEntry* ep)
{
	int index = ep - dict->ma_table;
	PyObject* key = ep->me_key;
	PyObject* result = ep->me_value;
	word_t* arg1;
	word_t* arg2;
	word_t* arg3;
	word_t* arg4;
	word_t* arg5;
	Py_INCREF(key);      /* XXX these become immortal */
	Py_INCREF(result);   /* XXX */
	BEGIN_CODE
	/* this special instruction quickly checks that the same object
	   is still in place in the dictionary */
	INSN_checkdict(&arg1, &arg2, &arg3, &arg4, &arg5);
	*arg1 = (word_t) dict;
	*arg2 = (word_t) key;
	*arg3 = (word_t) result;
	*arg4 = (word_t) onchange_target;
	*arg5 = index;
	/* the dictitem_update_*() patchers below rely on arg4/arg5
	   being the last two words of the emitted instruction */
	extra_assert(arg4 == ((word_t*)code) - 2);
	extra_assert(arg5 == ((word_t*)code) - 1);
	END_CODE
}

/* Patch an emitted checkdict: the entry moved to position 'new_ep'
   inside the same dict but the key/value pair did not change. */
inline code_t* dictitem_update_nochange(code_t* originalmacrocode,
                                        PyDictObject* dict,
                                        PyDictEntry* new_ep)
{
	int index = new_ep - dict->ma_table;
	word_t* arg5 = ((word_t*)originalmacrocode) - 1;
	*arg5 = index;
	return originalmacrocode;
}

/* Patch an emitted checkdict into an unconditional jump to 'target'
   (used when the dict entry is known to have changed for good). */
inline void dictitem_update_jump(code_t* originalmacrocode, code_t* target)
{
	word_t* arg4 = ((word_t*)originalmacrocode) - 2;
	word_t* arg5 = ((word_t*)originalmacrocode) - 1;
	*arg4 = (word_t) target;
	*arg5 = (word_t) -1;   /* jump always -- we effectively changed the
	                          checkdict instruction into an
	                          unconditional jump */
}

/* Emit a run-time Py_INCREF of 'v' (compile-time or run-time value) */
inline void psyco_incref_nv(PsycoObject* po, vinfo_t* v)
{
	BEGIN_CODE
	INSN_nv_push(v->source);
	INSN_incref();
	END_CODE
}

/* Emit a run-time Py_INCREF of the run-time value 'v' */
inline void psyco_incref_rt(PsycoObject* po, vinfo_t* v)
{
	BEGIN_CODE
	INSN_rt_push(v->source);
	INSN_incref();
	END_CODE
}

inline void
psyco_decref_rt(PsycoObject* po, vinfo_t* v)
{
	/* emit a run-time Py_DECREF of the run-time value 'v' */
	BEGIN_CODE
	INSN_rt_push(v->source);
	INSN_decref();
	END_CODE
}

/* Emit a run-time Py_DECREF of the compile-time-known object 'o';
   the 'decrefnz' instruction takes the object as an immediate word. */
inline void psyco_decref_c(PsycoObject* po, PyObject* o)
{
	word_t* arg;
	BEGIN_CODE
	INSN_decrefnz(&arg);
	*arg = (word_t) o;
	END_CODE
}

EXTERNFN void decref_create_new_ref(PsycoObject* po, vinfo_t* w);
EXTERNFN bool decref_create_new_lastref(PsycoObject* po, vinfo_t* w);

/* called by psyco_emit_header() */
#define INITIALIZE_FRAME_LOCALS(nframelocal)   do {             \
	STACK_CORRECTION(sizeof(long)*((nframelocal)-1));       \
	INSN_immed(0);   /* f_exc_type, initially NULL */       \
} while (0)

/* called by psyco_finish_return() */
#define WRITE_FRAME_EPILOGUE(retval, nframelocal)   do {                  \
	/* load the return value into 'flag' -- little abuse here :-) */  \
	if (retval != SOURCE_DUMMY) {                                     \
		INSN_nv_push(retval);                                     \
		INSN_retval();                                            \
	}                                                                 \
	if (nframelocal > 0)                                              \
	{                                                                 \
		/* psyco_emit_header() was used; first clear the stack    \
		   only up to and not including the frame-local data */   \
		int framelocpos = getstack(                               \
			LOC_CONTINUATION->array->items[0]->source);       \
		STACK_CORRECTION(framelocpos - po->stack_depth);          \
		po->stack_depth = framelocpos;                            \
		                                                          \
		/* perform Python-specific cleanup */                     \
		INSN_exitframe();                                         \
		INSNPOPPED(3);                                            \
	}                                                                 \
} while (0)

#endif /* _IPYENCODING_H */
--- NEW FILE: itiming.h ---
/***************************************************************/
/***   Measuring processor time                              ***/
/***************************************************************/

/* Only implemented for processor-specific ways to measure time spent
   in Python, so it does not apply for the virtual machine.  Psyco's
   profilers will revert to clock().
*/
--- NEW FILE: ivm-insns.c ---
#include "ivm-insns.h"

/* Helper macros used by the Prolog-generated instruction emitters in
   prolog/insns-igen.i.  By convention 'code' points just past the most
   recently written bytecode byte (see ivm-insns.h for the extra
   "latest opcode" byte kept at *code). */

/* record a copy of the most recently emitted opcode at *code */
#define setlatestopcode(opcode)  (*code = (opcode))
/* emit one opcode byte */
#define INSN_EMIT_opcode(opcode) (*code++ = (opcode))
/* overwrite an already-emitted opcode (followed by 'totalargs' argument
   bytes) with a combined opcode, for opcode compression */
#define INSN_EMIT_modified_opcode(opcode, totalargs) \
	(code[-(totalargs)-1] = (opcode))

#define bytecode_size(T)  sizeof(T)

/* emitters for the various instruction argument types */
#define INSN_EMIT_void(arg)   do { /*nothing*/ } while (0)
#define INSN_EMIT_byte(arg)   (*code++ = (code_t)(arg))
#define INSN_EMIT_char(arg)   (*code++ = (code_t)(arg))
#define INSN_EMIT_int(arg)    (*(int*)code = (int)(arg), code += sizeof(int))
#define INSN_EMIT_word_t(arg) (*(word_t*)code=(word_t)(arg),code+=sizeof(word_t))
/* emit a placeholder argument and hand back its address so the caller
   can patch in the real value later */
#define INSN_EMIT_placeholder_byte(ppbyte) (*(ppbyte)=code++)
#define INSN_EMIT_placeholder_long(ppword) (*(ppword)=(word_t*)code, \
                                            code+=sizeof(word_t))

#include "prolog/insns-igen.i"
--- NEW FILE: ivm-insns.h ---
/***************************************************************/
/***   Link to the Prolog-generated instruction producers    ***/
/***************************************************************/

#ifndef _IVMINSNS_H
#define _IVMINSNS_H

#include "../vcompiler.h"

#ifndef META_ASSERT_DEPTH
# define META_ASSERT_DEPTH   0
#endif
#ifndef VM_STRESS_STACK
# define VM_STRESS_STACK     0
#endif

/* Some tunable run-time virtual machine parameters.
   Keep in mind that each thread has its own VM stack. */
#if !VM_STRESS_STACK
# define VM_INITIAL_MINIMAL_STACK_SIZE   4096
# define VM_EXTRA_STACK_SIZE             8192
# define VM_STACK_SIZE_MARGIN            2048    /* power of 2 */
# define VM_STACK_BLOCK                 16384
#else
/* stress mode: tiny stack sizes to exercise the stack-growing code */
# define VM_INITIAL_MINIMAL_STACK_SIZE    512
# define VM_EXTRA_STACK_SIZE              512
# define VM_STACK_SIZE_MARGIN            1024    /* power of 2 */
# define VM_STACK_BLOCK                  2048
#endif

/* The virtual machine uses opcode compression: some sequences of
   opcodes can be compressed into a single opcode which expects all the
   arguments of the individual opcodes.  A single, slightly more
   complicated opcode is typically much faster to interpret than
   several simpler opcodes.
Each INSN_xxx() macro detects if the most recently generated opcode
   can be combined with the new one.  For this purpose, we store an
   extra byte at '*code' which is the most recently emitted opcode.
   In other words, 'code' points (as normally) just past the most
   recently written byte of the vm bytecode, but during code generation
   this bytecode is immediately following by an extra byte which is a
   copy of the most recently emitted opcode instruction.  (This is
   necessary: the most recent opcode instruction starts just a few
   bytes before '*code', but we don't know how many exactly -- it
   depends on the number of arguments.)
*/
/* read back the "latest opcode" byte stored just past the code */
#define LATEST_OPCODE   (extra_assert(*code <= LAST_DEFINED_OPCODE), *code)
/* opcode 0 = no previous opcode, so nothing can be combined */
#define INIT_CODE_EMISSION(code)   (*(code) = 0)
#define POST_CODEBUFFER_SIZE   1   /* byte for the LATEST_OPCODE */
#define INSN_EMIT_macro_opcode(opcode)  (*code++ = (opcode), \
                                         *code = (opcode))
/* mark a jump target: opcode combination must not cross it */
#define INSN_CODE_LABEL()   (INIT_CODE_EMISSION(code), code)

typedef long word_t;

#include "prolog/insns-igen-h.i"

/* stack handling: track the compile-time stack depth in 'po' */
#define INSNPOPPED(N)   (po->stack_depth -= (N)*sizeof(long))
#define INSNPUSHED(N)   (po->stack_depth += (N)*sizeof(long))

/* meta-instructions */
/* push a non-virtual value: an immediate if known at compile-time,
   otherwise the run-time stack slot */
#define INSN_nv_push(nvsource)   do {                           \
	if (is_compiletime(nvsource))                           \
		INSN_immed(CompileTime_Get(nvsource)->value);   \
	else                                                    \
		INSN_rt_push(nvsource);                         \
} while (0)

#define INSN_rt_push(rtsource)  INSN_s_push(CURRENT_STACK_POSITION(rtsource))
#define INSN_rt_pop(rtsource)   INSN_s_pop(CURRENT_STACK_POSITION(rtsource))

/* push a condition-code value stored in the stack; per iencoding.h,
   an odd 'cc' means the negation of the stored flag */
#define INSN_rtcc_push(cc)   do {                                       \
	extra_assert((cc) < CC_TOTAL);                                  \
	INSN_s_push((po->stack_depth - (cc) + 1) / sizeof(long));       \
	if ((cc) & 1)                                                   \
		INSN_not();                                             \
} while (0)

#if META_ASSERT_DEPTH
# define META_assertdepth(x)   BEGIN_CODE INSN_assertdepth(x); END_CODE
#else
# define META_assertdepth(x)   /* nothing */
#endif

#endif /* _IVMINSNS_H */

Index: iencoding.h
===================================================================
RCS file: /cvsroot/psyco/psyco/c/ivm/iencoding.h,v
retrieving revision 1.1
retrieving
revision 1.2 diff -C2 -d -r1.1 -r1.2 *** iencoding.h 6 May 2003 16:43:14 -0000 1.1 --- iencoding.h 3 Sep 2003 19:57:57 -0000 1.2 *************** *** 8,23 **** #include "../psyco.h" ! #include "ivm-insns.h" ! #define REG_TOTAL 0 /* the virtual machine has only a stack */ ! typedef enum { ! CC_TOS = 0, /* flag is on the top of the stack */ ! CC_NEG_TOS = 1, /* negation of CC_TOS */ ! #define CC_TOTAL 2 ! CC_ALWAYS_FALSE = 2, /* pseudo condition codes for known outcomes */ ! CC_ALWAYS_TRUE = 3, ! CC_ERROR = -1 } condition_code_t; --- 8,32 ---- #include "../psyco.h" ! #define MACHINE_CODE_FORMAT "ivm" ! #define HAVE_FP_FN_CALLS 0 ! #define REG_TOTAL 0 /* the virtual machine has only a stack */ ! /* the 'flags' condition codes are stored on the stack as well, ! which makes our vm very unprocessor-like. We abuse condition_code_t ! to actually hold the stack position where the 'current flags' are ! stored. If moreover the last bit of condition_code_t is set, then ! the 'flag' is actually the negation of the value in the stack. */ ! typedef int condition_code_t; ! #define CC_TOTAL RunTime_StackMax ! #define HAVE_CCREG 0 ! ! /* pseudo condition codes for known outcomes */ ! #define CC_ALWAYS_FALSE (CC_TOTAL) ! #define CC_ALWAYS_TRUE (CC_TOTAL|1) ! #define CC_ERROR (-1) ! ! #define INVERT_CC(cc) ((condition_code_t)((int)(cc) ^ 1)) *************** *** 25,45 **** #define PROCESSOR_PSYCOOBJECT_FIELDS \ int stack_depth; /* the size of data currently pushed in the stack */ \ ! vinfo_t* ccreg; /* processor condition codes (aka flags) */ /* release a run-time vinfo_t */ ! #define RTVINFO_RELEASE(rtsource) do { \ ! /* pop an item off the stack only if it is close to the top */ \ ! switch (current_stack_position(rtsource)) { \ ! case 0: \ ! insn_pop(); \ ! break; \ ! case 1: \ ! insn_pop2nd(); \ ! break; \ ! default: \ ! break; /* not removed */ \ ! } \ ! 
} while (0) /* move a run-time vinfo_t */ --- 34,86 ---- #define PROCESSOR_PSYCOOBJECT_FIELDS \ int stack_depth; /* the size of data currently pushed in the stack */ \ ! int minimal_stack_size; /* total stack size that we are sure about */ ! #define INIT_PROCESSOR_PSYCOOBJECT(po) \ ! ((po)->minimal_stack_size = VM_INITIAL_MINIMAL_STACK_SIZE) + #define PROCESSOR_FROZENOBJECT_FIELDS \ + unsigned short minimal_extra_stack_words; /* ~= minimal_stack_size */ + #define SAVE_PROCESSOR_FROZENOBJECT(fpo, po) do { \ + int extra_stack_words = ((po)->minimal_stack_size - \ + (po)->stack_depth) / sizeof(long); \ + if (extra_stack_words < 0) \ + extra_stack_words = 0; \ + else if (extra_stack_words > 0xFFFF) \ + extra_stack_words = 0xFFFF; \ + (fpo)->minimal_extra_stack_words = extra_stack_words; \ + } while (0) + #define RESTORE_PROCESSOR_FROZENOBJECT(fpo, po) do { \ + (po)->minimal_stack_size = (po)->stack_depth + \ + (fpo)->minimal_extra_stack_words * sizeof(long); \ + } while (0) + + #define CHECK_STACK_SPACE() do { \ + if (po->stack_depth >= po->minimal_stack_size) { \ + BEGIN_CODE \ + INSN_stackgrow(); \ + END_CODE \ + po->minimal_stack_size = po->stack_depth + VM_EXTRA_STACK_SIZE; \ + } \ + META_assertdepth(po->stack_depth); \ + } while(0) + + + #define CURRENT_STACK_POSITION(rtsource) ( \ + (po->stack_depth - getstack(rtsource)) / sizeof(long)) /* release a run-time vinfo_t */ ! /* #define RTVINFO_RELEASE(rtsource) do { \ */ ! /* // pop an item off the stack only if it is close to the top \ */ ! /* switch (CURRENT_STACK_POSITION(rtsource)) { \ */ ! /* case 0: \ */ ! /* INSN_pop(); INSN_POPPED(1); \ */ ! /* break; \ */ ! /* case 1: \ */ ! /* INSN_pop2nd(); ??? \ */ ! /* break; \ */ ! /* default: \ */ ! /* break; // not removed \ */ ! /* } \ */ ! /* } while (0) */ ! #define RTVINFO_RELEASE(rtsource) do { /* nothing */ } while (0) /* move a run-time vinfo_t */ *************** *** 47,54 **** /* for PsycoObject_Duplicate() */ ! #define DUPLICATE_PROCESSOR(result, po) do { \ ! 
if (po->ccreg != NULL) \ ! result->ccreg = po->ccreg->tmp; \ ! result->stack_depth = po->stack_depth; \ } while (0) --- 88,94 ---- /* for PsycoObject_Duplicate() */ ! #define DUPLICATE_PROCESSOR(result, po) do { \ ! result->stack_depth = po->stack_depth; \ ! result->minimal_stack_size = po->minimal_stack_size; \ } while (0) *************** *** 56,67 **** #define RTVINFO_CHECKED(po, found) do { /*nothing*/ } while (0) /***************************************************************/ /*** some macro for code emission ***/ ! #define ALIGN_PAD_CODE_PTR() do { /*nothing*/ } while (0) ! #define ALIGN_WITH_BYTE(byte) do { /*nothing*/ } while (0) #define ALIGN_WITH_NOP() do { /*nothing*/ } while (0) ! #define ALIGN_NO_FILL() do { /*nothing*/ } while (0) --- 96,256 ---- #define RTVINFO_CHECKED(po, found) do { /*nothing*/ } while (0) + #define ABOUT_TO_CALL_SUBFUNCTION(finfo) do { \ + word_t* _arg; \ + INSN_pyenter(&_arg); \ + *_arg = (word_t)(finfo); \ + } while (0) + #define RETURNED_FROM_SUBFUNCTION() do { \ + INSN_pyleave(); \ + } while (0) + + + /*****************************************************************/ + /*** Emit common instructions ***/ + + #define EXTERN_BINARY_INSTRO(insn) \ + EXTERNFN vinfo_t* bininstr##insn(PsycoObject* po, bool ovf, bool nonneg, \ + vinfo_t* v1, vinfo_t* v2); + #define EXTERN_UNARY_INSTRO(insn) \ + EXTERNFN vinfo_t* unaryinstr##insn(PsycoObject* po, bool ovf, bool nonneg, \ + vinfo_t* v1); + #define EXTERN_BINARY_INSTR(insn) \ + EXTERNFN vinfo_t* bininstr##insn(PsycoObject* po, bool nonneg, \ + vinfo_t* v1, vinfo_t* v2); + #define EXTERN_UNARY_INSTR(insn) \ + EXTERNFN vinfo_t* unaryinstr##insn(PsycoObject* po, bool nonneg, \ + vinfo_t* v1); + + EXTERN_BINARY_INSTRO(add) + EXTERN_BINARY_INSTR (or) + EXTERN_BINARY_INSTR (and) + EXTERN_BINARY_INSTRO(sub) + EXTERN_BINARY_INSTR (xor) + EXTERN_BINARY_INSTRO(mul) + EXTERN_BINARY_INSTR (lshift) + EXTERN_BINARY_INSTR (rshift) + EXTERN_UNARY_INSTR (inv) + EXTERN_UNARY_INSTRO (neg) + 
EXTERN_UNARY_INSTRO (abs) + + EXTERNFN condition_code_t bininstrcmp(PsycoObject* po, int base_py_op, + vinfo_t* v1, vinfo_t* v2); + EXTERNFN vinfo_t* bininstrcond(PsycoObject* po, condition_code_t cc, + long immed_true, long immed_false); + + #define BINARY_INSTR_ADD(ovf, nonneg) bininstradd(po, ovf, nonneg, v1, v2) + #define BINARY_INSTR_OR( ovf, nonneg) bininstror (po, nonneg, v1, v2) + #define BINARY_INSTR_AND(ovf, nonneg) bininstrand(po, nonneg, v1, v2) + #define BINARY_INSTR_SUB(ovf, nonneg) bininstrsub(po, ovf, nonneg, v1, v2) + #define BINARY_INSTR_XOR(ovf, nonneg) bininstrxor(po, nonneg, v1, v2) + #define BINARY_INSTR_MUL(ovf, nonneg) bininstrmul(po, ovf, nonneg, v1, v2) + #define BINARY_INSTR_LSHIFT( nonneg) bininstrlshift(po, nonneg, v1, v2) + #define BINARY_INSTR_RSHIFT( nonneg) bininstrrshift(po, nonneg, v1, v2) + #define BINARY_INSTR_CMP(base_py_op) bininstrcmp(po, base_py_op, v1, v2) + #define BINARY_INSTR_COND(cc, i1, i2) bininstrcond(po, cc, i1, i2) + #define UNARY_INSTR_INV(ovf, nonneg) unaryinstrinv(po, nonneg, v1) + #define UNARY_INSTR_NEG(ovf, nonneg) unaryinstrneg(po, ovf, nonneg, v1) + #define UNARY_INSTR_ABS(ovf, nonneg) unaryinstrabs(po, ovf, nonneg, v1) + + EXTERNFN vinfo_t* bint_add_i(PsycoObject* po, vinfo_t* rt1, long value2, + bool unsafe); + EXTERNFN vinfo_t* bint_mul_i(PsycoObject* po, vinfo_t* v1, long value2, + bool ovf); + EXTERNFN vinfo_t* bint_lshift_i(PsycoObject* po, vinfo_t* v1, int counter); + EXTERNFN vinfo_t* bint_rshift_i(PsycoObject* po, vinfo_t* v1, int counter); + EXTERNFN vinfo_t* bint_urshift_i(PsycoObject* po, vinfo_t* v1, int counter); + EXTERNFN condition_code_t bint_cmp_i(PsycoObject* po, int base_py_op, + vinfo_t* rt1, long immed2); + EXTERNFN vinfo_t* bfunction_result(PsycoObject* po, bool ref); /***************************************************************/ /*** some macro for code emission ***/ ! #define NEED_CC() do { /* nothing -- no real flag register */ } while (0) ! ! 
#define CHECK_NONZERO_FROM_RT(src, rcc) (rcc = getstack(src)) ! ! #define SAVE_REGS_FN_CALLS NEED_CC() ! ! #define TEMP_SAVE_REGS_FN_CALLS do { /* nothing */ } while (0) ! ! #define TEMP_RESTORE_REGS_FN_CALLS do { /* nothing */ } while (0) ! ! #define JUMP_TO(target) do { \ ! word_t* _arg; \ ! INSN_jumpfar(&_arg); \ ! *_arg = (word_t) target; \ ! } while (0) ! ! #define FAR_COND_JUMP_TO(target, condition) do { \ ! word_t* _arg; \ ! INSN_rtcc_push(condition); \ ! INSN_jcondfar(&_arg); \ ! *_arg = (word_t) target; \ ! } while(0) ! ! #define MAXIMUM_SIZE_OF_FAR_JUMP (sizeof(code_t)+sizeof(word_t)+sizeof(code_t)) ! ! ! #define CALL_SET_ARG_IMMED(immed, arg_index, nb_args) do { \ ! INSN_immed(immed); \ ! INSNPUSHED(1); \ ! } while (0) ! #define CALL_SET_ARG_FROM_RT(source, arg_index, nb_args) do { \ ! INSN_rt_push(source); \ ! INSNPUSHED(1); \ ! } while (0) ! #define CALL_SET_ARG_FROM_ADDR(source, arg_index, nb_args) do { \ ! INSN_ref_push(CURRENT_STACK_POSITION(source)); \ ! INSNPUSHED(1); \ ! } while (0) ! #define CALL_C_FUNCTION(target, nb_args) do { \ ! word_t* _arg; \ ! switch (nb_args) { ... [truncated message content] |