|
From: <sv...@va...> - 2005-03-30 23:36:32
|
Author: tom
Date: 2005-03-31 00:36:28 +0100 (Thu, 31 Mar 2005)
New Revision: 3486

Modified:
   trunk/coregrind/amd64/dispatch.S
Log:
Make the dispatcher code position independent so that PIE mode works.

Modified: trunk/coregrind/amd64/dispatch.S
===================================================================
--- trunk/coregrind/amd64/dispatch.S	2005-03-30 19:31:18 UTC (rev 3485)
+++ trunk/coregrind/amd64/dispatch.S	2005-03-30 23:36:28 UTC (rev 3486)
@@ -60,8 +60,12 @@
 	pushq %r15
 	pushq %rdi
 
-	/* 0(%rsp) holds cached copy of guest_state */
+	movq  VG_(dispatch_ctr)@GOTPCREL(%rip), %rsi
+	pushq (%rsi)
 
+	/* 8(%rsp) holds cached copy of guest_state */
+	/* 0(%rsp) holds cached copy of VG_(dispatch_ctr) */
+
 	/* Set up the guest state pointer */
 	movq %rdi, %rbp
 	
@@ -95,17 +99,19 @@
 	movq %rax, OFFSET_amd64_RIP(%rbp)
 
 	/* Are we out of timeslice?  If yes, defer to scheduler. */
-	subl $1, VG_(dispatch_ctr)
+	subl $1, 0(%rsp)
 	jz counter_is_zero
 
 	/* try a fast lookup in the translation cache */
 	movq %rax, %rbx
 	andq $VG_TT_FAST_MASK, %rbx
-	movq VG_(tt_fast)(,%rbx,8), %rcx
+	movq VG_(tt_fast)@GOTPCREL(%rip), %rcx
+	movq (%rcx,%rbx,8), %rcx
 	cmpq %rax, (%rcx)
 	jnz fast_lookup_failed
 	/* increment bb profile counter */
-	movq VG_(tt_fastN)(,%rbx,8), %rdx
+	movq VG_(tt_fastN)@GOTPCREL(%rip), %rdx
+	movq (%rdx,%rbx,8), %rdx
 	incl (%rdx)
 
 	/* Found a match.  Call tce[1], which is 8 bytes along, since
@@ -118,13 +124,13 @@
 	   %rbp indicates further details of the control transfer
 	   requested to the address in %rax.
 	
-	   If rbp is unchanged (== * 0(%rsp)), just jump next to %rax.
+	   If rbp is unchanged (== * 8(%rsp)), just jump next to %rax.
 
 	   Otherwise fall out, back to the scheduler, and let it
 	   figure out what to do next.
 	*/
 
-	cmpq 0(%rsp), %rbp
+	cmpq 8(%rsp), %rbp
 	jz dispatch_boring
 
 	jmp dispatch_exceptional
@@ -157,6 +163,8 @@
 	jmp run_innerloop_exit_REALLY
 
 run_innerloop_exit_REALLY:
+	movq VG_(dispatch_ctr)@GOTPCREL(%rip), %rsi
+	popq (%rsi)
 	popq %rdi
 	popq %r15
 	popq %r14
@@ -184,20 +192,20 @@
 	jz counter_is_zero
 
 	/* save %rax in %RIP and defer to sched */
-	movq 0(%rsp), %rdi
+	movq 8(%rsp), %rdi
 	movq %rax, OFFSET_amd64_RIP(%rdi)
 	movq %rbp, %rax
 	jmp run_innerloop_exit
 
 fast_lookup_failed:
 	/* %RIP is up to date here since dispatch_boring dominates */
-	addl $1, VG_(dispatch_ctr)
+	addl $1, 0(%rsp)
 	movq $VG_TRC_INNER_FASTMISS, %rax
 	jmp run_innerloop_exit
 
 counter_is_zero:
 	/* %RIP is up to date here since dispatch_boring dominates */
-	addl $1, VG_(dispatch_ctr)
+	addl $1, 0(%rsp)
 	movq $VG_TRC_INNER_COUNTERZERO, %rax
 	jmp run_innerloop_exit
 |