|
From: <sv...@va...> - 2006-01-13 09:57:14
|
Author: tom
Date: 2006-01-13 09:57:01 +0000 (Fri, 13 Jan 2006)
New Revision: 5525
Log:
Use VG_(am_mmap_anon_float_valgrind) to allocate the unredirected
translation cache. This ensures that the cache space is executable
which it isn't when it is allocated as a static variable in the data
segment, at least on my amd64 box.
Modified:
trunk/coregrind/m_transtab.c
Modified: trunk/coregrind/m_transtab.c
===================================================================
--- trunk/coregrind/m_transtab.c 2006-01-13 09:26:23 UTC (rev 5524)
+++ trunk/coregrind/m_transtab.c 2006-01-13 09:57:01 UTC (rev 5525)
@@ -1241,8 +1241,8 @@
UTCEntry;
 
/* We just allocate forwards in _tc, never deleting. */
-static ULong unredir_tc[N_UNREDIR_TCQ] __attribute__((aligned(8)));
-static Int unredir_tc_used;
+static ULong *unredir_tc;
+static Int unredir_tc_used = N_UNREDIR_TCQ;
 
/* Slots in _tt can come into use and out again (.inUse).
Nevertheless _tt_highwater is maintained so that invalidations
@@ -1256,6 +1256,14 @@
static void init_unredir_tt_tc ( void )
{
Int i;
+ if (unredir_tc == NULL) {
+ SysRes sres = VG_(am_mmap_anon_float_valgrind)( N_UNREDIR_TT * UNREDIR_SZB );
+ if (sres.isError) {
+ VG_(out_of_memory_NORETURN)("init_unredir_tt_tc", N_UNREDIR_TT * UNREDIR_SZB);
+ /*NOTREACHED*/
+ }
+ unredir_tc = (ULong *)sres.val;
+ }
unredir_tc_used = 0;
for (i = 0; i < N_UNREDIR_TT; i++)
unredir_tt[i].inUse = False;
|