|
From: <sv...@va...> - 2005-12-15 22:57:42
|
Author: njn
Date: 2005-12-15 22:57:35 +0000 (Thu, 15 Dec 2005)
New Revision: 5355
Log:
Factor out the masking from {LOADV,STOREV}[1248]. Cuts 30 lines and makes
things more readable.
Modified:
branches/COMPVBITS/memcheck/mc_main.c
Modified: branches/COMPVBITS/memcheck/mc_main.c
===================================================================
--- branches/COMPVBITS/memcheck/mc_main.c 2005-12-15 22:40:32 UTC (rev 5354)
+++ branches/COMPVBITS/memcheck/mc_main.c 2005-12-15 22:57:35 UTC (rev 5355)
@@ -2632,12 +2632,19 @@
are a UWord, and for STOREV8 they are a ULong.
*/
 
+/* If any part of '_a' indicated by the mask is 1, either
+ '_a' is not naturally '_sz'-aligned, or it exceeds the range
+ covered by the primary map. */
+#define UNALIGNED_OR_HIGH(_a,_sz) ((_a) & MASK((_sz)))
+#define MASK(_sz) ( ~((0x10000-(_sz)) | ((N_PRIMARY_MAP-1) << 16)) )
+
+
/* ------------------------ Size = 8 ------------------------ */
 
static inline __attribute__((always_inline))
ULong mc_LOADV8 ( Addr aA, Bool isBigEndian )
{
- UWord mask, a, sm_off64, vabits64;
+ UWord a, sm_off64, vabits64;
SecMap* sm;
 
PROF_EVENT(200, "mc_LOADV8");
@@ -2645,14 +2652,9 @@
if (VG_DEBUG_MEMORY >= 2)
return mc_LOADVn_slow( aA, 8, isBigEndian );
 
- mask = ~((0x10000-8) | ((N_PRIMARY_MAP-1) << 16));
- a = (UWord)aA;
+ a = (UWord)aA;
 
- /* If any part of 'a' indicated by the mask is 1, either */
- /* 'a' is not naturally aligned, or 'a' exceeds the range */
- /* covered by the primary map. Either way we defer to the */
- /* slow-path case. */
- if (EXPECTED_NOT_TAKEN(a & mask)) {
+ if (EXPECTED_NOT_TAKEN( UNALIGNED_OR_HIGH(a,8) )) {
PROF_EVENT(201, "mc_LOADV8-slow1");
return (UWord)mc_LOADVn_slow( aA, 8, isBigEndian );
}
@@ -2688,7 +2690,7 @@
static inline __attribute__((always_inline))
void mc_STOREV8 ( Addr aA, ULong vbytes, Bool isBigEndian )
{
- UWord mask, a, sm_off64, vabits64;
+ UWord a, sm_off64, vabits64;
SecMap* sm;
 
PROF_EVENT(210, "mc_STOREV8");
@@ -2700,14 +2702,9 @@
return;
}
 
- mask = ~((0x10000-8) | ((N_PRIMARY_MAP-1) << 16));
- a = (UWord)aA;
+ a = (UWord)aA;
 
- /* If any part of 'a' indicated by the mask is 1, either */
- /* 'a' is not naturally aligned, or 'a' exceeds the range */
- /* covered by the primary map. Either way we defer to the */
- /* slow-path case. */
- if (EXPECTED_NOT_TAKEN(a & mask)) {
+ if (EXPECTED_NOT_TAKEN( UNALIGNED_OR_HIGH(a,8) )) {
PROF_EVENT(211, "mc_STOREV8-slow1");
mc_STOREVn_slow( aA, 8, vbytes, isBigEndian );
return;
@@ -2758,7 +2755,7 @@
static inline __attribute__((always_inline))
UWord mc_LOADV4 ( Addr a, Bool isBigEndian )
{
- UWord mask, sm_off, vabits32;
+ UWord sm_off, vabits32;
SecMap* sm;
 
PROF_EVENT(220, "mc_LOADV4");
@@ -2766,13 +2763,7 @@
if (VG_DEBUG_MEMORY >= 2)
return (UWord)mc_LOADVn_slow( a, 4, isBigEndian );
 
- mask = ~((0x10000-4) | ((N_PRIMARY_MAP-1) << 16));
-
- /* If any part of 'a' indicated by the mask is 1, either */
- /* 'a' is not naturally aligned, or 'a' exceeds the range */
- /* covered by the primary map. Either way we defer to the */
- /* slow-path case. */
- if (EXPECTED_NOT_TAKEN(a & mask)) {
+ if (EXPECTED_NOT_TAKEN( UNALIGNED_OR_HIGH(a,4) )) {
PROF_EVENT(221, "mc_LOADV4-slow1");
return (UWord)mc_LOADVn_slow( a, 4, isBigEndian );
}
@@ -2813,7 +2804,7 @@
static inline __attribute__((always_inline))
void mc_STOREV4 ( Addr aA, UWord vbytes, Bool isBigEndian )
{
- UWord mask, a, sm_off, vabits32;
+ UWord a, sm_off, vabits32;
SecMap* sm;
 
PROF_EVENT(230, "mc_STOREV4");
@@ -2823,14 +2814,9 @@
return;
}
 
- mask = ~((0x10000-4) | ((N_PRIMARY_MAP-1) << 16));
- a = (UWord)aA;
+ a = (UWord)aA;
 
- /* If any part of 'a' indicated by the mask is 1, either */
- /* 'a' is not naturally aligned, or 'a' exceeds the range */
- /* covered by the primary map. Either way we defer to the */
- /* slow-path case. */
- if (EXPECTED_NOT_TAKEN(a & mask)) {
+ if (EXPECTED_NOT_TAKEN( UNALIGNED_OR_HIGH(a,4) )) {
PROF_EVENT(231, "mc_STOREV4-slow1");
mc_STOREVn_slow( aA, 4, (ULong)vbytes, isBigEndian );
return;
@@ -2915,7 +2901,7 @@
static inline __attribute__((always_inline))
UWord mc_LOADV2 ( Addr aA, Bool isBigEndian )
{
- UWord mask, a, sm_off, vabits32;
+ UWord a, sm_off, vabits32;
SecMap* sm;
 
PROF_EVENT(240, "mc_LOADV2");
@@ -2923,14 +2909,9 @@
if (VG_DEBUG_MEMORY >= 2)
return (UWord)mc_LOADVn_slow( aA, 2, isBigEndian );
 
- mask = ~((0x10000-2) | ((N_PRIMARY_MAP-1) << 16));
- a = (UWord)aA;
+ a = (UWord)aA;
 
- /* If any part of 'a' indicated by the mask is 1, either */
- /* 'a' is not naturally aligned, or 'a' exceeds the range */
- /* covered by the primary map. Either way we defer to the */
- /* slow-path case. */
- if (EXPECTED_NOT_TAKEN(a & mask)) {
+ if (EXPECTED_NOT_TAKEN( UNALIGNED_OR_HIGH(a,2) )) {
PROF_EVENT(241, "mc_LOADV2-slow1");
return (UWord)mc_LOADVn_slow( aA, 2, isBigEndian );
}
@@ -2968,7 +2949,7 @@
static inline __attribute__((always_inline))
void mc_STOREV2 ( Addr aA, UWord vbytes, Bool isBigEndian )
{
- UWord mask, a, sm_off, vabits32;
+ UWord a, sm_off, vabits32;
SecMap* sm;
 
PROF_EVENT(250, "mc_STOREV2");
@@ -2978,14 +2959,9 @@
return;
}
 
- mask = ~((0x10000-2) | ((N_PRIMARY_MAP-1) << 16));
- a = (UWord)aA;
+ a = (UWord)aA;
 
- /* If any part of 'a' indicated by the mask is 1, either */
- /* 'a' is not naturally aligned, or 'a' exceeds the range */
- /* covered by the primary map. Either way we defer to the */
- /* slow-path case. */
- if (EXPECTED_NOT_TAKEN(a & mask)) {
+ if (EXPECTED_NOT_TAKEN( UNALIGNED_OR_HIGH(a,2) )) {
PROF_EVENT(251, "mc_STOREV2-slow1");
mc_STOREVn_slow( aA, 2, (ULong)vbytes, isBigEndian );
return;
@@ -3040,7 +3016,7 @@
VG_REGPARM(1)
UWord MC_(helperc_LOADV1) ( Addr aA )
{
- UWord mask, a, sm_off, vabits32;
+ UWord a, sm_off, vabits32;
SecMap* sm;
 
PROF_EVENT(260, "helperc_LOADV1");
@@ -3049,13 +3025,9 @@
return (UWord)mc_LOADVn_slow( aA, 1, False/*irrelevant*/ );
# endif
 
- mask = ~((0x10000-1) | ((N_PRIMARY_MAP-1) << 16));
- a = (UWord)aA;
+ a = (UWord)aA;
 
- /* If any part of 'a' indicated by the mask is 1, it means 'a'
- exceeds the range covered by the primary map. In which case we
- defer to the slow-path case. */
- if (EXPECTED_NOT_TAKEN(a & mask)) {
+ if (EXPECTED_NOT_TAKEN( UNALIGNED_OR_HIGH(a,1) )) {
PROF_EVENT(261, "helperc_LOADV1-slow1");
return (UWord)mc_LOADVn_slow( aA, 1, False/*irrelevant*/ );
}
@@ -3087,7 +3059,7 @@
VG_REGPARM(2)
void MC_(helperc_STOREV1) ( Addr aA, UWord vbyte )
{
- UWord mask, a, sm_off, vabits32;
+ UWord a, sm_off, vabits32;
SecMap* sm;
 
PROF_EVENT(270, "helperc_STOREV1");
@@ -3097,12 +3069,9 @@
return;
# endif
 
- mask = ~((0x10000-1) | ((N_PRIMARY_MAP-1) << 16));
- a = (UWord)aA;
- /* If any part of 'a' indicated by the mask is 1, it means 'a'
exceeds the range covered by the primary map. In which case we
defer to the slow-path case. */
- if (EXPECTED_NOT_TAKEN(a & mask)) {
+ a = (UWord)aA;
+
+ if (EXPECTED_NOT_TAKEN( UNALIGNED_OR_HIGH(a,1) )) {
PROF_EVENT(271, "helperc_STOREV1-slow1");
mc_STOREVn_slow( aA, 1, (ULong)vbyte, False/*irrelevant*/ );
return;
|