|
From: <sv...@va...> - 2012-03-08 14:51:12
|
sewardj 2012-03-08 14:51:01 +0000 (Thu, 08 Mar 2012)
New Revision: 12430
Log:
Change the behaviour of --partial-loads-ok=yes to avoid false
negatives, by marking the V bits that come from out of range parts of
the access as undefined; and hence any use of them leads to a value
error. Prior to this they were marked as defined and could be used
without error.
Behaviour of --partial-loads-ok=no (the default case) is unchanged.
Also add some testing thereof.
Fixes #294523. Modified version of a patch and testcase by Patrick
J. LoPresti (lop...@gm...).
Added files:
trunk/memcheck/tests/test-plo-no.stderr.exp-le32
trunk/memcheck/tests/test-plo-no.stderr.exp-le64
trunk/memcheck/tests/test-plo-no.stdout.exp
trunk/memcheck/tests/test-plo-no.vgtest
trunk/memcheck/tests/test-plo-yes.stderr.exp-le32
trunk/memcheck/tests/test-plo-yes.stderr.exp-le64
trunk/memcheck/tests/test-plo-yes.stdout.exp
trunk/memcheck/tests/test-plo-yes.vgtest
trunk/memcheck/tests/test-plo.c
Modified files:
trunk/memcheck/mc_main.c
trunk/memcheck/tests/Makefile.am
Added: trunk/memcheck/tests/test-plo-no.stdout.exp (+0 -0)
===================================================================
Modified: trunk/memcheck/mc_main.c (+71 -29)
===================================================================
--- trunk/memcheck/mc_main.c 2012-03-08 14:44:57 +00:00 (rev 12429)
+++ trunk/memcheck/mc_main.c 2012-03-08 14:51:01 +00:00 (rev 12430)
@@ -1141,19 +1141,6 @@
__attribute__((noinline))
ULong mc_LOADVn_slow ( Addr a, SizeT nBits, Bool bigendian )
{
- /* Make up a 64-bit result V word, which contains the loaded data for
- valid addresses and Defined for invalid addresses. Iterate over
- the bytes in the word, from the most significant down to the
- least. */
- ULong vbits64 = V_BITS64_UNDEFINED;
- SizeT szB = nBits / 8;
- SSizeT i; // Must be signed.
- SizeT n_addrs_bad = 0;
- Addr ai;
- Bool partial_load_exemption_applies;
- UChar vbits8;
- Bool ok;
-
PROF_EVENT(30, "mc_LOADVn_slow");
/* ------------ BEGIN semi-fast cases ------------ */
@@ -1188,38 +1175,93 @@
}
/* ------------ END semi-fast cases ------------ */
+ ULong vbits64 = V_BITS64_UNDEFINED; /* result */
+ ULong pessim64 = V_BITS64_DEFINED; /* only used when p-l-ok=yes */
+ SSizeT szB = nBits / 8;
+ SSizeT i; /* Must be signed. */
+ SizeT n_addrs_bad = 0;
+ Addr ai;
+ UChar vbits8;
+ Bool ok;
+
tl_assert(nBits == 64 || nBits == 32 || nBits == 16 || nBits == 8);
+ /* Make up a 64-bit result V word, which contains the loaded data
+ for valid addresses and Defined for invalid addresses. Iterate
+ over the bytes in the word, from the most significant down to
+ the least. The vbits to return are calculated into vbits64.
+ Also compute the pessimising value to be used when
+ --partial-loads-ok=yes. n_addrs_bad is redundant (the relevant
+ info can be gleaned from pessim64) but is used as a
+ cross-check. */
for (i = szB-1; i >= 0; i--) {
PROF_EVENT(31, "mc_LOADVn_slow(loop)");
ai = a + byte_offset_w(szB, bigendian, i);
ok = get_vbits8(ai, &vbits8);
- if (!ok) n_addrs_bad++;
vbits64 <<= 8;
vbits64 |= vbits8;
+ if (!ok) n_addrs_bad++;
+ pessim64 <<= 8;
+ pessim64 |= (ok ? V_BITS8_DEFINED : V_BITS8_UNDEFINED);
}
- /* This is a hack which avoids producing errors for code which
- insists in stepping along byte strings in aligned word-sized
- chunks, and there is a partially defined word at the end. (eg,
- optimised strlen). Such code is basically broken at least WRT
- semantics of ANSI C, but sometimes users don't have the option
- to fix it, and so this option is provided. Note it is now
- defaulted to not-engaged.
+ /* In the common case, all the addresses involved are valid, so we
+ just return the computed V bits and have done. */
+ if (LIKELY(n_addrs_bad == 0))
+ return vbits64;
- A load from a partially-addressible place is allowed if:
- - the command-line flag is set
+ /* If there's no possibility of getting a partial-loads-ok
+ exemption, report the error and quit. */
+ if (!MC_(clo_partial_loads_ok)) {
+ MC_(record_address_error)( VG_(get_running_tid)(), a, szB, False );
+ return vbits64;
+ }
+
+ /* The partial-loads-ok exemption might apply. Find out if it
+ does. If so, don't report an addressing error, but do return
+ Undefined for the bytes that are out of range, so as to avoid
+ false negatives. If it doesn't apply, just report an addressing
+ error in the usual way. */
+
+ /* Some code steps along byte strings in aligned word-sized chunks
+ even when there is only a partially defined word at the end (eg,
+ optimised strlen). This is allowed by the memory model of
+ modern machines, since an aligned load cannot span two pages and
+ thus cannot "partially fault". Despite such behaviour being
+ declared undefined by ANSI C/C++.
+
+ Therefore, a load from a partially-addressible place is allowed
+ if all of the following hold:
+ - the command-line flag is set [by default, it isn't]
- it's a word-sized, word-aligned load
- at least one of the addresses in the word *is* valid
+
+ Since this suppresses the addressing error, we avoid false
+ negatives by marking bytes undefined when they come from an
+ invalid address.
*/
- partial_load_exemption_applies
- = MC_(clo_partial_loads_ok) && szB == VG_WORDSIZE
- && VG_IS_WORD_ALIGNED(a)
- && n_addrs_bad < VG_WORDSIZE;
- if (n_addrs_bad > 0 && !partial_load_exemption_applies)
- MC_(record_address_error)( VG_(get_running_tid)(), a, szB, False );
+ /* "at least one of the addresses is invalid" */
+ tl_assert(pessim64 != V_BITS64_DEFINED);
+ if (szB == VG_WORDSIZE && VG_IS_WORD_ALIGNED(a)
+ && n_addrs_bad < VG_WORDSIZE) {
+ /* Exemption applies. Use the previously computed pessimising
+ value for vbits64 and return the combined result, but don't
+ flag an addressing error. The pessimising value is Defined
+ for valid addresses and Undefined for invalid addresses. */
+ /* for assumption that doing bitwise or implements UifU */
+ tl_assert(V_BIT_UNDEFINED == 1 && V_BIT_DEFINED == 0);
+ /* (really need "UifU" here...)
+ vbits64 UifU= pessim64 (is pessimised by it, iow) */
+ vbits64 |= pessim64;
+ return vbits64;
+ }
+
+ /* Exemption doesn't apply. Flag an addressing error in the normal
+ way. */
+ MC_(record_address_error)( VG_(get_running_tid)(), a, szB, False );
+
return vbits64;
}
Added: trunk/memcheck/tests/test-plo-yes.vgtest (+2 -0)
===================================================================
--- trunk/memcheck/tests/test-plo-yes.vgtest 2012-03-08 14:44:57 +00:00 (rev 12429)
+++ trunk/memcheck/tests/test-plo-yes.vgtest 2012-03-08 14:51:01 +00:00 (rev 12430)
@@ -0,0 +1,2 @@
+prog: test-plo
+vgopts: -q --partial-loads-ok=yes
Modified: trunk/memcheck/tests/Makefile.am (+11 -6)
===================================================================
--- trunk/memcheck/tests/Makefile.am 2012-03-08 14:44:57 +00:00 (rev 12429)
+++ trunk/memcheck/tests/Makefile.am 2012-03-08 14:51:01 +00:00 (rev 12430)
@@ -187,15 +187,19 @@
supp2.stderr.exp supp2.vgtest \
supp.supp \
suppfree.stderr.exp suppfree.vgtest \
+ test-plo-no.vgtest test-plo-no.stdout.exp \
+ test-plo-no.stderr.exp-le64 test-plo-no.stderr.exp-le32 \
+ test-plo-yes.vgtest test-plo-yes.stdout.exp \
+ test-plo-yes.stderr.exp-le64 test-plo-yes.stderr.exp-le32 \
trivialleak.stderr.exp trivialleak.vgtest trivialleak.stderr.exp2 \
unit_libcbase.stderr.exp unit_libcbase.vgtest \
unit_oset.stderr.exp unit_oset.stdout.exp unit_oset.vgtest \
- varinfo1.vgtest varinfo1.stdout.exp varinfo1.stderr.exp varinfo1.stderr.exp-ppc64\
- varinfo2.vgtest varinfo2.stdout.exp varinfo2.stderr.exp varinfo2.stderr.exp-ppc64\
- varinfo3.vgtest varinfo3.stdout.exp varinfo3.stderr.exp varinfo3.stderr.exp-ppc64\
- varinfo4.vgtest varinfo4.stdout.exp varinfo4.stderr.exp varinfo4.stderr.exp-ppc64\
- varinfo5.vgtest varinfo5.stdout.exp varinfo5.stderr.exp varinfo5.stderr.exp-ppc64\
- varinfo6.vgtest varinfo6.stdout.exp varinfo6.stderr.exp varinfo6.stderr.exp-ppc64\
+ varinfo1.vgtest varinfo1.stdout.exp varinfo1.stderr.exp varinfo1.stderr.exp-ppc64 \
+ varinfo2.vgtest varinfo2.stdout.exp varinfo2.stderr.exp varinfo2.stderr.exp-ppc64 \
+ varinfo3.vgtest varinfo3.stdout.exp varinfo3.stderr.exp varinfo3.stderr.exp-ppc64 \
+ varinfo4.vgtest varinfo4.stdout.exp varinfo4.stderr.exp varinfo4.stderr.exp-ppc64 \
+ varinfo5.vgtest varinfo5.stdout.exp varinfo5.stderr.exp varinfo5.stderr.exp-ppc64 \
+ varinfo6.vgtest varinfo6.stdout.exp varinfo6.stderr.exp varinfo6.stderr.exp-ppc64 \
vcpu_bz2.stdout.exp vcpu_bz2.stderr.exp vcpu_bz2.vgtest \
vcpu_fbench.stdout.exp vcpu_fbench.stderr.exp vcpu_fbench.vgtest \
vcpu_fnfns.stdout.exp vcpu_fnfns.stdout.exp-glibc28-amd64 \
@@ -266,6 +270,7 @@
strchr \
str_tester \
supp_unknown supp1 supp2 suppfree \
+ test-plo \
trivialleak \
unit_libcbase unit_oset \
varinfo1 varinfo2 varinfo3 varinfo4 \
Added: trunk/memcheck/tests/test-plo-yes.stderr.exp-le64 (+9 -0)
===================================================================
--- trunk/memcheck/tests/test-plo-yes.stderr.exp-le64 2012-03-08 14:44:57 +00:00 (rev 12429)
+++ trunk/memcheck/tests/test-plo-yes.stderr.exp-le64 2012-03-08 14:51:01 +00:00 (rev 12430)
@@ -0,0 +1,9 @@
+Conditional jump or move depends on uninitialised value(s)
+ ...
+
+Invalid read of size 8
+ ...
+ Address 0x........ is 8 bytes inside a block of size 24 free'd
+ at 0x........: free (vg_replace_malloc.c:...)
+ ...
+
Added: trunk/memcheck/tests/test-plo.c (+86 -0)
===================================================================
--- trunk/memcheck/tests/test-plo.c 2012-03-08 14:44:57 +00:00 (rev 12429)
+++ trunk/memcheck/tests/test-plo.c 2012-03-08 14:51:01 +00:00 (rev 12430)
@@ -0,0 +1,86 @@
+#include <malloc.h>
+#include <stdio.h>
+#include <assert.h>
+
+typedef unsigned long long int ULong;
+typedef unsigned long int UWord;
+
+__attribute__((noinline))
+static int my_ffsll ( ULong x )
+{
+ int i;
+ for (i = 0; i < 64; i++) {
+ if ((x & 1ULL) == 1ULL)
+ break;
+ x >>= 1;
+ }
+ return i+1;
+}
+
+/* Find length of string, assuming it is aligned and shorter than 8
+ characters. Little-endian only. */
+__attribute__((noinline))
+static int aligned_strlen(char *s)
+{
+ /* This is for 64-bit platforms */
+ assert(sizeof(ULong) == 8);
+ /* ..and only works for aligned input */
+ assert(((unsigned long)s & 0x7) == 0);
+
+ /* read 8 bytes */
+ ULong val = *(ULong*)s;
+ /* Subtract one from each byte */
+ ULong val2 = val - 0x0101010101010101ULL;
+ /* Find lowest byte whose high bit changed */
+ val2 ^= val;
+ val2 &= 0x8080808080808080ULL;
+
+ return (my_ffsll(val2) / 8) - 1;
+}
+
+__attribute__((noinline)) void foo ( int x )
+{
+ __asm__ __volatile__("":::"memory");
+}
+
+int
+main(int argc, char *argv[])
+{
+ char *buf = memalign(8, 5);
+ buf[0] = 'a';
+ buf[1] = 'b';
+ buf[2] = 'c';
+ buf[3] = 'd';
+ buf[4] = '\0';
+
+ /* --partial-loads-ok=no: expect addr error (here) */
+ /* --partial-loads-ok=yes: expect no error */
+ if (aligned_strlen(buf) == 4)
+ foo(44);
+
+ /* --partial-loads-ok=no: expect addr error (here) */
+ /* --partial-loads-ok=yes: expect value error (in my_ffsll) */
+ buf[4] = 'x';
+ if (aligned_strlen(buf) == 0)
+ foo(37);
+
+ free(buf);
+
+ /* Also, we need to check that a completely out-of-range,
+ word-sized load gives an addressing error regardless of the
+ setting of --partial-loads-ok. *And* that the resulting
+ value is completely defined. */
+ UWord* words = malloc(3 * sizeof(UWord));
+ free(words);
+
+ /* Should ALWAYS give an addr error. */
+ UWord w = words[1];
+
+ /* Should NEVER give an error (you might expect a value one, but no.) */
+ if (w == 0x31415927) {
+ fprintf(stderr,
+ "Elvis is alive and well and living in Milton Keynes.\n");
+ }
+
+ return 0;
+}
Added: trunk/memcheck/tests/test-plo-yes.stderr.exp-le32 (+1 -0)
===================================================================
--- trunk/memcheck/tests/test-plo-yes.stderr.exp-le32 2012-03-08 14:44:57 +00:00 (rev 12429)
+++ trunk/memcheck/tests/test-plo-yes.stderr.exp-le32 2012-03-08 14:51:01 +00:00 (rev 12430)
@@ -0,0 +1 @@
+XXX put 32 bit results in here
Added: trunk/memcheck/tests/test-plo-no.vgtest (+2 -0)
===================================================================
--- trunk/memcheck/tests/test-plo-no.vgtest 2012-03-08 14:44:57 +00:00 (rev 12429)
+++ trunk/memcheck/tests/test-plo-no.vgtest 2012-03-08 14:51:01 +00:00 (rev 12430)
@@ -0,0 +1,2 @@
+prog: test-plo
+vgopts: -q
Added: trunk/memcheck/tests/test-plo-no.stderr.exp-le64 (+18 -0)
===================================================================
--- trunk/memcheck/tests/test-plo-no.stderr.exp-le64 2012-03-08 14:44:57 +00:00 (rev 12429)
+++ trunk/memcheck/tests/test-plo-no.stderr.exp-le64 2012-03-08 14:51:01 +00:00 (rev 12430)
@@ -0,0 +1,18 @@
+Invalid read of size 8
+ ...
+ Address 0x........ is 0 bytes inside a block of size 5 alloc'd
+ at 0x........: memalign (vg_replace_malloc.c:...)
+ ...
+
+Invalid read of size 8
+ ...
+ Address 0x........ is 0 bytes inside a block of size 5 alloc'd
+ at 0x........: memalign (vg_replace_malloc.c:...)
+ ...
+
+Invalid read of size 8
+ ...
+ Address 0x........ is 8 bytes inside a block of size 24 free'd
+ at 0x........: free (vg_replace_malloc.c:...)
+ ...
+
Added: trunk/memcheck/tests/test-plo-yes.stdout.exp (+0 -0)
===================================================================
Added: trunk/memcheck/tests/test-plo-no.stderr.exp-le32 (+1 -0)
===================================================================
--- trunk/memcheck/tests/test-plo-no.stderr.exp-le32 2012-03-08 14:44:57 +00:00 (rev 12429)
+++ trunk/memcheck/tests/test-plo-no.stderr.exp-le32 2012-03-08 14:51:01 +00:00 (rev 12430)
@@ -0,0 +1 @@
+XXX put 32 bit results in here
|
|
From: Florian K. <br...@ac...> - 2012-03-17 17:38:20
|
On 03/08/2012 09:51 AM, sv...@va... wrote: > sewardj 2012-03-08 14:51:01 +0000 (Thu, 08 Mar 2012) > > New Revision: 12430 > > Log: > Change the behaviour of --partial-loads-ok=yes to avoid false > negatives, by marking the V bits that come from out of range parts of > the access as undefined; and hence any use of them leads to an value > error. Prior to this they were marked as defined and could be used > without error. > > Behaviour of --partial-loads-ok=no (the default case) is unchanged. > > Also add some testing thereof. > > Fixes #294523. Modified version of a patch and testcase by Patrick > J. LoPresti (lop...@gm...). > > Added files: > trunk/memcheck/tests/test-plo-no.stderr.exp-le32 > trunk/memcheck/tests/test-plo-no.stderr.exp-le64 > trunk/memcheck/tests/test-plo-no.stdout.exp > trunk/memcheck/tests/test-plo-no.vgtest > trunk/memcheck/tests/test-plo-yes.stderr.exp-le32 > trunk/memcheck/tests/test-plo-yes.stderr.exp-le64 > trunk/memcheck/tests/test-plo-yes.stdout.exp > trunk/memcheck/tests/test-plo-yes.vgtest > trunk/memcheck/tests/test-plo.c The test-plo-yes version of this testcase fails on big endian platforms. Can you adapt it easily for big-endian ? Florian |
|
From: Julian S. <js...@ac...> - 2012-03-17 20:49:42
|
On Saturday, March 17, 2012, Florian Krohm wrote: > > trunk/memcheck/tests/test-plo-no.stderr.exp-le32 > > trunk/memcheck/tests/test-plo-no.stderr.exp-le64 > > trunk/memcheck/tests/test-plo-no.stdout.exp > > trunk/memcheck/tests/test-plo-no.vgtest > > trunk/memcheck/tests/test-plo-yes.stderr.exp-le32 > > trunk/memcheck/tests/test-plo-yes.stderr.exp-le64 > > trunk/memcheck/tests/test-plo-yes.stdout.exp > > trunk/memcheck/tests/test-plo-yes.vgtest > > trunk/memcheck/tests/test-plo.c > > The test-plo-yes version of this testcase fails on big endian platforms. > Can you adapt it easily for big-endian ? Er, probably, although it would require figuring out how to do this strlen-based-on-carry-bit-propagation game for big endian words instead of little endian ones. Or [probably simpler] write a routine to do swapping of endianness for a 64 bit word, using shifts, ands and ors, which Memcheck should track exactly. Then we can use the existing strlen routine. I had anticipated that this would require different outputs on big endian platforms, hence the -le32/-le64 suffixes. But now I think of it, if we use a byte swap routine in the big endian case, it should then work the same. J |