39 lines
1.7 KiB
Diff
------------------------------------------------------------------------
|
|
r2499 | sewardj | 2012-09-02 14:13:34 +0200 (Sun, 02 Sep 2012) | 4 lines
|
|
|
|
Remove alignment checks for VMPSADBW, VPHMINPOSUW, VPALIGNR since they
|
|
do not apply to the AVX versions of these instructions. Fixes #305926.
|
|
(Jakub Jelinek, jakub@redhat.com)
|
|
|
|
------------------------------------------------------------------------
|
|
Index: priv/guest_amd64_toIR.c
|
|
===================================================================
|
|
--- valgrind-3.8.0/VEX/priv/guest_amd64_toIR.c (revision 2498)
|
|
+++ valgrind-3.8.0/VEX/priv/guest_amd64_toIR.c (revision 2499)
|
|
@@ -16028,7 +16028,8 @@
|
|
DIP("%sphminposuw %s,%s\n", mbV, nameXMMReg(rE), nameXMMReg(rG));
|
|
} else {
|
|
addr = disAMode ( &alen, vbi, pfx, delta, dis_buf, 0 );
|
|
- gen_SEGV_if_not_16_aligned(addr);
|
|
+ if (!isAvx)
|
|
+ gen_SEGV_if_not_16_aligned(addr);
|
|
assign( sV, loadLE(Ity_V128, mkexpr(addr)) );
|
|
delta += alen;
|
|
DIP("%sphminposuw %s,%s\n", mbV, dis_buf, nameXMMReg(rG));
|
|
@@ -25898,7 +25899,6 @@
|
|
nameXMMReg(rV), nameXMMReg(rG));
|
|
} else {
|
|
addr = disAMode( &alen, vbi, pfx, delta, dis_buf, 1 );
|
|
- gen_SEGV_if_not_16_aligned( addr );
|
|
assign( sV, loadLE(Ity_V128, mkexpr(addr)) );
|
|
imm8 = getUChar(delta+alen);
|
|
delta += alen+1;
|
|
@@ -26291,7 +26291,6 @@
|
|
} else {
|
|
addr = disAMode( &alen, vbi, pfx, delta, dis_buf,
|
|
1/* imm8 is 1 byte after the amode */ );
|
|
- gen_SEGV_if_not_16_aligned( addr );
|
|
assign( src_vec, loadLE( Ity_V128, mkexpr(addr) ) );
|
|
imm8 = (Int)getUChar(delta+alen);
|
|
delta += alen+1;
|