firefox/rhbz-2235654.patch

diff -up firefox-117.0/media/ffvpx/libavcodec/x86/mathops.h.gccfailure firefox-117.0/media/ffvpx/libavcodec/x86/mathops.h
--- firefox-117.0/media/ffvpx/libavcodec/x86/mathops.h.gccfailure 2023-08-24 20:33:54.000000000 +0200
+++ firefox-117.0/media/ffvpx/libavcodec/x86/mathops.h 2023-08-30 13:04:56.174949736 +0200
@@ -35,12 +35,20 @@
static av_always_inline av_const int MULL(int a, int b, unsigned shift)
{
int rt, dummy;
+ if (__builtin_constant_p(shift))
__asm__ (
"imull %3 \n\t"
"shrdl %4, %%edx, %%eax \n\t"
:"=a"(rt), "=d"(dummy)
- :"a"(a), "rm"(b), "ci"((uint8_t)shift)
+ :"a"(a), "rm"(b), "i"(shift & 0x1F)
);
+ else
+ __asm__ (
+ "imull %3 \n\t"
+ "shrdl %4, %%edx, %%eax \n\t"
+ :"=a"(rt), "=d"(dummy)
+ :"a"(a), "rm"(b), "c"((uint8_t)shift)
+ );
return rt;
}
@@ -113,19 +121,31 @@ __asm__ volatile(\
// avoid +32 for shift optimization (gcc should do that ...)
#define NEG_SSR32 NEG_SSR32
static inline int32_t NEG_SSR32( int32_t a, int8_t s){
+ if (__builtin_constant_p(s))
__asm__ ("sarl %1, %0\n\t"
: "+r" (a)
- : "ic" ((uint8_t)(-s))
+ : "i" (-s & 0x1F)
);
+ else
+ __asm__ ("sarl %1, %0\n\t"
+ : "+r" (a)
+ : "c" ((uint8_t)(-s))
+ );
return a;
}

#define NEG_USR32 NEG_USR32
static inline uint32_t NEG_USR32(uint32_t a, int8_t s){
+ if (__builtin_constant_p(s))
__asm__ ("shrl %1, %0\n\t"
: "+r" (a)
- : "ic" ((uint8_t)(-s))
+ : "i" (-s & 0x1F)
);
+ else
+ __asm__ ("shrl %1, %0\n\t"
+ : "+r" (a)
+ : "c" ((uint8_t)(-s))
+ );
return a;
}
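
For context (not part of the patch): the change replaces the combined "ci"/"ic" inline-asm constraints (register-or-immediate shift count) with an explicit __builtin_constant_p() split, so GCC only sees the "i" (immediate) constraint when the shift really is a compile-time constant and otherwise loads the count into %cl via "c". The .gccfailure suffix and the rhbz number suggest this works around a GCC build failure with the combined constraint. Below is a minimal standalone sketch of the same technique; the function name shr32 and the test in main are hypothetical, and it assumes gcc/clang on x86 or x86-64 built with -O2 so the dead branch is removed before the asm constraints are checked.

/* Sketch only: constraint split on __builtin_constant_p(), as in the patch. */
#include <stdint.h>
#include <stdio.h>

static inline __attribute__((always_inline))
uint32_t shr32(uint32_t a, uint8_t s)
{
    if (__builtin_constant_p(s))
        /* Shift count known at compile time: emit it as an immediate. */
        __asm__ ("shrl %1, %0" : "+r"(a) : "i"(s & 0x1F));
    else
        /* Runtime shift count: x86 shifts by %cl, so force it there. */
        __asm__ ("shrl %1, %0" : "+r"(a) : "c"(s));
    return a;
}

int main(void)
{
    volatile uint8_t n = 5;                 /* runtime value -> "c" branch */
    printf("%u\n", shr32(0x80000000u, 4));  /* constant      -> "i" branch */
    printf("%u\n", shr32(0x80000000u, n));
    return 0;
}

After inlining, __builtin_constant_p() folds to 1 or 0 and the untaken branch is discarded, so the asm with the "i" constraint never reaches the backend for a non-constant count; this is the same mechanism the patch relies on for MULL, NEG_SSR32, and NEG_USR32.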