forked from rpms/openssl
diff --git a/crypto/poly1305/asm/poly1305-ppc.pl b/crypto/poly1305/asm/poly1305-ppc.pl
index 9f86134d923fb..2e601bb9c24be 100755
--- a/crypto/poly1305/asm/poly1305-ppc.pl
+++ b/crypto/poly1305/asm/poly1305-ppc.pl
@@ -744,7 +744,7 @@
 my $LOCALS= 6*$SIZE_T;
 my $VSXFRAME = $LOCALS + 6*$SIZE_T;
 $VSXFRAME += 128;	# local variables
-$VSXFRAME += 13*16;	# v20-v31 offload
+$VSXFRAME += 12*16;	# v20-v31 offload
 
 my $BIG_ENDIAN = ($flavour !~ /le/) ? 4 : 0;
 
@@ -919,12 +919,12 @@
 	addi	r11,r11,32
 	stvx	v22,r10,$sp
 	addi	r10,r10,32
-	stvx	v23,r10,$sp
-	addi	r10,r10,32
-	stvx	v24,r11,$sp
+	stvx	v23,r11,$sp
 	addi	r11,r11,32
-	stvx	v25,r10,$sp
+	stvx	v24,r10,$sp
 	addi	r10,r10,32
+	stvx	v25,r11,$sp
+	addi	r11,r11,32
 	stvx	v26,r10,$sp
 	addi	r10,r10,32
 	stvx	v27,r11,$sp
@@ -1153,12 +1153,12 @@
 	addi	r11,r11,32
 	stvx	v22,r10,$sp
 	addi	r10,r10,32
-	stvx	v23,r10,$sp
-	addi	r10,r10,32
-	stvx	v24,r11,$sp
+	stvx	v23,r11,$sp
 	addi	r11,r11,32
-	stvx	v25,r10,$sp
+	stvx	v24,r10,$sp
 	addi	r10,r10,32
+	stvx	v25,r11,$sp
+	addi	r11,r11,32
 	stvx	v26,r10,$sp
 	addi	r10,r10,32
 	stvx	v27,r11,$sp
@@ -1899,26 +1899,26 @@
 	mtspr	256,r12	# restore vrsave
 	lvx	v20,r10,$sp
 	addi	r10,r10,32
-	lvx	v21,r10,$sp
-	addi	r10,r10,32
-	lvx	v22,r11,$sp
+	lvx	v21,r11,$sp
 	addi	r11,r11,32
-	lvx	v23,r10,$sp
+	lvx	v22,r10,$sp
 	addi	r10,r10,32
-	lvx	v24,r11,$sp
+	lvx	v23,r11,$sp
 	addi	r11,r11,32
-	lvx	v25,r10,$sp
+	lvx	v24,r10,$sp
 	addi	r10,r10,32
-	lvx	v26,r11,$sp
+	lvx	v25,r11,$sp
 	addi	r11,r11,32
-	lvx	v27,r10,$sp
+	lvx	v26,r10,$sp
 	addi	r10,r10,32
-	lvx	v28,r11,$sp
+	lvx	v27,r11,$sp
 	addi	r11,r11,32
-	lvx	v29,r10,$sp
+	lvx	v28,r10,$sp
 	addi	r10,r10,32
-	lvx	v30,r11,$sp
-	lvx	v31,r10,$sp
+	lvx	v29,r11,$sp
+	addi	r11,r11,32
+	lvx	v30,r10,$sp
+	lvx	v31,r11,$sp
 $POP	r27,`$VSXFRAME-$SIZE_T*5`($sp)
 $POP	r28,`$VSXFRAME-$SIZE_T*4`($sp)
 $POP	r29,`$VSXFRAME-$SIZE_T*3`($sp)
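
Note (not part of the patch): the corrected sequences keep a strict alternation between the two base registers, so v20-v31 are offloaded to twelve consecutive 16-byte stack slots, which is also why the frame reservation drops from 13*16 to 12*16. The Perl sketch below is only an illustration of that layout; it assumes r10 and r11 are initialized 16 bytes apart before this point (that setup is outside the hunks shown) and that each base advances by 32 after use.

# Illustration only: emit the v20-v31 offload with strict r10/r11 alternation.
# Assumes $sp+r10 and $sp+r11 point at slot 0 and slot 1 (16 bytes apart).
my ($even, $odd) = ("r10", "r11");
for my $v (20 .. 31) {
    my $base = ($v & 1) ? $odd : $even;           # even vectors via r10, odd via r11
    print "\tstvx\tv$v,$base,\$sp\n";
    print "\taddi\t$base,$base,32\n" if $v < 30;  # bump skipped after the final pair, as in the restore path above
}

With this interleave the twelve slots span exactly 12*16 bytes, matching the new $VSXFRAME reservation in the first hunk.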