libgcrypt/libgcrypt-1.8.5-ppc-sha2.patch
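This patch backports the PowerPC vector-crypto (POWER8/POWER9) SHA-2 transforms to libgcrypt 1.8.5. It adds cipher/sha256-ppc.c and cipher/sha512-ppc.c, each with a POWER8 path that keeps the state in vector registers and uses the vshasigmaw/vshasigmad SHA-2 instructions plus a GPR-based path tuned for POWER9, compiles them with the vector-crypto flags in cipher/Makefile.am, and hooks them into the run-time hardware-feature dispatch in cipher/sha256.c; the corresponding sha512.c hunks are presumably in the portion of the patch not shown here. As a reading aid (not part of the patch), the operand values used by the Sum0/Sum1 macros below select the standard FIPS 180-4 big-sigma functions:

  /* Reading aid, not part of the patch: how the vshasigma operands used by
   * the Sum0/Sum1 macros map onto the standard SHA-2 big-sigma functions. */
  /* vshasigmaw x,1,0   ->  Sigma0(x) = ROTR32(x,2)  ^ ROTR32(x,13) ^ ROTR32(x,22) */
  /* vshasigmaw x,1,15  ->  Sigma1(x) = ROTR32(x,6)  ^ ROTR32(x,11) ^ ROTR32(x,25) */
  /* vshasigmad x,1,0   ->  Sigma0(x) = ROTR64(x,28) ^ ROTR64(x,34) ^ ROTR64(x,39) */
  /* vshasigmad x,1,15  ->  Sigma1(x) = ROTR64(x,14) ^ ROTR64(x,18) ^ ROTR64(x,41) */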


diff --git a/cipher/Makefile.am b/cipher/Makefile.am
index 85a5b5fb..cb41c251 100644
--- a/cipher/Makefile.am
+++ b/cipher/Makefile.am
@@ -94,9 +94,9 @@ serpent.c serpent-sse2-amd64.S serpent-avx2-amd64.S serpent-armv7-neon.S \
sha1.c sha1-ssse3-amd64.S sha1-avx-amd64.S sha1-avx-bmi2-amd64.S \
sha1-armv7-neon.S sha1-armv8-aarch32-ce.S sha1-armv8-aarch64-ce.S \
sha256.c sha256-ssse3-amd64.S sha256-avx-amd64.S sha256-avx2-bmi2-amd64.S \
- sha256-armv8-aarch32-ce.S sha256-armv8-aarch64-ce.S \
+ sha256-armv8-aarch32-ce.S sha256-armv8-aarch64-ce.S sha256-ppc.c \
sha512.c sha512-ssse3-amd64.S sha512-avx-amd64.S sha512-avx2-bmi2-amd64.S \
- sha512-armv7-neon.S sha512-arm.S \
+ sha512-armv7-neon.S sha512-arm.S sha512-ppc.c \
keccak.c keccak_permute_32.h keccak_permute_64.h keccak-armv7-neon.S \
stribog.c \
tiger.c \
@@ -148,4 +148,14 @@ rijndael-ppc9le.o: $(srcdir)/rijndael-ppc9le.c Makefile
rijndael-ppc9le.lo: $(srcdir)/rijndael-ppc9le.c Makefile
`echo $(LTCOMPILE) $(ppc_vcrypto_cflags) -c $< `
+sha256-ppc.o: $(srcdir)/sha256-ppc.c Makefile
+ `echo $(COMPILE) $(ppc_vcrypto_cflags) -c $< `
+
+sha256-ppc.lo: $(srcdir)/sha256-ppc.c Makefile
+ `echo $(LTCOMPILE) $(ppc_vcrypto_cflags) -c $< `
+sha512-ppc.o: $(srcdir)/sha512-ppc.c Makefile
+ `echo $(COMPILE) $(ppc_vcrypto_cflags) -c $< `
+
+sha512-ppc.lo: $(srcdir)/sha512-ppc.c Makefile
+ `echo $(LTCOMPILE) $(ppc_vcrypto_cflags) -c $< `
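These rules mirror the existing rijndael-ppc ones just above: only the new sha256-ppc.c and sha512-ppc.c objects are built with $(ppc_vcrypto_cflags), the vector-crypto/VSX options probed by configure, so the rest of the library keeps its baseline compiler flags and the accelerated code still has to be enabled at run time by the hardware-feature checks added in sha256.c further down.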
diff --git a/cipher/sha256-ppc.c b/cipher/sha256-ppc.c
new file mode 100644
index 00000000..a9b59714
--- /dev/null
+++ b/cipher/sha256-ppc.c
@@ -0,0 +1,795 @@
+/* sha256-ppc.c - PowerPC vcrypto implementation of SHA-256 transform
+ * Copyright (C) 2019 Jussi Kivilinna <jussi.kivilinna@iki.fi>
+ *
+ * This file is part of Libgcrypt.
+ *
+ * Libgcrypt is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * Libgcrypt is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this program; if not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <config.h>
+
+#if defined(ENABLE_PPC_CRYPTO_SUPPORT) && \
+ defined(HAVE_COMPATIBLE_CC_PPC_ALTIVEC) && \
+ defined(HAVE_GCC_INLINE_ASM_PPC_ALTIVEC) && \
+ defined(USE_SHA256) && \
+ __GNUC__ >= 4
+
+#include <altivec.h>
+#include "bufhelp.h"
+
+
+typedef vector unsigned char vector16x_u8;
+typedef vector unsigned int vector4x_u32;
+typedef vector unsigned long long vector2x_u64;
+
+
+#define ALWAYS_INLINE inline __attribute__((always_inline))
+#define NO_INLINE __attribute__((noinline))
+#define NO_INSTRUMENT_FUNCTION __attribute__((no_instrument_function))
+
+#define ASM_FUNC_ATTR NO_INSTRUMENT_FUNCTION
+#define ASM_FUNC_ATTR_INLINE ASM_FUNC_ATTR ALWAYS_INLINE
+#define ASM_FUNC_ATTR_NOINLINE ASM_FUNC_ATTR NO_INLINE
+
+
+static const u32 K[64] =
+ {
+#define TBL(v) v
+ TBL(0x428a2f98), TBL(0x71374491), TBL(0xb5c0fbcf), TBL(0xe9b5dba5),
+ TBL(0x3956c25b), TBL(0x59f111f1), TBL(0x923f82a4), TBL(0xab1c5ed5),
+ TBL(0xd807aa98), TBL(0x12835b01), TBL(0x243185be), TBL(0x550c7dc3),
+ TBL(0x72be5d74), TBL(0x80deb1fe), TBL(0x9bdc06a7), TBL(0xc19bf174),
+ TBL(0xe49b69c1), TBL(0xefbe4786), TBL(0x0fc19dc6), TBL(0x240ca1cc),
+ TBL(0x2de92c6f), TBL(0x4a7484aa), TBL(0x5cb0a9dc), TBL(0x76f988da),
+ TBL(0x983e5152), TBL(0xa831c66d), TBL(0xb00327c8), TBL(0xbf597fc7),
+ TBL(0xc6e00bf3), TBL(0xd5a79147), TBL(0x06ca6351), TBL(0x14292967),
+ TBL(0x27b70a85), TBL(0x2e1b2138), TBL(0x4d2c6dfc), TBL(0x53380d13),
+ TBL(0x650a7354), TBL(0x766a0abb), TBL(0x81c2c92e), TBL(0x92722c85),
+ TBL(0xa2bfe8a1), TBL(0xa81a664b), TBL(0xc24b8b70), TBL(0xc76c51a3),
+ TBL(0xd192e819), TBL(0xd6990624), TBL(0xf40e3585), TBL(0x106aa070),
+ TBL(0x19a4c116), TBL(0x1e376c08), TBL(0x2748774c), TBL(0x34b0bcb5),
+ TBL(0x391c0cb3), TBL(0x4ed8aa4a), TBL(0x5b9cca4f), TBL(0x682e6ff3),
+ TBL(0x748f82ee), TBL(0x78a5636f), TBL(0x84c87814), TBL(0x8cc70208),
+ TBL(0x90befffa), TBL(0xa4506ceb), TBL(0xbef9a3f7), TBL(0xc67178f2)
+#undef TBL
+ };
+
+
+static ASM_FUNC_ATTR_INLINE vector4x_u32
+vec_rol_elems(vector4x_u32 v, unsigned int idx)
+{
+#ifndef WORDS_BIGENDIAN
+ return vec_sld (v, v, (16 - (4 * idx)) & 15);
+#else
+ return vec_sld (v, v, (4 * idx) & 15);
+#endif
+}
+
+
+static ASM_FUNC_ATTR_INLINE vector4x_u32
+vec_merge_idx0_elems(vector4x_u32 v0, vector4x_u32 v1,
+ vector4x_u32 v2, vector4x_u32 v3)
+{
+ return (vector4x_u32)vec_mergeh ((vector2x_u64) vec_mergeh(v0, v1),
+ (vector2x_u64) vec_mergeh(v2, v3));
+}
+
+
+static ASM_FUNC_ATTR_INLINE vector4x_u32
+vec_ror_u32(vector4x_u32 v, unsigned int shift)
+{
+ return (v >> (shift & 31)) ^ (v << ((32 - shift) & 31));
+}
+
+
+static ASM_FUNC_ATTR_INLINE vector4x_u32
+vec_vshasigma_u32(vector4x_u32 v, unsigned int a, unsigned int b)
+{
+ asm ("vshasigmaw %0,%1,%2,%3"
+ : "=v" (v)
+ : "v" (v), "g" (a), "g" (b)
+ : "memory");
+ return v;
+}
+
+
+/* SHA2 round in vector registers */
+#define R(a,b,c,d,e,f,g,h,k,w) do \
+ { \
+ t1 = (h); \
+ t1 += ((k) + (w)); \
+ t1 += Cho((e),(f),(g)); \
+ t1 += Sum1((e)); \
+ t2 = Sum0((a)); \
+ t2 += Maj((a),(b),(c)); \
+ d += t1; \
+ h = t1 + t2; \
+ } while (0)
+
+#define Cho(b, c, d) (vec_sel(d, c, b))
+
+#define Maj(c, d, b) (vec_sel(c, b, c ^ d))
+
+#define Sum0(x) (vec_vshasigma_u32(x, 1, 0))
+
+#define Sum1(x) (vec_vshasigma_u32(x, 1, 15))
+
+
+/* Message expansion on general purpose registers */
+#define S0(x) (ror ((x), 7) ^ ror ((x), 18) ^ ((x) >> 3))
+#define S1(x) (ror ((x), 17) ^ ror ((x), 19) ^ ((x) >> 10))
+
+#define I(i) ( w[i] = buf_get_be32(data + i * 4) )
+#define W(i) ({ w[i&0x0f] += w[(i-7) &0x0f]; \
+ w[i&0x0f] += S0(w[(i-15)&0x0f]); \
+ w[i&0x0f] += S1(w[(i-2) &0x0f]); \
+ w[i&0x0f]; })
+
+#define I2(i) ( w2[i] = buf_get_be32(64 + data + i * 4), I(i) )
+#define W2(i) ({ w2[i] = w2[i-7]; \
+ w2[i] += S1(w2[i-2]); \
+ w2[i] += S0(w2[i-15]); \
+ w2[i] += w2[i-16]; \
+ W(i); })
+#define R2(i) ( w2[i] )
+
+
+unsigned int ASM_FUNC_ATTR
+_gcry_sha256_transform_ppc8(u32 state[8], const unsigned char *data,
+ size_t nblks)
+{
+  /* GPRs are used for message expansion, as vector-intrinsics-based code
+   * generates slower results. */
+ vector4x_u32 h0, h1, h2, h3, h4, h5, h6, h7;
+ vector4x_u32 h0_h3, h4_h7;
+ vector4x_u32 a, b, c, d, e, f, g, h, t1, t2;
+ u32 w[16];
+ u32 w2[64];
+
+ h0_h3 = vec_vsx_ld (4 * 0, state);
+ h4_h7 = vec_vsx_ld (4 * 4, state);
+
+ h0 = h0_h3;
+ h1 = vec_rol_elems (h0_h3, 1);
+ h2 = vec_rol_elems (h0_h3, 2);
+ h3 = vec_rol_elems (h0_h3, 3);
+ h4 = h4_h7;
+ h5 = vec_rol_elems (h4_h7, 1);
+ h6 = vec_rol_elems (h4_h7, 2);
+ h7 = vec_rol_elems (h4_h7, 3);
+
+ while (nblks >= 2)
+ {
+ a = h0;
+ b = h1;
+ c = h2;
+ d = h3;
+ e = h4;
+ f = h5;
+ g = h6;
+ h = h7;
+
+ R(a, b, c, d, e, f, g, h, K[0], I2(0));
+ R(h, a, b, c, d, e, f, g, K[1], I2(1));
+ R(g, h, a, b, c, d, e, f, K[2], I2(2));
+ R(f, g, h, a, b, c, d, e, K[3], I2(3));
+ R(e, f, g, h, a, b, c, d, K[4], I2(4));
+ R(d, e, f, g, h, a, b, c, K[5], I2(5));
+ R(c, d, e, f, g, h, a, b, K[6], I2(6));
+ R(b, c, d, e, f, g, h, a, K[7], I2(7));
+ R(a, b, c, d, e, f, g, h, K[8], I2(8));
+ R(h, a, b, c, d, e, f, g, K[9], I2(9));
+ R(g, h, a, b, c, d, e, f, K[10], I2(10));
+ R(f, g, h, a, b, c, d, e, K[11], I2(11));
+ R(e, f, g, h, a, b, c, d, K[12], I2(12));
+ R(d, e, f, g, h, a, b, c, K[13], I2(13));
+ R(c, d, e, f, g, h, a, b, K[14], I2(14));
+ R(b, c, d, e, f, g, h, a, K[15], I2(15));
+ data += 64 * 2;
+
+ R(a, b, c, d, e, f, g, h, K[16], W2(16));
+ R(h, a, b, c, d, e, f, g, K[17], W2(17));
+ R(g, h, a, b, c, d, e, f, K[18], W2(18));
+ R(f, g, h, a, b, c, d, e, K[19], W2(19));
+ R(e, f, g, h, a, b, c, d, K[20], W2(20));
+ R(d, e, f, g, h, a, b, c, K[21], W2(21));
+ R(c, d, e, f, g, h, a, b, K[22], W2(22));
+ R(b, c, d, e, f, g, h, a, K[23], W2(23));
+ R(a, b, c, d, e, f, g, h, K[24], W2(24));
+ R(h, a, b, c, d, e, f, g, K[25], W2(25));
+ R(g, h, a, b, c, d, e, f, K[26], W2(26));
+ R(f, g, h, a, b, c, d, e, K[27], W2(27));
+ R(e, f, g, h, a, b, c, d, K[28], W2(28));
+ R(d, e, f, g, h, a, b, c, K[29], W2(29));
+ R(c, d, e, f, g, h, a, b, K[30], W2(30));
+ R(b, c, d, e, f, g, h, a, K[31], W2(31));
+
+ R(a, b, c, d, e, f, g, h, K[32], W2(32));
+ R(h, a, b, c, d, e, f, g, K[33], W2(33));
+ R(g, h, a, b, c, d, e, f, K[34], W2(34));
+ R(f, g, h, a, b, c, d, e, K[35], W2(35));
+ R(e, f, g, h, a, b, c, d, K[36], W2(36));
+ R(d, e, f, g, h, a, b, c, K[37], W2(37));
+ R(c, d, e, f, g, h, a, b, K[38], W2(38));
+ R(b, c, d, e, f, g, h, a, K[39], W2(39));
+ R(a, b, c, d, e, f, g, h, K[40], W2(40));
+ R(h, a, b, c, d, e, f, g, K[41], W2(41));
+ R(g, h, a, b, c, d, e, f, K[42], W2(42));
+ R(f, g, h, a, b, c, d, e, K[43], W2(43));
+ R(e, f, g, h, a, b, c, d, K[44], W2(44));
+ R(d, e, f, g, h, a, b, c, K[45], W2(45));
+ R(c, d, e, f, g, h, a, b, K[46], W2(46));
+ R(b, c, d, e, f, g, h, a, K[47], W2(47));
+
+ R(a, b, c, d, e, f, g, h, K[48], W2(48));
+ R(h, a, b, c, d, e, f, g, K[49], W2(49));
+ R(g, h, a, b, c, d, e, f, K[50], W2(50));
+ R(f, g, h, a, b, c, d, e, K[51], W2(51));
+ R(e, f, g, h, a, b, c, d, K[52], W2(52));
+ R(d, e, f, g, h, a, b, c, K[53], W2(53));
+ R(c, d, e, f, g, h, a, b, K[54], W2(54));
+ R(b, c, d, e, f, g, h, a, K[55], W2(55));
+ R(a, b, c, d, e, f, g, h, K[56], W2(56));
+ R(h, a, b, c, d, e, f, g, K[57], W2(57));
+ R(g, h, a, b, c, d, e, f, K[58], W2(58));
+ R(f, g, h, a, b, c, d, e, K[59], W2(59));
+ R(e, f, g, h, a, b, c, d, K[60], W2(60));
+ R(d, e, f, g, h, a, b, c, K[61], W2(61));
+ R(c, d, e, f, g, h, a, b, K[62], W2(62));
+ R(b, c, d, e, f, g, h, a, K[63], W2(63));
+
+ h0 += a;
+ h1 += b;
+ h2 += c;
+ h3 += d;
+ h4 += e;
+ h5 += f;
+ h6 += g;
+ h7 += h;
+
+ a = h0;
+ b = h1;
+ c = h2;
+ d = h3;
+ e = h4;
+ f = h5;
+ g = h6;
+ h = h7;
+
+ R(a, b, c, d, e, f, g, h, K[0], R2(0));
+ R(h, a, b, c, d, e, f, g, K[1], R2(1));
+ R(g, h, a, b, c, d, e, f, K[2], R2(2));
+ R(f, g, h, a, b, c, d, e, K[3], R2(3));
+ R(e, f, g, h, a, b, c, d, K[4], R2(4));
+ R(d, e, f, g, h, a, b, c, K[5], R2(5));
+ R(c, d, e, f, g, h, a, b, K[6], R2(6));
+ R(b, c, d, e, f, g, h, a, K[7], R2(7));
+ R(a, b, c, d, e, f, g, h, K[8], R2(8));
+ R(h, a, b, c, d, e, f, g, K[9], R2(9));
+ R(g, h, a, b, c, d, e, f, K[10], R2(10));
+ R(f, g, h, a, b, c, d, e, K[11], R2(11));
+ R(e, f, g, h, a, b, c, d, K[12], R2(12));
+ R(d, e, f, g, h, a, b, c, K[13], R2(13));
+ R(c, d, e, f, g, h, a, b, K[14], R2(14));
+ R(b, c, d, e, f, g, h, a, K[15], R2(15));
+
+ R(a, b, c, d, e, f, g, h, K[16], R2(16));
+ R(h, a, b, c, d, e, f, g, K[17], R2(17));
+ R(g, h, a, b, c, d, e, f, K[18], R2(18));
+ R(f, g, h, a, b, c, d, e, K[19], R2(19));
+ R(e, f, g, h, a, b, c, d, K[20], R2(20));
+ R(d, e, f, g, h, a, b, c, K[21], R2(21));
+ R(c, d, e, f, g, h, a, b, K[22], R2(22));
+ R(b, c, d, e, f, g, h, a, K[23], R2(23));
+ R(a, b, c, d, e, f, g, h, K[24], R2(24));
+ R(h, a, b, c, d, e, f, g, K[25], R2(25));
+ R(g, h, a, b, c, d, e, f, K[26], R2(26));
+ R(f, g, h, a, b, c, d, e, K[27], R2(27));
+ R(e, f, g, h, a, b, c, d, K[28], R2(28));
+ R(d, e, f, g, h, a, b, c, K[29], R2(29));
+ R(c, d, e, f, g, h, a, b, K[30], R2(30));
+ R(b, c, d, e, f, g, h, a, K[31], R2(31));
+
+ R(a, b, c, d, e, f, g, h, K[32], R2(32));
+ R(h, a, b, c, d, e, f, g, K[33], R2(33));
+ R(g, h, a, b, c, d, e, f, K[34], R2(34));
+ R(f, g, h, a, b, c, d, e, K[35], R2(35));
+ R(e, f, g, h, a, b, c, d, K[36], R2(36));
+ R(d, e, f, g, h, a, b, c, K[37], R2(37));
+ R(c, d, e, f, g, h, a, b, K[38], R2(38));
+ R(b, c, d, e, f, g, h, a, K[39], R2(39));
+ R(a, b, c, d, e, f, g, h, K[40], R2(40));
+ R(h, a, b, c, d, e, f, g, K[41], R2(41));
+ R(g, h, a, b, c, d, e, f, K[42], R2(42));
+ R(f, g, h, a, b, c, d, e, K[43], R2(43));
+ R(e, f, g, h, a, b, c, d, K[44], R2(44));
+ R(d, e, f, g, h, a, b, c, K[45], R2(45));
+ R(c, d, e, f, g, h, a, b, K[46], R2(46));
+ R(b, c, d, e, f, g, h, a, K[47], R2(47));
+
+ R(a, b, c, d, e, f, g, h, K[48], R2(48));
+ R(h, a, b, c, d, e, f, g, K[49], R2(49));
+ R(g, h, a, b, c, d, e, f, K[50], R2(50));
+ R(f, g, h, a, b, c, d, e, K[51], R2(51));
+ R(e, f, g, h, a, b, c, d, K[52], R2(52));
+ R(d, e, f, g, h, a, b, c, K[53], R2(53));
+ R(c, d, e, f, g, h, a, b, K[54], R2(54));
+ R(b, c, d, e, f, g, h, a, K[55], R2(55));
+ R(a, b, c, d, e, f, g, h, K[56], R2(56));
+ R(h, a, b, c, d, e, f, g, K[57], R2(57));
+ R(g, h, a, b, c, d, e, f, K[58], R2(58));
+ R(f, g, h, a, b, c, d, e, K[59], R2(59));
+ R(e, f, g, h, a, b, c, d, K[60], R2(60));
+ R(d, e, f, g, h, a, b, c, K[61], R2(61));
+ R(c, d, e, f, g, h, a, b, K[62], R2(62));
+ R(b, c, d, e, f, g, h, a, K[63], R2(63));
+
+ h0 += a;
+ h1 += b;
+ h2 += c;
+ h3 += d;
+ h4 += e;
+ h5 += f;
+ h6 += g;
+ h7 += h;
+
+ nblks -= 2;
+ }
+
+ while (nblks)
+ {
+ a = h0;
+ b = h1;
+ c = h2;
+ d = h3;
+ e = h4;
+ f = h5;
+ g = h6;
+ h = h7;
+
+ R(a, b, c, d, e, f, g, h, K[0], I(0));
+ R(h, a, b, c, d, e, f, g, K[1], I(1));
+ R(g, h, a, b, c, d, e, f, K[2], I(2));
+ R(f, g, h, a, b, c, d, e, K[3], I(3));
+ R(e, f, g, h, a, b, c, d, K[4], I(4));
+ R(d, e, f, g, h, a, b, c, K[5], I(5));
+ R(c, d, e, f, g, h, a, b, K[6], I(6));
+ R(b, c, d, e, f, g, h, a, K[7], I(7));
+ R(a, b, c, d, e, f, g, h, K[8], I(8));
+ R(h, a, b, c, d, e, f, g, K[9], I(9));
+ R(g, h, a, b, c, d, e, f, K[10], I(10));
+ R(f, g, h, a, b, c, d, e, K[11], I(11));
+ R(e, f, g, h, a, b, c, d, K[12], I(12));
+ R(d, e, f, g, h, a, b, c, K[13], I(13));
+ R(c, d, e, f, g, h, a, b, K[14], I(14));
+ R(b, c, d, e, f, g, h, a, K[15], I(15));
+ data += 64;
+
+ R(a, b, c, d, e, f, g, h, K[16], W(16));
+ R(h, a, b, c, d, e, f, g, K[17], W(17));
+ R(g, h, a, b, c, d, e, f, K[18], W(18));
+ R(f, g, h, a, b, c, d, e, K[19], W(19));
+ R(e, f, g, h, a, b, c, d, K[20], W(20));
+ R(d, e, f, g, h, a, b, c, K[21], W(21));
+ R(c, d, e, f, g, h, a, b, K[22], W(22));
+ R(b, c, d, e, f, g, h, a, K[23], W(23));
+ R(a, b, c, d, e, f, g, h, K[24], W(24));
+ R(h, a, b, c, d, e, f, g, K[25], W(25));
+ R(g, h, a, b, c, d, e, f, K[26], W(26));
+ R(f, g, h, a, b, c, d, e, K[27], W(27));
+ R(e, f, g, h, a, b, c, d, K[28], W(28));
+ R(d, e, f, g, h, a, b, c, K[29], W(29));
+ R(c, d, e, f, g, h, a, b, K[30], W(30));
+ R(b, c, d, e, f, g, h, a, K[31], W(31));
+
+ R(a, b, c, d, e, f, g, h, K[32], W(32));
+ R(h, a, b, c, d, e, f, g, K[33], W(33));
+ R(g, h, a, b, c, d, e, f, K[34], W(34));
+ R(f, g, h, a, b, c, d, e, K[35], W(35));
+ R(e, f, g, h, a, b, c, d, K[36], W(36));
+ R(d, e, f, g, h, a, b, c, K[37], W(37));
+ R(c, d, e, f, g, h, a, b, K[38], W(38));
+ R(b, c, d, e, f, g, h, a, K[39], W(39));
+ R(a, b, c, d, e, f, g, h, K[40], W(40));
+ R(h, a, b, c, d, e, f, g, K[41], W(41));
+ R(g, h, a, b, c, d, e, f, K[42], W(42));
+ R(f, g, h, a, b, c, d, e, K[43], W(43));
+ R(e, f, g, h, a, b, c, d, K[44], W(44));
+ R(d, e, f, g, h, a, b, c, K[45], W(45));
+ R(c, d, e, f, g, h, a, b, K[46], W(46));
+ R(b, c, d, e, f, g, h, a, K[47], W(47));
+
+ R(a, b, c, d, e, f, g, h, K[48], W(48));
+ R(h, a, b, c, d, e, f, g, K[49], W(49));
+ R(g, h, a, b, c, d, e, f, K[50], W(50));
+ R(f, g, h, a, b, c, d, e, K[51], W(51));
+ R(e, f, g, h, a, b, c, d, K[52], W(52));
+ R(d, e, f, g, h, a, b, c, K[53], W(53));
+ R(c, d, e, f, g, h, a, b, K[54], W(54));
+ R(b, c, d, e, f, g, h, a, K[55], W(55));
+ R(a, b, c, d, e, f, g, h, K[56], W(56));
+ R(h, a, b, c, d, e, f, g, K[57], W(57));
+ R(g, h, a, b, c, d, e, f, K[58], W(58));
+ R(f, g, h, a, b, c, d, e, K[59], W(59));
+ R(e, f, g, h, a, b, c, d, K[60], W(60));
+ R(d, e, f, g, h, a, b, c, K[61], W(61));
+ R(c, d, e, f, g, h, a, b, K[62], W(62));
+ R(b, c, d, e, f, g, h, a, K[63], W(63));
+
+ h0 += a;
+ h1 += b;
+ h2 += c;
+ h3 += d;
+ h4 += e;
+ h5 += f;
+ h6 += g;
+ h7 += h;
+
+ nblks--;
+ }
+
+ h0_h3 = vec_merge_idx0_elems (h0, h1, h2, h3);
+ h4_h7 = vec_merge_idx0_elems (h4, h5, h6, h7);
+ vec_vsx_st (h0_h3, 4 * 0, state);
+ vec_vsx_st (h4_h7, 4 * 4, state);
+
+ return sizeof(w2) + sizeof(w);
+}
+#undef R
+#undef Cho
+#undef Maj
+#undef Sum0
+#undef Sum1
+#undef S0
+#undef S1
+#undef I
+#undef W
+#undef I2
+#undef W2
+#undef R2
+
+
+/* SHA2 round in general purpose registers */
+#define R(a,b,c,d,e,f,g,h,k,w) do \
+ { \
+ t1 = (h) + Sum1((e)) + Cho((e),(f),(g)) + ((k) + (w));\
+ t2 = Sum0((a)) + Maj((a),(b),(c)); \
+ d += t1; \
+ h = t1 + t2; \
+ } while (0)
+
+#define Cho(x, y, z) ((x & y) + (~x & z))
+
+#define Maj(z, x, y) ((x & y) + (z & (x ^ y)))
+
+#define Sum0(x) (ror (x, 2) ^ ror (x ^ ror (x, 22-13), 13))
+
+#define Sum1(x) (ror (x, 6) ^ ror (x, 11) ^ ror (x, 25))
+
+
+/* Message expansion on general purpose registers */
+#define S0(x) (ror ((x), 7) ^ ror ((x), 18) ^ ((x) >> 3))
+#define S1(x) (ror ((x), 17) ^ ror ((x), 19) ^ ((x) >> 10))
+
+#define I(i) ( w[i] = buf_get_be32(data + i * 4) )
+#define WN(i) ({ w[i&0x0f] += w[(i-7) &0x0f]; \
+ w[i&0x0f] += S0(w[(i-15)&0x0f]); \
+ w[i&0x0f] += S1(w[(i-2) &0x0f]); \
+ w[i&0x0f]; })
+#define W(i) ({ u32 r = w[i&0x0f]; WN(i); r; })
+#define L(i) w[i&0x0f]
+
+
+unsigned int ASM_FUNC_ATTR
+_gcry_sha256_transform_ppc9(u32 state[8], const unsigned char *data,
+ size_t nblks)
+{
+  /* GPRs are used for the round function and message expansion, as
+   * vector-intrinsics-based code generates slower results on POWER9. */
+ u32 a, b, c, d, e, f, g, h, t1, t2;
+ u32 w[16];
+
+ a = state[0];
+ b = state[1];
+ c = state[2];
+ d = state[3];
+ e = state[4];
+ f = state[5];
+ g = state[6];
+ h = state[7];
+
+ while (nblks >= 2)
+ {
+ I(0); I(1); I(2); I(3);
+ I(4); I(5); I(6); I(7);
+ I(8); I(9); I(10); I(11);
+ I(12); I(13); I(14); I(15);
+ data += 64;
+ R(a, b, c, d, e, f, g, h, K[0], W(0));
+ R(h, a, b, c, d, e, f, g, K[1], W(1));
+ R(g, h, a, b, c, d, e, f, K[2], W(2));
+ R(f, g, h, a, b, c, d, e, K[3], W(3));
+ R(e, f, g, h, a, b, c, d, K[4], W(4));
+ R(d, e, f, g, h, a, b, c, K[5], W(5));
+ R(c, d, e, f, g, h, a, b, K[6], W(6));
+ R(b, c, d, e, f, g, h, a, K[7], W(7));
+ R(a, b, c, d, e, f, g, h, K[8], W(8));
+ R(h, a, b, c, d, e, f, g, K[9], W(9));
+ R(g, h, a, b, c, d, e, f, K[10], W(10));
+ R(f, g, h, a, b, c, d, e, K[11], W(11));
+ R(e, f, g, h, a, b, c, d, K[12], W(12));
+ R(d, e, f, g, h, a, b, c, K[13], W(13));
+ R(c, d, e, f, g, h, a, b, K[14], W(14));
+ R(b, c, d, e, f, g, h, a, K[15], W(15));
+
+ R(a, b, c, d, e, f, g, h, K[16], W(16));
+ R(h, a, b, c, d, e, f, g, K[17], W(17));
+ R(g, h, a, b, c, d, e, f, K[18], W(18));
+ R(f, g, h, a, b, c, d, e, K[19], W(19));
+ R(e, f, g, h, a, b, c, d, K[20], W(20));
+ R(d, e, f, g, h, a, b, c, K[21], W(21));
+ R(c, d, e, f, g, h, a, b, K[22], W(22));
+ R(b, c, d, e, f, g, h, a, K[23], W(23));
+ R(a, b, c, d, e, f, g, h, K[24], W(24));
+ R(h, a, b, c, d, e, f, g, K[25], W(25));
+ R(g, h, a, b, c, d, e, f, K[26], W(26));
+ R(f, g, h, a, b, c, d, e, K[27], W(27));
+ R(e, f, g, h, a, b, c, d, K[28], W(28));
+ R(d, e, f, g, h, a, b, c, K[29], W(29));
+ R(c, d, e, f, g, h, a, b, K[30], W(30));
+ R(b, c, d, e, f, g, h, a, K[31], W(31));
+
+ R(a, b, c, d, e, f, g, h, K[32], W(32));
+ R(h, a, b, c, d, e, f, g, K[33], W(33));
+ R(g, h, a, b, c, d, e, f, K[34], W(34));
+ R(f, g, h, a, b, c, d, e, K[35], W(35));
+ R(e, f, g, h, a, b, c, d, K[36], W(36));
+ R(d, e, f, g, h, a, b, c, K[37], W(37));
+ R(c, d, e, f, g, h, a, b, K[38], W(38));
+ R(b, c, d, e, f, g, h, a, K[39], W(39));
+ R(a, b, c, d, e, f, g, h, K[40], W(40));
+ R(h, a, b, c, d, e, f, g, K[41], W(41));
+ R(g, h, a, b, c, d, e, f, K[42], W(42));
+ R(f, g, h, a, b, c, d, e, K[43], W(43));
+ R(e, f, g, h, a, b, c, d, K[44], W(44));
+ R(d, e, f, g, h, a, b, c, K[45], W(45));
+ R(c, d, e, f, g, h, a, b, K[46], W(46));
+ R(b, c, d, e, f, g, h, a, K[47], W(47));
+
+ R(a, b, c, d, e, f, g, h, K[48], L(48));
+ R(h, a, b, c, d, e, f, g, K[49], L(49));
+ R(g, h, a, b, c, d, e, f, K[50], L(50));
+ R(f, g, h, a, b, c, d, e, K[51], L(51));
+ I(0); I(1); I(2); I(3);
+ R(e, f, g, h, a, b, c, d, K[52], L(52));
+ R(d, e, f, g, h, a, b, c, K[53], L(53));
+ R(c, d, e, f, g, h, a, b, K[54], L(54));
+ R(b, c, d, e, f, g, h, a, K[55], L(55));
+ I(4); I(5); I(6); I(7);
+ R(a, b, c, d, e, f, g, h, K[56], L(56));
+ R(h, a, b, c, d, e, f, g, K[57], L(57));
+ R(g, h, a, b, c, d, e, f, K[58], L(58));
+ R(f, g, h, a, b, c, d, e, K[59], L(59));
+ I(8); I(9); I(10); I(11);
+ R(e, f, g, h, a, b, c, d, K[60], L(60));
+ R(d, e, f, g, h, a, b, c, K[61], L(61));
+ R(c, d, e, f, g, h, a, b, K[62], L(62));
+ R(b, c, d, e, f, g, h, a, K[63], L(63));
+ I(12); I(13); I(14); I(15);
+ data += 64;
+
+ a += state[0];
+ b += state[1];
+ c += state[2];
+ d += state[3];
+ e += state[4];
+ f += state[5];
+ g += state[6];
+ h += state[7];
+ state[0] = a;
+ state[1] = b;
+ state[2] = c;
+ state[3] = d;
+ state[4] = e;
+ state[5] = f;
+ state[6] = g;
+ state[7] = h;
+
+ R(a, b, c, d, e, f, g, h, K[0], W(0));
+ R(h, a, b, c, d, e, f, g, K[1], W(1));
+ R(g, h, a, b, c, d, e, f, K[2], W(2));
+ R(f, g, h, a, b, c, d, e, K[3], W(3));
+ R(e, f, g, h, a, b, c, d, K[4], W(4));
+ R(d, e, f, g, h, a, b, c, K[5], W(5));
+ R(c, d, e, f, g, h, a, b, K[6], W(6));
+ R(b, c, d, e, f, g, h, a, K[7], W(7));
+ R(a, b, c, d, e, f, g, h, K[8], W(8));
+ R(h, a, b, c, d, e, f, g, K[9], W(9));
+ R(g, h, a, b, c, d, e, f, K[10], W(10));
+ R(f, g, h, a, b, c, d, e, K[11], W(11));
+ R(e, f, g, h, a, b, c, d, K[12], W(12));
+ R(d, e, f, g, h, a, b, c, K[13], W(13));
+ R(c, d, e, f, g, h, a, b, K[14], W(14));
+ R(b, c, d, e, f, g, h, a, K[15], W(15));
+
+ R(a, b, c, d, e, f, g, h, K[16], W(16));
+ R(h, a, b, c, d, e, f, g, K[17], W(17));
+ R(g, h, a, b, c, d, e, f, K[18], W(18));
+ R(f, g, h, a, b, c, d, e, K[19], W(19));
+ R(e, f, g, h, a, b, c, d, K[20], W(20));
+ R(d, e, f, g, h, a, b, c, K[21], W(21));
+ R(c, d, e, f, g, h, a, b, K[22], W(22));
+ R(b, c, d, e, f, g, h, a, K[23], W(23));
+ R(a, b, c, d, e, f, g, h, K[24], W(24));
+ R(h, a, b, c, d, e, f, g, K[25], W(25));
+ R(g, h, a, b, c, d, e, f, K[26], W(26));
+ R(f, g, h, a, b, c, d, e, K[27], W(27));
+ R(e, f, g, h, a, b, c, d, K[28], W(28));
+ R(d, e, f, g, h, a, b, c, K[29], W(29));
+ R(c, d, e, f, g, h, a, b, K[30], W(30));
+ R(b, c, d, e, f, g, h, a, K[31], W(31));
+
+ R(a, b, c, d, e, f, g, h, K[32], W(32));
+ R(h, a, b, c, d, e, f, g, K[33], W(33));
+ R(g, h, a, b, c, d, e, f, K[34], W(34));
+ R(f, g, h, a, b, c, d, e, K[35], W(35));
+ R(e, f, g, h, a, b, c, d, K[36], W(36));
+ R(d, e, f, g, h, a, b, c, K[37], W(37));
+ R(c, d, e, f, g, h, a, b, K[38], W(38));
+ R(b, c, d, e, f, g, h, a, K[39], W(39));
+ R(a, b, c, d, e, f, g, h, K[40], W(40));
+ R(h, a, b, c, d, e, f, g, K[41], W(41));
+ R(g, h, a, b, c, d, e, f, K[42], W(42));
+ R(f, g, h, a, b, c, d, e, K[43], W(43));
+ R(e, f, g, h, a, b, c, d, K[44], W(44));
+ R(d, e, f, g, h, a, b, c, K[45], W(45));
+ R(c, d, e, f, g, h, a, b, K[46], W(46));
+ R(b, c, d, e, f, g, h, a, K[47], W(47));
+
+ R(a, b, c, d, e, f, g, h, K[48], L(48));
+ R(h, a, b, c, d, e, f, g, K[49], L(49));
+ R(g, h, a, b, c, d, e, f, K[50], L(50));
+ R(f, g, h, a, b, c, d, e, K[51], L(51));
+ R(e, f, g, h, a, b, c, d, K[52], L(52));
+ R(d, e, f, g, h, a, b, c, K[53], L(53));
+ R(c, d, e, f, g, h, a, b, K[54], L(54));
+ R(b, c, d, e, f, g, h, a, K[55], L(55));
+ R(a, b, c, d, e, f, g, h, K[56], L(56));
+ R(h, a, b, c, d, e, f, g, K[57], L(57));
+ R(g, h, a, b, c, d, e, f, K[58], L(58));
+ R(f, g, h, a, b, c, d, e, K[59], L(59));
+ R(e, f, g, h, a, b, c, d, K[60], L(60));
+ R(d, e, f, g, h, a, b, c, K[61], L(61));
+ R(c, d, e, f, g, h, a, b, K[62], L(62));
+ R(b, c, d, e, f, g, h, a, K[63], L(63));
+
+ a += state[0];
+ b += state[1];
+ c += state[2];
+ d += state[3];
+ e += state[4];
+ f += state[5];
+ g += state[6];
+ h += state[7];
+ state[0] = a;
+ state[1] = b;
+ state[2] = c;
+ state[3] = d;
+ state[4] = e;
+ state[5] = f;
+ state[6] = g;
+ state[7] = h;
+
+ nblks -= 2;
+ }
+
+ while (nblks)
+ {
+ I(0); I(1); I(2); I(3);
+ I(4); I(5); I(6); I(7);
+ I(8); I(9); I(10); I(11);
+ I(12); I(13); I(14); I(15);
+ data += 64;
+ R(a, b, c, d, e, f, g, h, K[0], W(0));
+ R(h, a, b, c, d, e, f, g, K[1], W(1));
+ R(g, h, a, b, c, d, e, f, K[2], W(2));
+ R(f, g, h, a, b, c, d, e, K[3], W(3));
+ R(e, f, g, h, a, b, c, d, K[4], W(4));
+ R(d, e, f, g, h, a, b, c, K[5], W(5));
+ R(c, d, e, f, g, h, a, b, K[6], W(6));
+ R(b, c, d, e, f, g, h, a, K[7], W(7));
+ R(a, b, c, d, e, f, g, h, K[8], W(8));
+ R(h, a, b, c, d, e, f, g, K[9], W(9));
+ R(g, h, a, b, c, d, e, f, K[10], W(10));
+ R(f, g, h, a, b, c, d, e, K[11], W(11));
+ R(e, f, g, h, a, b, c, d, K[12], W(12));
+ R(d, e, f, g, h, a, b, c, K[13], W(13));
+ R(c, d, e, f, g, h, a, b, K[14], W(14));
+ R(b, c, d, e, f, g, h, a, K[15], W(15));
+
+ R(a, b, c, d, e, f, g, h, K[16], W(16));
+ R(h, a, b, c, d, e, f, g, K[17], W(17));
+ R(g, h, a, b, c, d, e, f, K[18], W(18));
+ R(f, g, h, a, b, c, d, e, K[19], W(19));
+ R(e, f, g, h, a, b, c, d, K[20], W(20));
+ R(d, e, f, g, h, a, b, c, K[21], W(21));
+ R(c, d, e, f, g, h, a, b, K[22], W(22));
+ R(b, c, d, e, f, g, h, a, K[23], W(23));
+ R(a, b, c, d, e, f, g, h, K[24], W(24));
+ R(h, a, b, c, d, e, f, g, K[25], W(25));
+ R(g, h, a, b, c, d, e, f, K[26], W(26));
+ R(f, g, h, a, b, c, d, e, K[27], W(27));
+ R(e, f, g, h, a, b, c, d, K[28], W(28));
+ R(d, e, f, g, h, a, b, c, K[29], W(29));
+ R(c, d, e, f, g, h, a, b, K[30], W(30));
+ R(b, c, d, e, f, g, h, a, K[31], W(31));
+
+ R(a, b, c, d, e, f, g, h, K[32], W(32));
+ R(h, a, b, c, d, e, f, g, K[33], W(33));
+ R(g, h, a, b, c, d, e, f, K[34], W(34));
+ R(f, g, h, a, b, c, d, e, K[35], W(35));
+ R(e, f, g, h, a, b, c, d, K[36], W(36));
+ R(d, e, f, g, h, a, b, c, K[37], W(37));
+ R(c, d, e, f, g, h, a, b, K[38], W(38));
+ R(b, c, d, e, f, g, h, a, K[39], W(39));
+ R(a, b, c, d, e, f, g, h, K[40], W(40));
+ R(h, a, b, c, d, e, f, g, K[41], W(41));
+ R(g, h, a, b, c, d, e, f, K[42], W(42));
+ R(f, g, h, a, b, c, d, e, K[43], W(43));
+ R(e, f, g, h, a, b, c, d, K[44], W(44));
+ R(d, e, f, g, h, a, b, c, K[45], W(45));
+ R(c, d, e, f, g, h, a, b, K[46], W(46));
+ R(b, c, d, e, f, g, h, a, K[47], W(47));
+
+ R(a, b, c, d, e, f, g, h, K[48], L(48));
+ R(h, a, b, c, d, e, f, g, K[49], L(49));
+ R(g, h, a, b, c, d, e, f, K[50], L(50));
+ R(f, g, h, a, b, c, d, e, K[51], L(51));
+ R(e, f, g, h, a, b, c, d, K[52], L(52));
+ R(d, e, f, g, h, a, b, c, K[53], L(53));
+ R(c, d, e, f, g, h, a, b, K[54], L(54));
+ R(b, c, d, e, f, g, h, a, K[55], L(55));
+ R(a, b, c, d, e, f, g, h, K[56], L(56));
+ R(h, a, b, c, d, e, f, g, K[57], L(57));
+ R(g, h, a, b, c, d, e, f, K[58], L(58));
+ R(f, g, h, a, b, c, d, e, K[59], L(59));
+ R(e, f, g, h, a, b, c, d, K[60], L(60));
+ R(d, e, f, g, h, a, b, c, K[61], L(61));
+ R(c, d, e, f, g, h, a, b, K[62], L(62));
+ R(b, c, d, e, f, g, h, a, K[63], L(63));
+
+ a += state[0];
+ b += state[1];
+ c += state[2];
+ d += state[3];
+ e += state[4];
+ f += state[5];
+ g += state[6];
+ h += state[7];
+ state[0] = a;
+ state[1] = b;
+ state[2] = c;
+ state[3] = d;
+ state[4] = e;
+ state[5] = f;
+ state[6] = g;
+ state[7] = h;
+
+ nblks--;
+ }
+
+ return sizeof(w);
+}
+
+#endif /* ENABLE_PPC_CRYPTO_SUPPORT */
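The _gcry_sha256_transform_ppc8 routine above keeps the eight chaining values in vector registers, computes Cho/Maj with vec_sel and Sum0/Sum1 with vshasigmaw, and processes two blocks per loop iteration, expanding the second block's message schedule (w2[]) on GPRs while the first block's rounds run. For orientation, here is a minimal scalar sketch of the round that the R() macro implements, using the standard SHA-256 definitions (illustration only, not part of the patch):

  #include <stdint.h>

  static inline uint32_t ror32 (uint32_t x, unsigned int s) { return (x >> s) | (x << (32 - s)); }
  static inline uint32_t Ch   (uint32_t e, uint32_t f, uint32_t g) { return (e & f) ^ (~e & g); }
  static inline uint32_t Maj  (uint32_t a, uint32_t b, uint32_t c) { return (a & b) ^ (a & c) ^ (b & c); }
  static inline uint32_t Sum0 (uint32_t a) { return ror32 (a, 2) ^ ror32 (a, 13) ^ ror32 (a, 22); }
  static inline uint32_t Sum1 (uint32_t e) { return ror32 (e, 6) ^ ror32 (e, 11) ^ ror32 (e, 25); }

  /* One SHA-256 round; s[] holds the working variables { a, b, c, d, e, f, g, h }. */
  static inline void
  sha256_round (uint32_t s[8], uint32_t k, uint32_t w)
  {
    uint32_t t1 = s[7] + Sum1 (s[4]) + Ch (s[4], s[5], s[6]) + k + w;
    uint32_t t2 = Sum0 (s[0]) + Maj (s[0], s[1], s[2]);
    /* Shift the variables down one position; the R() macro above avoids this
     * shift by rotating its argument list from one round to the next. */
    s[7] = s[6]; s[6] = s[5]; s[5] = s[4]; s[4] = s[3] + t1;
    s[3] = s[2]; s[2] = s[1]; s[1] = s[0]; s[0] = t1 + t2;
  }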
diff --git a/cipher/sha256.c b/cipher/sha256.c
index d174321d..6d49b6c2 100644
--- a/cipher/sha256.c
+++ b/cipher/sha256.c
@@ -90,6 +90,18 @@
# endif
#endif
+/* USE_PPC_CRYPTO indicates whether to enable PowerPC vector crypto
+ * accelerated code. */
+#undef USE_PPC_CRYPTO
+#ifdef ENABLE_PPC_CRYPTO_SUPPORT
+# if defined(HAVE_COMPATIBLE_CC_PPC_ALTIVEC) && \
+ defined(HAVE_GCC_INLINE_ASM_PPC_ALTIVEC)
+# if __GNUC__ >= 4
+# define USE_PPC_CRYPTO 1
+# endif
+# endif
+#endif
+
typedef struct {
gcry_md_block_ctx_t bctx;
@@ -108,28 +120,41 @@ typedef struct {
#endif
} SHA256_CONTEXT;
+#ifdef USE_PPC_CRYPTO
+unsigned int _gcry_sha256_transform_ppc8(u32 state[8],
+ const unsigned char *input_data,
+ size_t num_blks);
+
+unsigned int _gcry_sha256_transform_ppc9(u32 state[8],
+ const unsigned char *input_data,
+ size_t num_blks);
+
+static unsigned int
+do_sha256_transform_ppc8(void *ctx, const unsigned char *data, size_t nblks)
+{
+ SHA256_CONTEXT *hd = ctx;
+ return _gcry_sha256_transform_ppc8 (&hd->h0, data, nblks);
+}
+
+static unsigned int
+do_sha256_transform_ppc9(void *ctx, const unsigned char *data, size_t nblks)
+{
+ SHA256_CONTEXT *hd = ctx;
+ return _gcry_sha256_transform_ppc9 (&hd->h0, data, nblks);
+}
+#endif
+
+
static unsigned int
transform (void *c, const unsigned char *data, size_t nblks);
static void
-sha256_init (void *context, unsigned int flags)
+sha256_common_init (SHA256_CONTEXT *hd)
{
- SHA256_CONTEXT *hd = context;
unsigned int features = _gcry_get_hw_features ();
- (void)flags;
-
- hd->h0 = 0x6a09e667;
- hd->h1 = 0xbb67ae85;
- hd->h2 = 0x3c6ef372;
- hd->h3 = 0xa54ff53a;
- hd->h4 = 0x510e527f;
- hd->h5 = 0x9b05688c;
- hd->h6 = 0x1f83d9ab;
- hd->h7 = 0x5be0cd19;
-
hd->bctx.nblocks = 0;
hd->bctx.nblocks_high = 0;
hd->bctx.count = 0;
@@ -149,16 +174,41 @@ sha256_init (void *context, unsigned int flags)
#endif
#ifdef USE_ARM_CE
hd->use_arm_ce = (features & HWF_ARM_SHA2) != 0;
+#endif
+#ifdef USE_PPC_CRYPTO
+ if ((features & HWF_PPC_VCRYPTO) != 0)
+ hd->bctx.bwrite = do_sha256_transform_ppc8;
+ if ((features & HWF_PPC_VCRYPTO) != 0 && (features & HWF_PPC_ARCH_3_00) != 0)
+ hd->bctx.bwrite = do_sha256_transform_ppc9;
#endif
(void)features;
}
+static void
+sha256_init (void *context, unsigned int flags)
+{
+ SHA256_CONTEXT *hd = context;
+
+ (void)flags;
+
+ hd->h0 = 0x6a09e667;
+ hd->h1 = 0xbb67ae85;
+ hd->h2 = 0x3c6ef372;
+ hd->h3 = 0xa54ff53a;
+ hd->h4 = 0x510e527f;
+ hd->h5 = 0x9b05688c;
+ hd->h6 = 0x1f83d9ab;
+ hd->h7 = 0x5be0cd19;
+
+ sha256_common_init (hd);
+}
+
+
static void
sha224_init (void *context, unsigned int flags)
{
SHA256_CONTEXT *hd = context;
- unsigned int features = _gcry_get_hw_features ();
(void)flags;
@@ -171,27 +221,7 @@ sha224_init (void *context, unsigned int flags)
hd->h6 = 0x64f98fa7;
hd->h7 = 0xbefa4fa4;
- hd->bctx.nblocks = 0;
- hd->bctx.nblocks_high = 0;
- hd->bctx.count = 0;
- hd->bctx.blocksize = 64;
- hd->bctx.bwrite = transform;
-
-#ifdef USE_SSSE3
- hd->use_ssse3 = (features & HWF_INTEL_SSSE3) != 0;
-#endif
-#ifdef USE_AVX
- /* AVX implementation uses SHLD which is known to be slow on non-Intel CPUs.
- * Therefore use this implementation on Intel CPUs only. */
- hd->use_avx = (features & HWF_INTEL_AVX) && (features & HWF_INTEL_FAST_SHLD);
-#endif
-#ifdef USE_AVX2
- hd->use_avx2 = (features & HWF_INTEL_AVX2) && (features & HWF_INTEL_BMI2);
-#endif
-#ifdef USE_ARM_CE
- hd->use_arm_ce = (features & HWF_ARM_SHA2) != 0;
-#endif
- (void)features;
+ sha256_common_init (hd);
}
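Both sha224_init and sha256_init now go through sha256_common_init(), which selects the block-processing function from the detected hardware features: the generic C transform by default, _gcry_sha256_transform_ppc8 when HWF_PPC_VCRYPTO is available, and _gcry_sha256_transform_ppc9 when HWF_PPC_ARCH_3_00 (POWER9 / ISA v3.0) is set as well. A self-contained sketch of that selection follows; the HWF_* bit values and the function-pointer typedef are placeholders standing in for libgcrypt's own definitions:

  #include <stddef.h>

  typedef unsigned int (*block_write_fn) (void *ctx, const unsigned char *data,
                                          size_t nblks);

  /* Placeholder feature bits; libgcrypt defines the real HWF_* values. */
  #define HWF_PPC_VCRYPTO_BIT   (1u << 0)
  #define HWF_PPC_ARCH_3_00_BIT (1u << 1)

  static block_write_fn
  select_sha256_transform (unsigned int features, block_write_fn generic_c,
                           block_write_fn ppc8, block_write_fn ppc9)
  {
    block_write_fn fn = generic_c;               /* plain C fallback */
    if (features & HWF_PPC_VCRYPTO_BIT)
      fn = ppc8;                                 /* POWER8 vector-crypto path */
    if ((features & HWF_PPC_VCRYPTO_BIT) && (features & HWF_PPC_ARCH_3_00_BIT))
      fn = ppc9;                                 /* POWER9 / ISA 3.0 path */
    return fn;
  }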
diff --git a/cipher/sha512-ppc.c b/cipher/sha512-ppc.c
new file mode 100644
index 00000000..a758e1ea
--- /dev/null
+++ b/cipher/sha512-ppc.c
@@ -0,0 +1,921 @@
+/* sha512-ppc.c - PowerPC vcrypto implementation of SHA-512 transform
+ * Copyright (C) 2019 Jussi Kivilinna <jussi.kivilinna@iki.fi>
+ *
+ * This file is part of Libgcrypt.
+ *
+ * Libgcrypt is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * Libgcrypt is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this program; if not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <config.h>
+
+#if defined(ENABLE_PPC_CRYPTO_SUPPORT) && \
+ defined(HAVE_COMPATIBLE_CC_PPC_ALTIVEC) && \
+ defined(HAVE_GCC_INLINE_ASM_PPC_ALTIVEC) && \
+ defined(USE_SHA512) && \
+ __GNUC__ >= 4
+
+#include <altivec.h>
+#include "bufhelp.h"
+
+
+typedef vector unsigned char vector16x_u8;
+typedef vector unsigned long long vector2x_u64;
+
+
+#define ALWAYS_INLINE inline __attribute__((always_inline))
+#define NO_INLINE __attribute__((noinline))
+#define NO_INSTRUMENT_FUNCTION __attribute__((no_instrument_function))
+
+#define ASM_FUNC_ATTR NO_INSTRUMENT_FUNCTION
+#define ASM_FUNC_ATTR_INLINE ASM_FUNC_ATTR ALWAYS_INLINE
+#define ASM_FUNC_ATTR_NOINLINE ASM_FUNC_ATTR NO_INLINE
+
+
+static const u64 K[80] =
+ {
+ U64_C(0x428a2f98d728ae22), U64_C(0x7137449123ef65cd),
+ U64_C(0xb5c0fbcfec4d3b2f), U64_C(0xe9b5dba58189dbbc),
+ U64_C(0x3956c25bf348b538), U64_C(0x59f111f1b605d019),
+ U64_C(0x923f82a4af194f9b), U64_C(0xab1c5ed5da6d8118),
+ U64_C(0xd807aa98a3030242), U64_C(0x12835b0145706fbe),
+ U64_C(0x243185be4ee4b28c), U64_C(0x550c7dc3d5ffb4e2),
+ U64_C(0x72be5d74f27b896f), U64_C(0x80deb1fe3b1696b1),
+ U64_C(0x9bdc06a725c71235), U64_C(0xc19bf174cf692694),
+ U64_C(0xe49b69c19ef14ad2), U64_C(0xefbe4786384f25e3),
+ U64_C(0x0fc19dc68b8cd5b5), U64_C(0x240ca1cc77ac9c65),
+ U64_C(0x2de92c6f592b0275), U64_C(0x4a7484aa6ea6e483),
+ U64_C(0x5cb0a9dcbd41fbd4), U64_C(0x76f988da831153b5),
+ U64_C(0x983e5152ee66dfab), U64_C(0xa831c66d2db43210),
+ U64_C(0xb00327c898fb213f), U64_C(0xbf597fc7beef0ee4),
+ U64_C(0xc6e00bf33da88fc2), U64_C(0xd5a79147930aa725),
+ U64_C(0x06ca6351e003826f), U64_C(0x142929670a0e6e70),
+ U64_C(0x27b70a8546d22ffc), U64_C(0x2e1b21385c26c926),
+ U64_C(0x4d2c6dfc5ac42aed), U64_C(0x53380d139d95b3df),
+ U64_C(0x650a73548baf63de), U64_C(0x766a0abb3c77b2a8),
+ U64_C(0x81c2c92e47edaee6), U64_C(0x92722c851482353b),
+ U64_C(0xa2bfe8a14cf10364), U64_C(0xa81a664bbc423001),
+ U64_C(0xc24b8b70d0f89791), U64_C(0xc76c51a30654be30),
+ U64_C(0xd192e819d6ef5218), U64_C(0xd69906245565a910),
+ U64_C(0xf40e35855771202a), U64_C(0x106aa07032bbd1b8),
+ U64_C(0x19a4c116b8d2d0c8), U64_C(0x1e376c085141ab53),
+ U64_C(0x2748774cdf8eeb99), U64_C(0x34b0bcb5e19b48a8),
+ U64_C(0x391c0cb3c5c95a63), U64_C(0x4ed8aa4ae3418acb),
+ U64_C(0x5b9cca4f7763e373), U64_C(0x682e6ff3d6b2b8a3),
+ U64_C(0x748f82ee5defb2fc), U64_C(0x78a5636f43172f60),
+ U64_C(0x84c87814a1f0ab72), U64_C(0x8cc702081a6439ec),
+ U64_C(0x90befffa23631e28), U64_C(0xa4506cebde82bde9),
+ U64_C(0xbef9a3f7b2c67915), U64_C(0xc67178f2e372532b),
+ U64_C(0xca273eceea26619c), U64_C(0xd186b8c721c0c207),
+ U64_C(0xeada7dd6cde0eb1e), U64_C(0xf57d4f7fee6ed178),
+ U64_C(0x06f067aa72176fba), U64_C(0x0a637dc5a2c898a6),
+ U64_C(0x113f9804bef90dae), U64_C(0x1b710b35131c471b),
+ U64_C(0x28db77f523047d84), U64_C(0x32caab7b40c72493),
+ U64_C(0x3c9ebe0a15c9bebc), U64_C(0x431d67c49c100d4c),
+ U64_C(0x4cc5d4becb3e42b6), U64_C(0x597f299cfc657e2a),
+ U64_C(0x5fcb6fab3ad6faec), U64_C(0x6c44198c4a475817)
+ };
+
+
+static ASM_FUNC_ATTR_INLINE u64
+ror64 (u64 v, u64 shift)
+{
+ return (v >> (shift & 63)) ^ (v << ((64 - shift) & 63));
+}
+
+
+static ASM_FUNC_ATTR_INLINE vector2x_u64
+vec_rol_elems(vector2x_u64 v, unsigned int idx)
+{
+#ifndef WORDS_BIGENDIAN
+ return vec_sld (v, v, (16 - (8 * idx)) & 15);
+#else
+ return vec_sld (v, v, (8 * idx) & 15);
+#endif
+}
+
+
+static ASM_FUNC_ATTR_INLINE vector2x_u64
+vec_merge_idx0_elems(vector2x_u64 v0, vector2x_u64 v1)
+{
+ return vec_mergeh (v0, v1);
+}
+
+
+static ASM_FUNC_ATTR_INLINE vector2x_u64
+vec_vshasigma_u64(vector2x_u64 v, unsigned int a, unsigned int b)
+{
+ asm ("vshasigmad %0,%1,%2,%3"
+ : "=v" (v)
+ : "v" (v), "g" (a), "g" (b)
+ : "memory");
+ return v;
+}
+
+
+/* SHA2 round in vector registers */
+#define R(a,b,c,d,e,f,g,h,k,w) do \
+ { \
+ t1 = (h); \
+ t1 += ((k) + (w)); \
+ t1 += Cho((e),(f),(g)); \
+ t1 += Sum1((e)); \
+ t2 = Sum0((a)); \
+ t2 += Maj((a),(b),(c)); \
+ d += t1; \
+ h = t1 + t2; \
+ } while (0)
+
+#define Cho(b, c, d) (vec_sel(d, c, b))
+
+#define Maj(c, d, b) (vec_sel(c, b, c ^ d))
+
+#define Sum0(x) (vec_vshasigma_u64(x, 1, 0))
+
+#define Sum1(x) (vec_vshasigma_u64(x, 1, 15))
+
+
+/* Message expansion on general purpose registers */
+#define S0(x) (ror64 ((x), 1) ^ ror64 ((x), 8) ^ ((x) >> 7))
+#define S1(x) (ror64 ((x), 19) ^ ror64 ((x), 61) ^ ((x) >> 6))
+
+#define I(i) ( w[i] = buf_get_be64(data + i * 8) )
+#define WN(i) ({ w[i&0x0f] += w[(i-7) &0x0f]; \
+ w[i&0x0f] += S0(w[(i-15)&0x0f]); \
+ w[i&0x0f] += S1(w[(i-2) &0x0f]); \
+ w[i&0x0f]; })
+#define W(i) ({ u64 r = w[i&0x0f]; WN(i); r; })
+#define L(i) w[i&0x0f]
+
+
+unsigned int ASM_FUNC_ATTR
+_gcry_sha512_transform_ppc8(u64 state[8],
+ const unsigned char *data, size_t nblks)
+{
+  /* GPRs are used for message expansion, as vector-intrinsics-based code
+   * generates slower results. */
+ vector2x_u64 h0, h1, h2, h3, h4, h5, h6, h7;
+ vector2x_u64 a, b, c, d, e, f, g, h, t1, t2;
+ u64 w[16];
+
+ h0 = vec_vsx_ld (8 * 0, (unsigned long long *)state);
+ h1 = vec_rol_elems (h0, 1);
+ h2 = vec_vsx_ld (8 * 2, (unsigned long long *)state);
+ h3 = vec_rol_elems (h2, 1);
+ h4 = vec_vsx_ld (8 * 4, (unsigned long long *)state);
+ h5 = vec_rol_elems (h4, 1);
+ h6 = vec_vsx_ld (8 * 6, (unsigned long long *)state);
+ h7 = vec_rol_elems (h6, 1);
+
+ while (nblks >= 2)
+ {
+ a = h0;
+ b = h1;
+ c = h2;
+ d = h3;
+ e = h4;
+ f = h5;
+ g = h6;
+ h = h7;
+
+ I(0); I(1); I(2); I(3);
+ I(4); I(5); I(6); I(7);
+ I(8); I(9); I(10); I(11);
+ I(12); I(13); I(14); I(15);
+ data += 128;
+ R(a, b, c, d, e, f, g, h, K[0], W(0));
+ R(h, a, b, c, d, e, f, g, K[1], W(1));
+ R(g, h, a, b, c, d, e, f, K[2], W(2));
+ R(f, g, h, a, b, c, d, e, K[3], W(3));
+ R(e, f, g, h, a, b, c, d, K[4], W(4));
+ R(d, e, f, g, h, a, b, c, K[5], W(5));
+ R(c, d, e, f, g, h, a, b, K[6], W(6));
+ R(b, c, d, e, f, g, h, a, K[7], W(7));
+ R(a, b, c, d, e, f, g, h, K[8], W(8));
+ R(h, a, b, c, d, e, f, g, K[9], W(9));
+ R(g, h, a, b, c, d, e, f, K[10], W(10));
+ R(f, g, h, a, b, c, d, e, K[11], W(11));
+ R(e, f, g, h, a, b, c, d, K[12], W(12));
+ R(d, e, f, g, h, a, b, c, K[13], W(13));
+ R(c, d, e, f, g, h, a, b, K[14], W(14));
+ R(b, c, d, e, f, g, h, a, K[15], W(15));
+
+ R(a, b, c, d, e, f, g, h, K[16], W(16));
+ R(h, a, b, c, d, e, f, g, K[17], W(17));
+ R(g, h, a, b, c, d, e, f, K[18], W(18));
+ R(f, g, h, a, b, c, d, e, K[19], W(19));
+ R(e, f, g, h, a, b, c, d, K[20], W(20));
+ R(d, e, f, g, h, a, b, c, K[21], W(21));
+ R(c, d, e, f, g, h, a, b, K[22], W(22));
+ R(b, c, d, e, f, g, h, a, K[23], W(23));
+ R(a, b, c, d, e, f, g, h, K[24], W(24));
+ R(h, a, b, c, d, e, f, g, K[25], W(25));
+ R(g, h, a, b, c, d, e, f, K[26], W(26));
+ R(f, g, h, a, b, c, d, e, K[27], W(27));
+ R(e, f, g, h, a, b, c, d, K[28], W(28));
+ R(d, e, f, g, h, a, b, c, K[29], W(29));
+ R(c, d, e, f, g, h, a, b, K[30], W(30));
+ R(b, c, d, e, f, g, h, a, K[31], W(31));
+
+ R(a, b, c, d, e, f, g, h, K[32], W(32));
+ R(h, a, b, c, d, e, f, g, K[33], W(33));
+ R(g, h, a, b, c, d, e, f, K[34], W(34));
+ R(f, g, h, a, b, c, d, e, K[35], W(35));
+ R(e, f, g, h, a, b, c, d, K[36], W(36));
+ R(d, e, f, g, h, a, b, c, K[37], W(37));
+ R(c, d, e, f, g, h, a, b, K[38], W(38));
+ R(b, c, d, e, f, g, h, a, K[39], W(39));
+ R(a, b, c, d, e, f, g, h, K[40], W(40));
+ R(h, a, b, c, d, e, f, g, K[41], W(41));
+ R(g, h, a, b, c, d, e, f, K[42], W(42));
+ R(f, g, h, a, b, c, d, e, K[43], W(43));
+ R(e, f, g, h, a, b, c, d, K[44], W(44));
+ R(d, e, f, g, h, a, b, c, K[45], W(45));
+ R(c, d, e, f, g, h, a, b, K[46], W(46));
+ R(b, c, d, e, f, g, h, a, K[47], W(47));
+
+ R(a, b, c, d, e, f, g, h, K[48], W(48));
+ R(h, a, b, c, d, e, f, g, K[49], W(49));
+ R(g, h, a, b, c, d, e, f, K[50], W(50));
+ R(f, g, h, a, b, c, d, e, K[51], W(51));
+ R(e, f, g, h, a, b, c, d, K[52], W(52));
+ R(d, e, f, g, h, a, b, c, K[53], W(53));
+ R(c, d, e, f, g, h, a, b, K[54], W(54));
+ R(b, c, d, e, f, g, h, a, K[55], W(55));
+ R(a, b, c, d, e, f, g, h, K[56], W(56));
+ R(h, a, b, c, d, e, f, g, K[57], W(57));
+ R(g, h, a, b, c, d, e, f, K[58], W(58));
+ R(f, g, h, a, b, c, d, e, K[59], W(59));
+ R(e, f, g, h, a, b, c, d, K[60], W(60));
+ R(d, e, f, g, h, a, b, c, K[61], W(61));
+ R(c, d, e, f, g, h, a, b, K[62], W(62));
+ R(b, c, d, e, f, g, h, a, K[63], W(63));
+
+ R(a, b, c, d, e, f, g, h, K[64], L(64));
+ R(h, a, b, c, d, e, f, g, K[65], L(65));
+ R(g, h, a, b, c, d, e, f, K[66], L(66));
+ R(f, g, h, a, b, c, d, e, K[67], L(67));
+ I(0); I(1); I(2); I(3);
+ R(e, f, g, h, a, b, c, d, K[68], L(68));
+ R(d, e, f, g, h, a, b, c, K[69], L(69));
+ R(c, d, e, f, g, h, a, b, K[70], L(70));
+ R(b, c, d, e, f, g, h, a, K[71], L(71));
+ I(4); I(5); I(6); I(7);
+ R(a, b, c, d, e, f, g, h, K[72], L(72));
+ R(h, a, b, c, d, e, f, g, K[73], L(73));
+ R(g, h, a, b, c, d, e, f, K[74], L(74));
+ R(f, g, h, a, b, c, d, e, K[75], L(75));
+ I(8); I(9); I(10); I(11);
+ R(e, f, g, h, a, b, c, d, K[76], L(76));
+ R(d, e, f, g, h, a, b, c, K[77], L(77));
+ R(c, d, e, f, g, h, a, b, K[78], L(78));
+ R(b, c, d, e, f, g, h, a, K[79], L(79));
+ I(12); I(13); I(14); I(15);
+ data += 128;
+
+ h0 += a;
+ h1 += b;
+ h2 += c;
+ h3 += d;
+ h4 += e;
+ h5 += f;
+ h6 += g;
+ h7 += h;
+ a = h0;
+ b = h1;
+ c = h2;
+ d = h3;
+ e = h4;
+ f = h5;
+ g = h6;
+ h = h7;
+
+ R(a, b, c, d, e, f, g, h, K[0], W(0));
+ R(h, a, b, c, d, e, f, g, K[1], W(1));
+ R(g, h, a, b, c, d, e, f, K[2], W(2));
+ R(f, g, h, a, b, c, d, e, K[3], W(3));
+ R(e, f, g, h, a, b, c, d, K[4], W(4));
+ R(d, e, f, g, h, a, b, c, K[5], W(5));
+ R(c, d, e, f, g, h, a, b, K[6], W(6));
+ R(b, c, d, e, f, g, h, a, K[7], W(7));
+ R(a, b, c, d, e, f, g, h, K[8], W(8));
+ R(h, a, b, c, d, e, f, g, K[9], W(9));
+ R(g, h, a, b, c, d, e, f, K[10], W(10));
+ R(f, g, h, a, b, c, d, e, K[11], W(11));
+ R(e, f, g, h, a, b, c, d, K[12], W(12));
+ R(d, e, f, g, h, a, b, c, K[13], W(13));
+ R(c, d, e, f, g, h, a, b, K[14], W(14));
+ R(b, c, d, e, f, g, h, a, K[15], W(15));
+
+ R(a, b, c, d, e, f, g, h, K[16], W(16));
+ R(h, a, b, c, d, e, f, g, K[17], W(17));
+ R(g, h, a, b, c, d, e, f, K[18], W(18));
+ R(f, g, h, a, b, c, d, e, K[19], W(19));
+ R(e, f, g, h, a, b, c, d, K[20], W(20));
+ R(d, e, f, g, h, a, b, c, K[21], W(21));
+ R(c, d, e, f, g, h, a, b, K[22], W(22));
+ R(b, c, d, e, f, g, h, a, K[23], W(23));
+ R(a, b, c, d, e, f, g, h, K[24], W(24));
+ R(h, a, b, c, d, e, f, g, K[25], W(25));
+ R(g, h, a, b, c, d, e, f, K[26], W(26));
+ R(f, g, h, a, b, c, d, e, K[27], W(27));
+ R(e, f, g, h, a, b, c, d, K[28], W(28));
+ R(d, e, f, g, h, a, b, c, K[29], W(29));
+ R(c, d, e, f, g, h, a, b, K[30], W(30));
+ R(b, c, d, e, f, g, h, a, K[31], W(31));
+
+ R(a, b, c, d, e, f, g, h, K[32], W(32));
+ R(h, a, b, c, d, e, f, g, K[33], W(33));
+ R(g, h, a, b, c, d, e, f, K[34], W(34));
+ R(f, g, h, a, b, c, d, e, K[35], W(35));
+ R(e, f, g, h, a, b, c, d, K[36], W(36));
+ R(d, e, f, g, h, a, b, c, K[37], W(37));
+ R(c, d, e, f, g, h, a, b, K[38], W(38));
+ R(b, c, d, e, f, g, h, a, K[39], W(39));
+ R(a, b, c, d, e, f, g, h, K[40], W(40));
+ R(h, a, b, c, d, e, f, g, K[41], W(41));
+ R(g, h, a, b, c, d, e, f, K[42], W(42));
+ R(f, g, h, a, b, c, d, e, K[43], W(43));
+ R(e, f, g, h, a, b, c, d, K[44], W(44));
+ R(d, e, f, g, h, a, b, c, K[45], W(45));
+ R(c, d, e, f, g, h, a, b, K[46], W(46));
+ R(b, c, d, e, f, g, h, a, K[47], W(47));
+
+ R(a, b, c, d, e, f, g, h, K[48], W(48));
+ R(h, a, b, c, d, e, f, g, K[49], W(49));
+ R(g, h, a, b, c, d, e, f, K[50], W(50));
+ R(f, g, h, a, b, c, d, e, K[51], W(51));
+ R(e, f, g, h, a, b, c, d, K[52], W(52));
+ R(d, e, f, g, h, a, b, c, K[53], W(53));
+ R(c, d, e, f, g, h, a, b, K[54], W(54));
+ R(b, c, d, e, f, g, h, a, K[55], W(55));
+ R(a, b, c, d, e, f, g, h, K[56], W(56));
+ R(h, a, b, c, d, e, f, g, K[57], W(57));
+ R(g, h, a, b, c, d, e, f, K[58], W(58));
+ R(f, g, h, a, b, c, d, e, K[59], W(59));
+ R(e, f, g, h, a, b, c, d, K[60], W(60));
+ R(d, e, f, g, h, a, b, c, K[61], W(61));
+ R(c, d, e, f, g, h, a, b, K[62], W(62));
+ R(b, c, d, e, f, g, h, a, K[63], W(63));
+
+ R(a, b, c, d, e, f, g, h, K[64], L(64));
+ R(h, a, b, c, d, e, f, g, K[65], L(65));
+ R(g, h, a, b, c, d, e, f, K[66], L(66));
+ R(f, g, h, a, b, c, d, e, K[67], L(67));
+ R(e, f, g, h, a, b, c, d, K[68], L(68));
+ R(d, e, f, g, h, a, b, c, K[69], L(69));
+ R(c, d, e, f, g, h, a, b, K[70], L(70));
+ R(b, c, d, e, f, g, h, a, K[71], L(71));
+ R(a, b, c, d, e, f, g, h, K[72], L(72));
+ R(h, a, b, c, d, e, f, g, K[73], L(73));
+ R(g, h, a, b, c, d, e, f, K[74], L(74));
+ R(f, g, h, a, b, c, d, e, K[75], L(75));
+ R(e, f, g, h, a, b, c, d, K[76], L(76));
+ R(d, e, f, g, h, a, b, c, K[77], L(77));
+ R(c, d, e, f, g, h, a, b, K[78], L(78));
+ R(b, c, d, e, f, g, h, a, K[79], L(79));
+
+ h0 += a;
+ h1 += b;
+ h2 += c;
+ h3 += d;
+ h4 += e;
+ h5 += f;
+ h6 += g;
+ h7 += h;
+
+ nblks -= 2;
+ }
+
+ while (nblks)
+ {
+ a = h0;
+ b = h1;
+ c = h2;
+ d = h3;
+ e = h4;
+ f = h5;
+ g = h6;
+ h = h7;
+
+ I(0); I(1); I(2); I(3);
+ I(4); I(5); I(6); I(7);
+ I(8); I(9); I(10); I(11);
+ I(12); I(13); I(14); I(15);
+ data += 128;
+ R(a, b, c, d, e, f, g, h, K[0], W(0));
+ R(h, a, b, c, d, e, f, g, K[1], W(1));
+ R(g, h, a, b, c, d, e, f, K[2], W(2));
+ R(f, g, h, a, b, c, d, e, K[3], W(3));
+ R(e, f, g, h, a, b, c, d, K[4], W(4));
+ R(d, e, f, g, h, a, b, c, K[5], W(5));
+ R(c, d, e, f, g, h, a, b, K[6], W(6));
+ R(b, c, d, e, f, g, h, a, K[7], W(7));
+ R(a, b, c, d, e, f, g, h, K[8], W(8));
+ R(h, a, b, c, d, e, f, g, K[9], W(9));
+ R(g, h, a, b, c, d, e, f, K[10], W(10));
+ R(f, g, h, a, b, c, d, e, K[11], W(11));
+ R(e, f, g, h, a, b, c, d, K[12], W(12));
+ R(d, e, f, g, h, a, b, c, K[13], W(13));
+ R(c, d, e, f, g, h, a, b, K[14], W(14));
+ R(b, c, d, e, f, g, h, a, K[15], W(15));
+
+ R(a, b, c, d, e, f, g, h, K[16], W(16));
+ R(h, a, b, c, d, e, f, g, K[17], W(17));
+ R(g, h, a, b, c, d, e, f, K[18], W(18));
+ R(f, g, h, a, b, c, d, e, K[19], W(19));
+ R(e, f, g, h, a, b, c, d, K[20], W(20));
+ R(d, e, f, g, h, a, b, c, K[21], W(21));
+ R(c, d, e, f, g, h, a, b, K[22], W(22));
+ R(b, c, d, e, f, g, h, a, K[23], W(23));
+ R(a, b, c, d, e, f, g, h, K[24], W(24));
+ R(h, a, b, c, d, e, f, g, K[25], W(25));
+ R(g, h, a, b, c, d, e, f, K[26], W(26));
+ R(f, g, h, a, b, c, d, e, K[27], W(27));
+ R(e, f, g, h, a, b, c, d, K[28], W(28));
+ R(d, e, f, g, h, a, b, c, K[29], W(29));
+ R(c, d, e, f, g, h, a, b, K[30], W(30));
+ R(b, c, d, e, f, g, h, a, K[31], W(31));
+
+ R(a, b, c, d, e, f, g, h, K[32], W(32));
+ R(h, a, b, c, d, e, f, g, K[33], W(33));
+ R(g, h, a, b, c, d, e, f, K[34], W(34));
+ R(f, g, h, a, b, c, d, e, K[35], W(35));
+ R(e, f, g, h, a, b, c, d, K[36], W(36));
+ R(d, e, f, g, h, a, b, c, K[37], W(37));
+ R(c, d, e, f, g, h, a, b, K[38], W(38));
+ R(b, c, d, e, f, g, h, a, K[39], W(39));
+ R(a, b, c, d, e, f, g, h, K[40], W(40));
+ R(h, a, b, c, d, e, f, g, K[41], W(41));
+ R(g, h, a, b, c, d, e, f, K[42], W(42));
+ R(f, g, h, a, b, c, d, e, K[43], W(43));
+ R(e, f, g, h, a, b, c, d, K[44], W(44));
+ R(d, e, f, g, h, a, b, c, K[45], W(45));
+ R(c, d, e, f, g, h, a, b, K[46], W(46));
+ R(b, c, d, e, f, g, h, a, K[47], W(47));
+
+ R(a, b, c, d, e, f, g, h, K[48], W(48));
+ R(h, a, b, c, d, e, f, g, K[49], W(49));
+ R(g, h, a, b, c, d, e, f, K[50], W(50));
+ R(f, g, h, a, b, c, d, e, K[51], W(51));
+ R(e, f, g, h, a, b, c, d, K[52], W(52));
+ R(d, e, f, g, h, a, b, c, K[53], W(53));
+ R(c, d, e, f, g, h, a, b, K[54], W(54));
+ R(b, c, d, e, f, g, h, a, K[55], W(55));
+ R(a, b, c, d, e, f, g, h, K[56], W(56));
+ R(h, a, b, c, d, e, f, g, K[57], W(57));
+ R(g, h, a, b, c, d, e, f, K[58], W(58));
+ R(f, g, h, a, b, c, d, e, K[59], W(59));
+ R(e, f, g, h, a, b, c, d, K[60], W(60));
+ R(d, e, f, g, h, a, b, c, K[61], W(61));
+ R(c, d, e, f, g, h, a, b, K[62], W(62));
+ R(b, c, d, e, f, g, h, a, K[63], W(63));
+
+ R(a, b, c, d, e, f, g, h, K[64], L(64));
+ R(h, a, b, c, d, e, f, g, K[65], L(65));
+ R(g, h, a, b, c, d, e, f, K[66], L(66));
+ R(f, g, h, a, b, c, d, e, K[67], L(67));
+ R(e, f, g, h, a, b, c, d, K[68], L(68));
+ R(d, e, f, g, h, a, b, c, K[69], L(69));
+ R(c, d, e, f, g, h, a, b, K[70], L(70));
+ R(b, c, d, e, f, g, h, a, K[71], L(71));
+ R(a, b, c, d, e, f, g, h, K[72], L(72));
+ R(h, a, b, c, d, e, f, g, K[73], L(73));
+ R(g, h, a, b, c, d, e, f, K[74], L(74));
+ R(f, g, h, a, b, c, d, e, K[75], L(75));
+ R(e, f, g, h, a, b, c, d, K[76], L(76));
+ R(d, e, f, g, h, a, b, c, K[77], L(77));
+ R(c, d, e, f, g, h, a, b, K[78], L(78));
+ R(b, c, d, e, f, g, h, a, K[79], L(79));
+
+ h0 += a;
+ h1 += b;
+ h2 += c;
+ h3 += d;
+ h4 += e;
+ h5 += f;
+ h6 += g;
+ h7 += h;
+
+ nblks--;
+ }
+
+ h0 = vec_merge_idx0_elems (h0, h1);
+ h2 = vec_merge_idx0_elems (h2, h3);
+ h4 = vec_merge_idx0_elems (h4, h5);
+ h6 = vec_merge_idx0_elems (h6, h7);
+ vec_vsx_st (h0, 8 * 0, (unsigned long long *)state);
+ vec_vsx_st (h2, 8 * 2, (unsigned long long *)state);
+ vec_vsx_st (h4, 8 * 4, (unsigned long long *)state);
+ vec_vsx_st (h6, 8 * 6, (unsigned long long *)state);
+
+ return sizeof(w);
+}
+#undef R
+#undef Cho
+#undef Maj
+#undef Sum0
+#undef Sum1
+#undef S0
+#undef S1
+#undef I
+#undef W
+#undef I2
+#undef W2
+#undef R2
+
+
+/* SHA2 round in general purpose registers */
+#define R(a,b,c,d,e,f,g,h,k,w) do \
+ { \
+ t1 = (h) + Sum1((e)) + Cho((e),(f),(g)) + ((k) + (w));\
+ t2 = Sum0((a)) + Maj((a),(b),(c)); \
+ d += t1; \
+ h = t1 + t2; \
+ } while (0)
+
+#define Cho(x, y, z) ((x & y) + (~x & z))
+
+#define Maj(z, x, y) ((x & y) + (z & (x ^ y)))
+
+#define Sum0(x) (ror64(x, 28) ^ ror64(x ^ ror64(x, 39-34), 34))
+
+#define Sum1(x) (ror64(x, 14) ^ ror64(x, 18) ^ ror64(x, 41))
+
+
+/* Message expansion on general purpose registers */
+#define S0(x) (ror64 ((x), 1) ^ ror64 ((x), 8) ^ ((x) >> 7))
+#define S1(x) (ror64 ((x), 19) ^ ror64 ((x), 61) ^ ((x) >> 6))
+
+#define I(i) ( w[i] = buf_get_be64(data + i * 8) )
+#define WN(i) ({ w[i&0x0f] += w[(i-7) &0x0f]; \
+ w[i&0x0f] += S0(w[(i-15)&0x0f]); \
+ w[i&0x0f] += S1(w[(i-2) &0x0f]); \
+ w[i&0x0f]; })
+#define W(i) ({ u64 r = w[i&0x0f]; WN(i); r; })
+#define L(i) w[i&0x0f]
+
+
+unsigned int ASM_FUNC_ATTR
+_gcry_sha512_transform_ppc9(u64 state[8], const unsigned char *data,
+ size_t nblks)
+{
+  /* GPRs are used for the round function and message expansion, as
+   * vector-intrinsics-based code generates slower results on POWER9. */
+ u64 a, b, c, d, e, f, g, h, t1, t2;
+ u64 w[16];
+
+ a = state[0];
+ b = state[1];
+ c = state[2];
+ d = state[3];
+ e = state[4];
+ f = state[5];
+ g = state[6];
+ h = state[7];
+
+ while (nblks >= 2)
+ {
+ I(0); I(1); I(2); I(3);
+ I(4); I(5); I(6); I(7);
+ I(8); I(9); I(10); I(11);
+ I(12); I(13); I(14); I(15);
+ data += 128;
+ R(a, b, c, d, e, f, g, h, K[0], W(0));
+ R(h, a, b, c, d, e, f, g, K[1], W(1));
+ R(g, h, a, b, c, d, e, f, K[2], W(2));
+ R(f, g, h, a, b, c, d, e, K[3], W(3));
+ R(e, f, g, h, a, b, c, d, K[4], W(4));
+ R(d, e, f, g, h, a, b, c, K[5], W(5));
+ R(c, d, e, f, g, h, a, b, K[6], W(6));
+ R(b, c, d, e, f, g, h, a, K[7], W(7));
+ R(a, b, c, d, e, f, g, h, K[8], W(8));
+ R(h, a, b, c, d, e, f, g, K[9], W(9));
+ R(g, h, a, b, c, d, e, f, K[10], W(10));
+ R(f, g, h, a, b, c, d, e, K[11], W(11));
+ R(e, f, g, h, a, b, c, d, K[12], W(12));
+ R(d, e, f, g, h, a, b, c, K[13], W(13));
+ R(c, d, e, f, g, h, a, b, K[14], W(14));
+ R(b, c, d, e, f, g, h, a, K[15], W(15));
+
+ R(a, b, c, d, e, f, g, h, K[16], W(16));
+ R(h, a, b, c, d, e, f, g, K[17], W(17));
+ R(g, h, a, b, c, d, e, f, K[18], W(18));
+ R(f, g, h, a, b, c, d, e, K[19], W(19));
+ R(e, f, g, h, a, b, c, d, K[20], W(20));
+ R(d, e, f, g, h, a, b, c, K[21], W(21));
+ R(c, d, e, f, g, h, a, b, K[22], W(22));
+ R(b, c, d, e, f, g, h, a, K[23], W(23));
+ R(a, b, c, d, e, f, g, h, K[24], W(24));
+ R(h, a, b, c, d, e, f, g, K[25], W(25));
+ R(g, h, a, b, c, d, e, f, K[26], W(26));
+ R(f, g, h, a, b, c, d, e, K[27], W(27));
+ R(e, f, g, h, a, b, c, d, K[28], W(28));
+ R(d, e, f, g, h, a, b, c, K[29], W(29));
+ R(c, d, e, f, g, h, a, b, K[30], W(30));
+ R(b, c, d, e, f, g, h, a, K[31], W(31));
+
+ R(a, b, c, d, e, f, g, h, K[32], W(32));
+ R(h, a, b, c, d, e, f, g, K[33], W(33));
+ R(g, h, a, b, c, d, e, f, K[34], W(34));
+ R(f, g, h, a, b, c, d, e, K[35], W(35));
+ R(e, f, g, h, a, b, c, d, K[36], W(36));
+ R(d, e, f, g, h, a, b, c, K[37], W(37));
+ R(c, d, e, f, g, h, a, b, K[38], W(38));
+ R(b, c, d, e, f, g, h, a, K[39], W(39));
+ R(a, b, c, d, e, f, g, h, K[40], W(40));
+ R(h, a, b, c, d, e, f, g, K[41], W(41));
+ R(g, h, a, b, c, d, e, f, K[42], W(42));
+ R(f, g, h, a, b, c, d, e, K[43], W(43));
+ R(e, f, g, h, a, b, c, d, K[44], W(44));
+ R(d, e, f, g, h, a, b, c, K[45], W(45));
+ R(c, d, e, f, g, h, a, b, K[46], W(46));
+ R(b, c, d, e, f, g, h, a, K[47], W(47));
+
+ R(a, b, c, d, e, f, g, h, K[48], W(48));
+ R(h, a, b, c, d, e, f, g, K[49], W(49));
+ R(g, h, a, b, c, d, e, f, K[50], W(50));
+ R(f, g, h, a, b, c, d, e, K[51], W(51));
+ R(e, f, g, h, a, b, c, d, K[52], W(52));
+ R(d, e, f, g, h, a, b, c, K[53], W(53));
+ R(c, d, e, f, g, h, a, b, K[54], W(54));
+ R(b, c, d, e, f, g, h, a, K[55], W(55));
+ R(a, b, c, d, e, f, g, h, K[56], W(56));
+ R(h, a, b, c, d, e, f, g, K[57], W(57));
+ R(g, h, a, b, c, d, e, f, K[58], W(58));
+ R(f, g, h, a, b, c, d, e, K[59], W(59));
+ R(e, f, g, h, a, b, c, d, K[60], W(60));
+ R(d, e, f, g, h, a, b, c, K[61], W(61));
+ R(c, d, e, f, g, h, a, b, K[62], W(62));
+ R(b, c, d, e, f, g, h, a, K[63], W(63));
+
+ R(a, b, c, d, e, f, g, h, K[64], L(64));
+ R(h, a, b, c, d, e, f, g, K[65], L(65));
+ R(g, h, a, b, c, d, e, f, K[66], L(66));
+ R(f, g, h, a, b, c, d, e, K[67], L(67));
+ I(0); I(1); I(2); I(3);
+ R(e, f, g, h, a, b, c, d, K[68], L(68));
+ R(d, e, f, g, h, a, b, c, K[69], L(69));
+ R(c, d, e, f, g, h, a, b, K[70], L(70));
+ R(b, c, d, e, f, g, h, a, K[71], L(71));
+ I(4); I(5); I(6); I(7);
+ R(a, b, c, d, e, f, g, h, K[72], L(72));
+ R(h, a, b, c, d, e, f, g, K[73], L(73));
+ R(g, h, a, b, c, d, e, f, K[74], L(74));
+ R(f, g, h, a, b, c, d, e, K[75], L(75));
+ I(8); I(9); I(10); I(11);
+ R(e, f, g, h, a, b, c, d, K[76], L(76));
+ R(d, e, f, g, h, a, b, c, K[77], L(77));
+ R(c, d, e, f, g, h, a, b, K[78], L(78));
+ R(b, c, d, e, f, g, h, a, K[79], L(79));
+ I(12); I(13); I(14); I(15);
+ data += 128;
+
+ a += state[0];
+ b += state[1];
+ c += state[2];
+ d += state[3];
+ e += state[4];
+ f += state[5];
+ g += state[6];
+ h += state[7];
+ state[0] = a;
+ state[1] = b;
+ state[2] = c;
+ state[3] = d;
+ state[4] = e;
+ state[5] = f;
+ state[6] = g;
+ state[7] = h;
+
+ R(a, b, c, d, e, f, g, h, K[0], W(0));
+ R(h, a, b, c, d, e, f, g, K[1], W(1));
+ R(g, h, a, b, c, d, e, f, K[2], W(2));
+ R(f, g, h, a, b, c, d, e, K[3], W(3));
+ R(e, f, g, h, a, b, c, d, K[4], W(4));
+ R(d, e, f, g, h, a, b, c, K[5], W(5));
+ R(c, d, e, f, g, h, a, b, K[6], W(6));
+ R(b, c, d, e, f, g, h, a, K[7], W(7));
+ R(a, b, c, d, e, f, g, h, K[8], W(8));
+ R(h, a, b, c, d, e, f, g, K[9], W(9));
+ R(g, h, a, b, c, d, e, f, K[10], W(10));
+ R(f, g, h, a, b, c, d, e, K[11], W(11));
+ R(e, f, g, h, a, b, c, d, K[12], W(12));
+ R(d, e, f, g, h, a, b, c, K[13], W(13));
+ R(c, d, e, f, g, h, a, b, K[14], W(14));
+ R(b, c, d, e, f, g, h, a, K[15], W(15));
+
+ R(a, b, c, d, e, f, g, h, K[16], W(16));
+ R(h, a, b, c, d, e, f, g, K[17], W(17));
+ R(g, h, a, b, c, d, e, f, K[18], W(18));
+ R(f, g, h, a, b, c, d, e, K[19], W(19));
+ R(e, f, g, h, a, b, c, d, K[20], W(20));
+ R(d, e, f, g, h, a, b, c, K[21], W(21));
+ R(c, d, e, f, g, h, a, b, K[22], W(22));
+ R(b, c, d, e, f, g, h, a, K[23], W(23));
+ R(a, b, c, d, e, f, g, h, K[24], W(24));
+ R(h, a, b, c, d, e, f, g, K[25], W(25));
+ R(g, h, a, b, c, d, e, f, K[26], W(26));
+ R(f, g, h, a, b, c, d, e, K[27], W(27));
+ R(e, f, g, h, a, b, c, d, K[28], W(28));
+ R(d, e, f, g, h, a, b, c, K[29], W(29));
+ R(c, d, e, f, g, h, a, b, K[30], W(30));
+ R(b, c, d, e, f, g, h, a, K[31], W(31));
+
+ R(a, b, c, d, e, f, g, h, K[32], W(32));
+ R(h, a, b, c, d, e, f, g, K[33], W(33));
+ R(g, h, a, b, c, d, e, f, K[34], W(34));
+ R(f, g, h, a, b, c, d, e, K[35], W(35));
+ R(e, f, g, h, a, b, c, d, K[36], W(36));
+ R(d, e, f, g, h, a, b, c, K[37], W(37));
+ R(c, d, e, f, g, h, a, b, K[38], W(38));
+ R(b, c, d, e, f, g, h, a, K[39], W(39));
+ R(a, b, c, d, e, f, g, h, K[40], W(40));
+ R(h, a, b, c, d, e, f, g, K[41], W(41));
+ R(g, h, a, b, c, d, e, f, K[42], W(42));
+ R(f, g, h, a, b, c, d, e, K[43], W(43));
+ R(e, f, g, h, a, b, c, d, K[44], W(44));
+ R(d, e, f, g, h, a, b, c, K[45], W(45));
+ R(c, d, e, f, g, h, a, b, K[46], W(46));
+ R(b, c, d, e, f, g, h, a, K[47], W(47));
+
+ R(a, b, c, d, e, f, g, h, K[48], W(48));
+ R(h, a, b, c, d, e, f, g, K[49], W(49));
+ R(g, h, a, b, c, d, e, f, K[50], W(50));
+ R(f, g, h, a, b, c, d, e, K[51], W(51));
+ R(e, f, g, h, a, b, c, d, K[52], W(52));
+ R(d, e, f, g, h, a, b, c, K[53], W(53));
+ R(c, d, e, f, g, h, a, b, K[54], W(54));
+ R(b, c, d, e, f, g, h, a, K[55], W(55));
+ R(a, b, c, d, e, f, g, h, K[56], W(56));
+ R(h, a, b, c, d, e, f, g, K[57], W(57));
+ R(g, h, a, b, c, d, e, f, K[58], W(58));
+ R(f, g, h, a, b, c, d, e, K[59], W(59));
+ R(e, f, g, h, a, b, c, d, K[60], W(60));
+ R(d, e, f, g, h, a, b, c, K[61], W(61));
+ R(c, d, e, f, g, h, a, b, K[62], W(62));
+ R(b, c, d, e, f, g, h, a, K[63], W(63));
+
+ R(a, b, c, d, e, f, g, h, K[64], L(64));
+ R(h, a, b, c, d, e, f, g, K[65], L(65));
+ R(g, h, a, b, c, d, e, f, K[66], L(66));
+ R(f, g, h, a, b, c, d, e, K[67], L(67));
+ R(e, f, g, h, a, b, c, d, K[68], L(68));
+ R(d, e, f, g, h, a, b, c, K[69], L(69));
+ R(c, d, e, f, g, h, a, b, K[70], L(70));
+ R(b, c, d, e, f, g, h, a, K[71], L(71));
+ R(a, b, c, d, e, f, g, h, K[72], L(72));
+ R(h, a, b, c, d, e, f, g, K[73], L(73));
+ R(g, h, a, b, c, d, e, f, K[74], L(74));
+ R(f, g, h, a, b, c, d, e, K[75], L(75));
+ R(e, f, g, h, a, b, c, d, K[76], L(76));
+ R(d, e, f, g, h, a, b, c, K[77], L(77));
+ R(c, d, e, f, g, h, a, b, K[78], L(78));
+ R(b, c, d, e, f, g, h, a, K[79], L(79));
+
+ a += state[0];
+ b += state[1];
+ c += state[2];
+ d += state[3];
+ e += state[4];
+ f += state[5];
+ g += state[6];
+ h += state[7];
+ state[0] = a;
+ state[1] = b;
+ state[2] = c;
+ state[3] = d;
+ state[4] = e;
+ state[5] = f;
+ state[6] = g;
+ state[7] = h;
+
+ nblks -= 2;
+ }
+
+ while (nblks)
+ {
+ I(0); I(1); I(2); I(3);
+ I(4); I(5); I(6); I(7);
+ I(8); I(9); I(10); I(11);
+ I(12); I(13); I(14); I(15);
+ data += 128;
+ R(a, b, c, d, e, f, g, h, K[0], W(0));
+ R(h, a, b, c, d, e, f, g, K[1], W(1));
+ R(g, h, a, b, c, d, e, f, K[2], W(2));
+ R(f, g, h, a, b, c, d, e, K[3], W(3));
+ R(e, f, g, h, a, b, c, d, K[4], W(4));
+ R(d, e, f, g, h, a, b, c, K[5], W(5));
+ R(c, d, e, f, g, h, a, b, K[6], W(6));
+ R(b, c, d, e, f, g, h, a, K[7], W(7));
+ R(a, b, c, d, e, f, g, h, K[8], W(8));
+ R(h, a, b, c, d, e, f, g, K[9], W(9));
+ R(g, h, a, b, c, d, e, f, K[10], W(10));
+ R(f, g, h, a, b, c, d, e, K[11], W(11));
+ R(e, f, g, h, a, b, c, d, K[12], W(12));
+ R(d, e, f, g, h, a, b, c, K[13], W(13));
+ R(c, d, e, f, g, h, a, b, K[14], W(14));
+ R(b, c, d, e, f, g, h, a, K[15], W(15));
+
+ R(a, b, c, d, e, f, g, h, K[16], W(16));
+ R(h, a, b, c, d, e, f, g, K[17], W(17));
+ R(g, h, a, b, c, d, e, f, K[18], W(18));
+ R(f, g, h, a, b, c, d, e, K[19], W(19));
+ R(e, f, g, h, a, b, c, d, K[20], W(20));
+ R(d, e, f, g, h, a, b, c, K[21], W(21));
+ R(c, d, e, f, g, h, a, b, K[22], W(22));
+ R(b, c, d, e, f, g, h, a, K[23], W(23));
+ R(a, b, c, d, e, f, g, h, K[24], W(24));
+ R(h, a, b, c, d, e, f, g, K[25], W(25));
+ R(g, h, a, b, c, d, e, f, K[26], W(26));
+ R(f, g, h, a, b, c, d, e, K[27], W(27));
+ R(e, f, g, h, a, b, c, d, K[28], W(28));
+ R(d, e, f, g, h, a, b, c, K[29], W(29));
+ R(c, d, e, f, g, h, a, b, K[30], W(30));
+ R(b, c, d, e, f, g, h, a, K[31], W(31));
+
+ R(a, b, c, d, e, f, g, h, K[32], W(32));
+ R(h, a, b, c, d, e, f, g, K[33], W(33));
+ R(g, h, a, b, c, d, e, f, K[34], W(34));
+ R(f, g, h, a, b, c, d, e, K[35], W(35));
+ R(e, f, g, h, a, b, c, d, K[36], W(36));
+ R(d, e, f, g, h, a, b, c, K[37], W(37));
+ R(c, d, e, f, g, h, a, b, K[38], W(38));
+ R(b, c, d, e, f, g, h, a, K[39], W(39));
+ R(a, b, c, d, e, f, g, h, K[40], W(40));
+ R(h, a, b, c, d, e, f, g, K[41], W(41));
+ R(g, h, a, b, c, d, e, f, K[42], W(42));
+ R(f, g, h, a, b, c, d, e, K[43], W(43));
+ R(e, f, g, h, a, b, c, d, K[44], W(44));
+ R(d, e, f, g, h, a, b, c, K[45], W(45));
+ R(c, d, e, f, g, h, a, b, K[46], W(46));
+ R(b, c, d, e, f, g, h, a, K[47], W(47));
+
+ R(a, b, c, d, e, f, g, h, K[48], W(48));
+ R(h, a, b, c, d, e, f, g, K[49], W(49));
+ R(g, h, a, b, c, d, e, f, K[50], W(50));
+ R(f, g, h, a, b, c, d, e, K[51], W(51));
+ R(e, f, g, h, a, b, c, d, K[52], W(52));
+ R(d, e, f, g, h, a, b, c, K[53], W(53));
+ R(c, d, e, f, g, h, a, b, K[54], W(54));
+ R(b, c, d, e, f, g, h, a, K[55], W(55));
+ R(a, b, c, d, e, f, g, h, K[56], W(56));
+ R(h, a, b, c, d, e, f, g, K[57], W(57));
+ R(g, h, a, b, c, d, e, f, K[58], W(58));
+ R(f, g, h, a, b, c, d, e, K[59], W(59));
+ R(e, f, g, h, a, b, c, d, K[60], W(60));
+ R(d, e, f, g, h, a, b, c, K[61], W(61));
+ R(c, d, e, f, g, h, a, b, K[62], W(62));
+ R(b, c, d, e, f, g, h, a, K[63], W(63));
+
+ R(a, b, c, d, e, f, g, h, K[64], L(64));
+ R(h, a, b, c, d, e, f, g, K[65], L(65));
+ R(g, h, a, b, c, d, e, f, K[66], L(66));
+ R(f, g, h, a, b, c, d, e, K[67], L(67));
+ R(e, f, g, h, a, b, c, d, K[68], L(68));
+ R(d, e, f, g, h, a, b, c, K[69], L(69));
+ R(c, d, e, f, g, h, a, b, K[70], L(70));
+ R(b, c, d, e, f, g, h, a, K[71], L(71));
+ R(a, b, c, d, e, f, g, h, K[72], L(72));
+ R(h, a, b, c, d, e, f, g, K[73], L(73));
+ R(g, h, a, b, c, d, e, f, K[74], L(74));
+ R(f, g, h, a, b, c, d, e, K[75], L(75));
+ R(e, f, g, h, a, b, c, d, K[76], L(76));
+ R(d, e, f, g, h, a, b, c, K[77], L(77));
+ R(c, d, e, f, g, h, a, b, K[78], L(78));
+ R(b, c, d, e, f, g, h, a, K[79], L(79));
+
+ a += state[0];
+ b += state[1];
+ c += state[2];
+ d += state[3];
+ e += state[4];
+ f += state[5];
+ g += state[6];
+ h += state[7];
+ state[0] = a;
+ state[1] = b;
+ state[2] = c;
+ state[3] = d;
+ state[4] = e;
+ state[5] = f;
+ state[6] = g;
+ state[7] = h;
+
+ nblks--;
+ }
+
+ return sizeof(w);
+}
+
+#endif /* ENABLE_PPC_CRYPTO_SUPPORT */
diff --git a/cipher/sha512.c b/cipher/sha512.c
index 06e8a2b9..b8035eca 100644
--- a/cipher/sha512.c
+++ b/cipher/sha512.c
@@ -104,6 +104,19 @@
#endif
+/* USE_PPC_CRYPTO indicates whether to enable PowerPC vector crypto
+ * accelerated code. */
+#undef USE_PPC_CRYPTO
+#ifdef ENABLE_PPC_CRYPTO_SUPPORT
+# if defined(HAVE_COMPATIBLE_CC_PPC_ALTIVEC) && \
+ defined(HAVE_GCC_INLINE_ASM_PPC_ALTIVEC)
+# if __GNUC__ >= 4
+# define USE_PPC_CRYPTO 1
+# endif
+# endif
+#endif
+
+
typedef struct
{
u64 h0, h1, h2, h3, h4, h5, h6, h7;
@@ -130,6 +143,31 @@ typedef struct
static unsigned int
transform (void *context, const unsigned char *data, size_t nblks);
+#ifdef USE_PPC_CRYPTO
+unsigned int _gcry_sha512_transform_ppc8(u64 state[8],
+ const unsigned char *input_data,
+ size_t num_blks);
+
+unsigned int _gcry_sha512_transform_ppc9(u64 state[8],
+ const unsigned char *input_data,
+ size_t num_blks);
+
+static unsigned int
+do_sha512_transform_ppc8(void *ctx, const unsigned char *data, size_t nblks)
+{
+ SHA512_CONTEXT *hd = ctx;
+ return _gcry_sha512_transform_ppc8 (&hd->state.h0, data, nblks);
+}
+
+static unsigned int
+do_sha512_transform_ppc9(void *ctx, const unsigned char *data, size_t nblks)
+{
+ SHA512_CONTEXT *hd = ctx;
+ return _gcry_sha512_transform_ppc9 (&hd->state.h0, data, nblks);
+}
+#endif
+
+
static void
sha512_init (void *context, unsigned int flags)
{
@@ -166,6 +204,12 @@ sha512_init (void *context, unsigned int flags)
#ifdef USE_AVX2
ctx->use_avx2 = (features & HWF_INTEL_AVX2) && (features & HWF_INTEL_BMI2);
#endif
+#ifdef USE_PPC_CRYPTO
+ if ((features & HWF_PPC_VCRYPTO) != 0)
+ ctx->bctx.bwrite = do_sha512_transform_ppc8;
+ if ((features & HWF_PPC_VCRYPTO) != 0 && (features & HWF_PPC_ARCH_3_00) != 0)
+ ctx->bctx.bwrite = do_sha512_transform_ppc9;
+#endif
(void)features;
}
diff --git a/configure.ac b/configure.ac
index 06e122c9..953a20e9 100644
--- a/configure.ac
+++ b/configure.ac
@@ -1840,6 +1840,115 @@ if test "$gcry_cv_gcc_inline_asm_ppc_arch_3_00" = "yes" ; then
fi
+#
+# Check whether compiler supports PowerPC AltiVec/VSX intrinsics
+#
+AC_CACHE_CHECK([whether compiler supports PowerPC AltiVec/VSX intrinsics],
+ [gcry_cv_cc_ppc_altivec],
+ [if test "$mpi_cpu_arch" != "ppc" ; then
+ gcry_cv_cc_ppc_altivec="n/a"
+ else
+ gcry_cv_cc_ppc_altivec=no
+ AC_COMPILE_IFELSE([AC_LANG_SOURCE(
+ [[#include <altivec.h>
+ typedef vector unsigned char block;
+ block fn(block in)
+ {
+ block t = vec_perm (in, in, vec_vsx_ld (0, (unsigned char*)0));
+ return vec_cipher_be (t, in);
+ }
+ ]])],
+ [gcry_cv_cc_ppc_altivec=yes])
+ fi])
+if test "$gcry_cv_cc_ppc_altivec" = "yes" ; then
+ AC_DEFINE(HAVE_COMPATIBLE_CC_PPC_ALTIVEC,1,
+ [Defined if underlying compiler supports PowerPC AltiVec/VSX/crypto intrinsics])
+fi
+
+_gcc_cflags_save=$CFLAGS
+CFLAGS="$CFLAGS -maltivec -mvsx -mcrypto"
+
+if test "$gcry_cv_cc_ppc_altivec" = "no" &&
+ test "$mpi_cpu_arch" = "ppc" ; then
+ AC_CACHE_CHECK([whether compiler supports PowerPC AltiVec/VSX/crypto intrinsics with extra GCC flags],
+ [gcry_cv_cc_ppc_altivec_cflags],
+ [gcry_cv_cc_ppc_altivec_cflags=no
+ AC_COMPILE_IFELSE([AC_LANG_SOURCE(
+ [[#include <altivec.h>
+ typedef vector unsigned char block;
+ block fn(block in)
+ {
+ block t = vec_perm (in, in, vec_vsx_ld (0, (unsigned char*)0));
+ return vec_cipher_be (t, in);
+ }]])],
+ [gcry_cv_cc_ppc_altivec_cflags=yes])])
+ if test "$gcry_cv_cc_ppc_altivec_cflags" = "yes" ; then
+ AC_DEFINE(HAVE_COMPATIBLE_CC_PPC_ALTIVEC,1,
+ [Defined if underlying compiler supports PowerPC AltiVec/VSX/crypto intrinsics])
+ AC_DEFINE(HAVE_COMPATIBLE_CC_PPC_ALTIVEC_WITH_CFLAGS,1,
+ [Defined if underlying compiler supports PowerPC AltiVec/VSX/crypto intrinsics with extra GCC flags])
+ fi
+fi
+
+AM_CONDITIONAL(ENABLE_PPC_VCRYPTO_EXTRA_CFLAGS,
+ test "$gcry_cv_cc_ppc_altivec_cflags" = "yes")
+
+# Restore flags.
+CFLAGS=$_gcc_cflags_save;
+
+
+#
+# Check whether GCC inline assembler supports PowerPC AltiVec/VSX/crypto instructions
+#
+AC_CACHE_CHECK([whether GCC inline assembler supports PowerPC AltiVec/VSX/crypto instructions],
+ [gcry_cv_gcc_inline_asm_ppc_altivec],
+ [if test "$mpi_cpu_arch" != "ppc" ; then
+ gcry_cv_gcc_inline_asm_ppc_altivec="n/a"
+ else
+ gcry_cv_gcc_inline_asm_ppc_altivec=no
+ AC_COMPILE_IFELSE([AC_LANG_SOURCE(
+ [[__asm__(".globl testfn;\n"
+ "testfn:\n"
+ "stvx %v31,%r12,%r0;\n"
+ "lvx %v20,%r12,%r0;\n"
+ "vcipher %v0, %v1, %v22;\n"
+ "lxvw4x %vs32, %r0, %r1;\n"
+ "vadduwm %v0, %v1, %v22;\n"
+ "vshasigmaw %v0, %v1, 0, 15;\n"
+ "vshasigmad %v0, %v1, 0, 15;\n"
+ );
+ ]])],
+ [gcry_cv_gcc_inline_asm_ppc_altivec=yes])
+ fi])
+if test "$gcry_cv_gcc_inline_asm_ppc_altivec" = "yes" ; then
+ AC_DEFINE(HAVE_GCC_INLINE_ASM_PPC_ALTIVEC,1,
+ [Defined if inline assembler supports PowerPC AltiVec/VSX/crypto instructions])
+fi
+
+
+#
+# Check whether GCC inline assembler supports PowerISA 3.00 instructions
+#
+AC_CACHE_CHECK([whether GCC inline assembler supports PowerISA 3.00 instructions],
+ [gcry_cv_gcc_inline_asm_ppc_arch_3_00],
+ [if test "$mpi_cpu_arch" != "ppc" ; then
+ gcry_cv_gcc_inline_asm_ppc_arch_3_00="n/a"
+ else
+ gcry_cv_gcc_inline_asm_ppc_arch_3_00=no
+ AC_COMPILE_IFELSE([AC_LANG_SOURCE(
+ [[__asm__(".globl testfn;\n"
+ "testfn:\n"
+ "stxvb16x %r1,%v12,%v30;\n"
+ );
+ ]])],
+ [gcry_cv_gcc_inline_asm_ppc_arch_3_00=yes])
+ fi])
+if test "$gcry_cv_gcc_inline_asm_ppc_arch_3_00" = "yes" ; then
+ AC_DEFINE(HAVE_GCC_INLINE_ASM_PPC_ARCH_3_00,1,
+ [Defined if inline assembler supports PowerISA 3.00 instructions])
+fi
+
+
#######################################
#### Checks for library functions. ####
#######################################
@@ -2510,6 +2619,19 @@ if test "$found" = "1" ; then
# Build with the assembly implementation
GCRYPT_DIGESTS="$GCRYPT_DIGESTS sha256-armv8-aarch64-ce.lo"
;;
+ powerpc64le-*-*)
+ # Build with the crypto extension implementation
+ GCRYPT_CIPHERS="$GCRYPT_CIPHERS sha256-ppc.lo"
+ ;;
+ powerpc64-*-*)
+ # Big-Endian.
+ # Build with the crypto extension implementation
+ GCRYPT_CIPHERS="$GCRYPT_CIPHERS sha256-ppc.lo"
+ ;;
+ powerpc-*-*)
+ # Big-Endian.
+ # Build with the crypto extension implementation
+ GCRYPT_CIPHERS="$GCRYPT_CIPHERS sha256-ppc.lo"
esac
fi
@@ -2529,6 +2651,19 @@ if test "$found" = "1" ; then
# Build with the assembly implementation
GCRYPT_DIGESTS="$GCRYPT_DIGESTS sha512-arm.lo"
;;
+ powerpc64le-*-*)
+ # Build with the crypto extension implementation
+ GCRYPT_CIPHERS="$GCRYPT_CIPHERS sha512-ppc.lo"
+ ;;
+ powerpc64-*-*)
+ # Big-Endian.
+ # Build with the crypto extension implementation
+ GCRYPT_CIPHERS="$GCRYPT_CIPHERS sha512-ppc.lo"
+ ;;
+ powerpc-*-*)
+ # Big-Endian.
+ # Build with the crypto extension implementation
+ GCRYPT_CIPHERS="$GCRYPT_CIPHERS sha512-ppc.lo"
esac
if test x"$neonsupport" = xyes ; then