diff --git a/.gitignore b/.gitignore index 956ba5f..fed427b 100644 --- a/.gitignore +++ b/.gitignore @@ -23,3 +23,4 @@ libgcrypt-1.4.5-hobbled.tar.bz2 /libgcrypt-1.8.5-hobbled.tar.xz /libgcrypt-1.8.6-hobbled.tar.xz /libgcrypt-1.8.7-hobbled.tar.xz +/libgcrypt-1.9.0-hobbled.tar.xz diff --git a/curves.c b/curves.c index b68e0e2..5441b53 100644 --- a/curves.c +++ b/curves.c @@ -32,8 +32,8 @@ #define PGM "curves" #include "t-common.h" -/* Number of curves defined in ../cipger/ecc.c */ -#define N_CURVES 14 +/* Number of curves defined in ../cipger/ecc-curves.c */ +#define N_CURVES 19 /* A real world sample public key. */ static char const sample_key_1[] = @@ -52,8 +52,6 @@ static char const sample_key_1[] = static char const sample_key_1_curve[] = "NIST P-256"; static unsigned int sample_key_1_nbits = 256; - - static void list_curves (void) { @@ -95,7 +93,6 @@ check_matching (void) sample_key_1_nbits, nbits); gcry_sexp_release (key); - } @@ -117,7 +114,6 @@ check_get_params (void) sample_key_1_curve, name); gcry_sexp_release (param); - } @@ -132,10 +128,10 @@ main (int argc, char **argv) if (!gcry_check_version (GCRYPT_VERSION)) die ("version mismatch\n"); - xgcry_control (GCRYCTL_DISABLE_SECMEM, 0); - xgcry_control (GCRYCTL_INITIALIZATION_FINISHED, 0); + xgcry_control ((GCRYCTL_DISABLE_SECMEM, 0)); + xgcry_control ((GCRYCTL_INITIALIZATION_FINISHED, 0)); if (debug) - xgcry_control (GCRYCTL_SET_DEBUG_FLAGS, 1u, 0); + xgcry_control ((GCRYCTL_SET_DEBUG_FLAGS, 1u, 0)); list_curves (); check_matching (); check_get_params (); diff --git a/ecc-curves.c b/ecc-curves.c index 666ebc1..4242117 100644 --- a/ecc-curves.c +++ b/ecc-curves.c @@ -26,6 +26,7 @@ #include "g10lib.h" #include "mpi.h" +#include "mpi-internal.h" #include "cipher.h" #include "context.h" #include "ec-context.h" @@ -33,6 +34,10 @@ #include "ecc-common.h" +static gpg_err_code_t +point_from_keyparam (gcry_mpi_point_t *r_a, + gcry_sexp_t keyparam, const char *name, mpi_ec_t ec); + /* This tables defines aliases for 
curve names. */ static const struct { @@ -40,12 +45,15 @@ static const struct const char *other; /* Other name. */ } curve_aliases[] = { + { "Ed25519", "1.3.6.1.4.1.11591.15.1" }, /* OpenPGP */ + { "Ed25519", "1.3.101.112" }, /* rfc8410 */ + { "Curve25519", "1.3.6.1.4.1.3029.1.5.1" }, /* OpenPGP */ { "Curve25519", "1.3.101.110" }, /* rfc8410 */ { "Curve25519", "X25519" }, /* rfc8410 */ - { "Ed25519", "1.3.6.1.4.1.11591.15.1" },/* OpenPGP */ - { "Ed25519", "1.3.101.112" }, /* rfc8410 */ + { "Ed448", "1.3.101.113" }, /* rfc8410 */ + { "X448", "1.3.101.111" }, /* rfc8410 */ { "NIST P-224", "secp224r1" }, { "NIST P-224", "1.3.132.0.33" }, /* SECP OID. */ @@ -73,11 +81,26 @@ static const struct { "GOST2001-CryptoPro-A", "1.2.643.2.2.36.0" }, { "GOST2001-CryptoPro-C", "1.2.643.2.2.36.1" }, - { "GOST2012-tc26-A", "1.2.643.7.1.2.1.2.1" }, - { "GOST2012-tc26-B", "1.2.643.7.1.2.1.2.2" }, + { "GOST2012-256-tc26-A", "1.2.643.7.1.2.1.1.1" }, + { "GOST2001-CryptoPro-A", "1.2.643.7.1.2.1.1.2" }, + { "GOST2001-CryptoPro-A", "GOST2012-256-tc26-B" }, + { "GOST2001-CryptoPro-B", "1.2.643.7.1.2.1.1.3" }, + { "GOST2001-CryptoPro-B", "GOST2012-256-tc26-C" }, + { "GOST2001-CryptoPro-C", "1.2.643.7.1.2.1.1.4" }, + { "GOST2001-CryptoPro-C", "GOST2012-256-tc26-D" }, + + { "GOST2012-512-test", "GOST2012-test" }, + { "GOST2012-512-test", "1.2.643.7.1.2.1.2.0" }, + { "GOST2012-512-tc26-A", "GOST2012-tc26-A" }, + { "GOST2012-512-tc26-B", "GOST2012-tc26-B" }, + { "GOST2012-512-tc26-A", "1.2.643.7.1.2.1.2.1" }, + { "GOST2012-512-tc26-B", "1.2.643.7.1.2.1.2.2" }, + { "GOST2012-512-tc26-C", "1.2.643.7.1.2.1.2.3" }, { "secp256k1", "1.3.132.0.10" }, + { "sm2p256v1", "1.2.156.10197.1.301" }, + { NULL, NULL} }; @@ -102,7 +125,7 @@ typedef struct Curves (a,b) has ((A-2)/4,B^-1). */ const char *n; /* The order of the base point. */ const char *g_x, *g_y; /* Base point. */ - const char *h; /* Cofactor. */ + unsigned int h; /* Cofactor. 
*/ } ecc_domain_parms_t; @@ -111,7 +134,7 @@ static const ecc_domain_parms_t domain_parms[] = { { /* (-x^2 + y^2 = 1 + dx^2y^2) */ - "Ed25519", 256, 0, + "Ed25519", 255, 0, MPI_EC_EDWARDS, ECC_DIALECT_ED25519, "0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFED", "-0x01", @@ -119,11 +142,11 @@ static const ecc_domain_parms_t domain_parms[] = "0x1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", "0x216936D3CD6E53FEC0A4E231FDD6DC5C692CC7609525A7B2C9562D608F25D51A", "0x6666666666666666666666666666666666666666666666666666666666666658", - "0x08" + 8 }, { /* (y^2 = x^3 + 486662*x^2 + x) */ - "Curve25519", 256, 0, + "Curve25519", 255, 0, MPI_EC_MONTGOMERY, ECC_DIALECT_STANDARD, "0x7FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFED", "0x01DB41", @@ -131,7 +154,45 @@ static const ecc_domain_parms_t domain_parms[] = "0x1000000000000000000000000000000014DEF9DEA2F79CD65812631A5CF5D3ED", "0x0000000000000000000000000000000000000000000000000000000000000009", "0x20AE19A1B8A086B4E01EDD2C7748D14C923D4D7E6D7C61B229E9C5A27ECED3D9", - "0x08" + 8 + /* Note: As per RFC-7748 errata eid4730 the g_y value should be + * "0x5F51E65E475F794B1FE122D388B72EB36DC2B28192839E4DD6163A5D81312C14" + * but that breaks the keygrip. The new value is recovered in + * the function _gcry_ecc_fill_in_curve. See bug #4712. 
+ */ + }, + { + /* (x^2 + y^2 = 1 + dx^2y^2) */ + "Ed448", 448, 0, + MPI_EC_EDWARDS, ECC_DIALECT_SAFECURVE, + "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE" + "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", + "0x01", + "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE" + "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF6756", + "0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF" + "7CCA23E9C44EDB49AED63690216CC2728DC58F552378C292AB5844F3", + "0x4F1970C66BED0DED221D15A622BF36DA9E146570470F1767EA6DE324" + "A3D3A46412AE1AF72AB66511433B80E18B00938E2626A82BC70CC05E", + "0x693F46716EB6BC248876203756C9C7624BEA73736CA3984087789C1E" + "05A0C2D73AD3FF1CE67C39C4FDBD132C4ED7C8AD9808795BF230FA14", + 4, + }, + { + /* (y^2 = x^3 + 156326*x^2 + x) */ + "X448", 448, 0, + MPI_EC_MONTGOMERY, ECC_DIALECT_SAFECURVE, + "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFE" + "FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF", + "0x98A9", + "0x01", + "0x3FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFF" + "7CCA23E9C44EDB49AED63690216CC2728DC58F552378C292AB5844F3", + "0x00000000000000000000000000000000000000000000000000000000" + "00000000000000000000000000000000000000000000000000000005", + "0x7D235D1295F5B1F66C98AB6E58326FCECBAE5D34F55545D060F75DC2" + "8DF3F6EDB8027E2346430D211312C4B150677AF76FD7223D457B5B1A", + 4, }, { "NIST P-224", 224, 1, @@ -143,7 +204,7 @@ static const ecc_domain_parms_t domain_parms[] = "0xb70e0cbd6bb4bf7f321390b94a03c1d356c21122343280d6115c1d21", "0xbd376388b5f723fb4c22dfe6cd4375a05a07476444d5819985007e34", - "0x01" + 1 }, { "NIST P-256", 256, 1, @@ -155,7 +216,7 @@ static const ecc_domain_parms_t domain_parms[] = "0x6b17d1f2e12c4247f8bce6e563a440f277037d812deb33a0f4a13945d898c296", "0x4fe342e2fe1a7f9b8ee7eb4a7c0f9e162bce33576b315ececbb6406837bf51f5", - "0x01" + 1 }, { "NIST P-384", 384, 1, @@ -173,7 +234,7 @@ static const ecc_domain_parms_t domain_parms[] = "5502f25dbf55296c3a545e3872760ab7", 
"0x3617de4a96262c6f5d9e98bf9292dc29f8f41dbd289a147ce9da3113b5f0b8c0" "0a60b1ce1d7e819d7a431d7c90ea0e5f", - "0x01" + 1 }, { "NIST P-521", 521, 1, @@ -184,14 +245,14 @@ static const ecc_domain_parms_t domain_parms[] = "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffc", "0x051953eb9618e1c9a1f929a21a0b68540eea2da725b99b315f3b8b489918ef10" "9e156193951ec7e937b1652c0bd3bb1bf073573df883d2c34f1ef451fd46b503f00", - "0x1fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" - "ffa51868783bf2f966b7fcc0148f709a5d03bb5c9b8899c47aebb6fb71e91386409", + "0x01ffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + "fffa51868783bf2f966b7fcc0148f709a5d03bb5c9b8899c47aebb6fb71e91386409", "0x00c6858e06b70404e9cd9e3ecb662395b4429c648139053fb521f828af606b4d" "3dbaa14b5e77efe75928fe1dc127a2ffa8de3348b3c1856a429bf97e7e31c2e5bd66", "0x011839296a789a3bc0045c8a5fb42c7d1bd998f54449579b446817afbd17273e" "662c97ee72995ef42640c550b9013fad0761353c7086a272c24088be94769fd16650", - "0x01" + 1 }, { @@ -204,7 +265,7 @@ static const ecc_domain_parms_t domain_parms[] = "0x0000000000000000000000000000000000000000000000000000000000000002", "0x08e2a8a0e65147d4bd6316030e16d19c85c97f0a9ca267122b96abbcea7e8fc8", - "0x01" + 1 }, { "GOST2001-CryptoPro-A", 256, 0, @@ -215,7 +276,7 @@ static const ecc_domain_parms_t domain_parms[] = "0xffffffffffffffffffffffffffffffff6c611070995ad10045841b09b761b893", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x8d91e471e0989cda27df505a453f2b7635294f2ddf23e3b122acc99c9e9f1e14", - "0x01" + 1 }, { "GOST2001-CryptoPro-B", 256, 0, @@ -226,7 +287,7 @@ static const ecc_domain_parms_t domain_parms[] = "0x800000000000000000000000000000015f700cfff1a624e5e497161bcc8a198f", "0x0000000000000000000000000000000000000000000000000000000000000001", "0x3fa8124359f96680b83d1c3eb2c070e5c545c9858d03ecfb744bf8d717717efc", - "0x01" + 1 }, { "GOST2001-CryptoPro-C", 256, 0, @@ -237,10 +298,21 @@ static const ecc_domain_parms_t 
domain_parms[] = "0x9b9f605f5a858107ab1ec85e6b41c8aa582ca3511eddfb74f02f3a6598980bb9", "0x0000000000000000000000000000000000000000000000000000000000000000", "0x41ece55743711a8c3cbf3783cd08c0ee4d4dc440d4641a8f366e550dfdb3bb67", - "0x01" + 1 }, { - "GOST2012-test", 511, 0, + "GOST2012-256-A", 256, 0, + MPI_EC_WEIERSTRASS, ECC_DIALECT_STANDARD, + "0xfffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffd97", + "0xc2173f1513981673af4892c23035a27ce25e2013bf95aa33b22c656f277e7335", + "0x295f9bae7428ed9ccc20e7c359a9d41a22fccd9108e17bf7ba9337a6f8ae9513", + "0x400000000000000000000000000000000fd8cddfc87b6635c115af556c360c67", + "0x91e38443a5e82c0d880923425712b2bb658b9196932e02c78b2582fe742daa28", + "0x32879423ab1a0375895786c4bb46e9565fde0b5344766740af268adb32322e5c", + 4 + }, + { + "GOST2012-512-test", 511, 0, MPI_EC_WEIERSTRASS, ECC_DIALECT_STANDARD, "0x4531acd1fe0023c7550d267b6b2fee80922b14b2ffb90f04d4eb7c09b5d2d15d" "f1d852741af4704a0458047e80e4546d35b8336fac224dd81664bbf528be6373", @@ -254,10 +326,10 @@ static const ecc_domain_parms_t domain_parms[] = "fd60611262cd838dc6b60aa7eee804e28bc849977fac33b4b530f1b120248a9a", "0x2bb312a43bd2ce6e0d020613c857acddcfbf061e91e5f2c3f32447c259f39b2" "c83ab156d77f1496bf7eb3351e1ee4e43dc1a18b91b24640b6dbb92cb1add371e", - "0x01" + 1 }, { - "GOST2012-tc26-A", 512, 0, + "GOST2012-512-tc26-A", 512, 0, MPI_EC_WEIERSTRASS, ECC_DIALECT_STANDARD, "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdc7", @@ -271,10 +343,10 @@ static const ecc_domain_parms_t domain_parms[] = "0000000000000000000000000000000000000000000000000000000000000003", "0x7503cfe87a836ae3a61b8816e25450e6ce5e1c93acf1abc1778064fdcbefa921" "df1626be4fd036e93d75e6a50e3a41e98028fe5fc235f5b889a589cb5215f2a4", - "0x01" + 1 }, { - "GOST2012-tc26-B", 512, 0, + "GOST2012-512-tc26-B", 512, 0, MPI_EC_WEIERSTRASS, ECC_DIALECT_STANDARD, 
"0x8000000000000000000000000000000000000000000000000000000000000000" "000000000000000000000000000000000000000000000000000000000000006f", @@ -288,7 +360,24 @@ static const ecc_domain_parms_t domain_parms[] = "0000000000000000000000000000000000000000000000000000000000000002", "0x1a8f7eda389b094c2c071e3647a8940f3c123b697578c213be6dd9e6c8ec7335" "dcb228fd1edf4a39152cbcaaf8c0398828041055f94ceeec7e21340780fe41bd", - "0x01" + 1 + }, + { + "GOST2012-512-tc26-C", 512, 0, + MPI_EC_WEIERSTRASS, ECC_DIALECT_STANDARD, + "0xffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + "fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffdc7", + "0xdc9203e514a721875485a529d2c722fb187bc8980eb866644de41c68e1430645" + "46e861c0e2c9edd92ade71f46fcf50ff2ad97f951fda9f2a2eb6546f39689bd3", + "0xb4c4ee28cebc6c2c8ac12952cf37f16ac7efb6a9f69f4b57ffda2e4f0de5ade0" + "38cbc2fff719d2c18de0284b8bfef3b52b8cc7a5f5bf0a3c8d2319a5312557e1", + "0x3fffffffffffffffffffffffffffffffffffffffffffffffffffffffffffffff" + "c98cdba46506ab004c33a9ff5147502cc8eda9e7a769a12694623cef47f023ed", + "0xe2e31edfc23de7bdebe241ce593ef5de2295b7a9cbaef021d385f7074cea043a" + "a27272a7ae602bf2a7b9033db9ed3610c6fb85487eae97aac5bc7928c1950148", + "0xf5ce40d95b5eb899abbccff5911cb8577939804d6527378b8c108c3d2090ff9be" + "18e2d33e3021ed2ef32d85822423b6304f726aa854bae07d0396e9a9addc40f", + 4 }, { @@ -300,7 +389,19 @@ static const ecc_domain_parms_t domain_parms[] = "0xFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141", "0x79BE667EF9DCBBAC55A06295CE870B07029BFCDB2DCE28D959F2815B16F81798", "0x483ADA7726A3C4655DA4FBFC0E1108A8FD17B448A68554199C47D08FFB10D4B8", - "0x01" + 1 + }, + + { + "sm2p256v1", 256, 0, + MPI_EC_WEIERSTRASS, ECC_DIALECT_STANDARD, + "0xfffffffeffffffffffffffffffffffffffffffff00000000ffffffffffffffff", + "0xfffffffeffffffffffffffffffffffffffffffff00000000fffffffffffffffc", + "0x28e9fa9e9d9f5e344d5a9e4bcf6509a7f39789f515ab8f92ddbcbd414d940e93", + 
"0xfffffffeffffffffffffffffffffffff7203df6b21c6052b53bbf40939d54123", + "0x32c4ae2c1f1981195f9904466a39c9948fe30bbff2660be1715a4589334c74c7", + "0xbc3736a2f4f6779c59bdcee36b692153d0a9877cc62a474002df32e52139f0a0", + 1 }, { NULL, 0, 0, 0, 0, NULL, NULL, NULL, NULL, NULL } @@ -431,22 +532,43 @@ _gcry_ecc_fill_in_curve (unsigned int nbits, const char *name, { curve->a = scanval (domain_parms[idx].a); if (curve->a->sign) - mpi_add (curve->a, curve->p, curve->a); + { + mpi_resize (curve->a, curve->p->nlimbs); + _gcry_mpih_sub_n (curve->a->d, curve->p->d, + curve->a->d, curve->p->nlimbs); + curve->a->nlimbs = curve->p->nlimbs; + curve->a->sign = 0; + } } if (!curve->b) { curve->b = scanval (domain_parms[idx].b); if (curve->b->sign) - mpi_add (curve->b, curve->p, curve->b); + { + mpi_resize (curve->b, curve->p->nlimbs); + _gcry_mpih_sub_n (curve->b->d, curve->p->d, + curve->b->d, curve->p->nlimbs); + curve->b->nlimbs = curve->p->nlimbs; + curve->b->sign = 0; + } } if (!curve->n) curve->n = scanval (domain_parms[idx].n); - if (!curve->h) - curve->h = scanval (domain_parms[idx].h); if (!curve->G.x) curve->G.x = scanval (domain_parms[idx].g_x); if (!curve->G.y) curve->G.y = scanval (domain_parms[idx].g_y); + curve->h = domain_parms[idx].h; + + /* + * In the constants of domain_parms, we defined Curve25519 + * domain parameters as the ones in RFC-7748 before the errata + * (eid4730). To keep the computation having exact same values, + * we recover the new value of g_y, here. + */ + if (!strcmp (resname, "Curve25519")) + mpi_sub (curve->G.y, curve->p, curve->G.y); + if (!curve->G.z) curve->G.z = mpi_alloc_set_ui (1); if (!curve->name) @@ -458,15 +580,15 @@ _gcry_ecc_fill_in_curve (unsigned int nbits, const char *name, /* Give the name of the curve NAME, store the curve parameters into P, - A, B, G, N, and H if they point to NULL value. Note that G is returned - in standard uncompressed format. Also update MODEL and DIALECT if - they are not NULL. 
*/ + A, B, G, and N if they point to NULL value. Note that G is + returned in standard uncompressed format. Also update MODEL and + DIALECT if they are not NULL. */ gpg_err_code_t _gcry_ecc_update_curve_param (const char *name, enum gcry_mpi_ec_models *model, enum ecc_dialects *dialect, gcry_mpi_t *p, gcry_mpi_t *a, gcry_mpi_t *b, - gcry_mpi_t *g, gcry_mpi_t *n, gcry_mpi_t *h) + gcry_mpi_t *g, gcry_mpi_t *n) { int idx; @@ -516,11 +638,6 @@ _gcry_ecc_update_curve_param (const char *name, _gcry_mpi_release (*n); *n = scanval (domain_parms[idx].n); } - if (h) - { - _gcry_mpi_release (*h); - *h = scanval (domain_parms[idx].h); - } return 0; } @@ -533,7 +650,7 @@ _gcry_ecc_get_curve (gcry_sexp_t keyparms, int iterator, unsigned int *r_nbits) gpg_err_code_t rc; const char *result = NULL; elliptic_curve_t E; - gcry_mpi_t mpi_g = NULL; + gcry_mpi_point_t G = NULL; gcry_mpi_t tmp = NULL; int idx; @@ -558,9 +675,8 @@ _gcry_ecc_get_curve (gcry_sexp_t keyparms, int iterator, unsigned int *r_nbits) /* * Extract the curve parameters.. 
*/ - rc = gpg_err_code (sexp_extract_param (keyparms, NULL, "-pabgnh", - &E.p, &E.a, &E.b, &mpi_g, &E.n, &E.h, - NULL)); + rc = gpg_err_code (sexp_extract_param (keyparms, NULL, "pabn", + &E.p, &E.a, &E.b, &E.n, NULL)); if (rc == GPG_ERR_NO_OBJ) { /* This might be the second use case of checking whether a @@ -591,72 +707,100 @@ _gcry_ecc_get_curve (gcry_sexp_t keyparms, int iterator, unsigned int *r_nbits) if (rc) goto leave; - if (mpi_g) - { - _gcry_mpi_point_init (&E.G); - if (_gcry_ecc_os2ec (&E.G, mpi_g)) - goto leave; - } + rc = point_from_keyparam (&G, keyparms, "g", NULL); + if (rc) + goto leave; + + _gcry_mpi_point_init (&E.G); + _gcry_mpi_point_set (&E.G, G->x, G->y, G->z); for (idx = 0; domain_parms[idx].desc; idx++) { mpi_free (tmp); tmp = scanval (domain_parms[idx].p); - if (!mpi_cmp (tmp, E.p)) + if (mpi_cmp (tmp, E.p)) + continue; + + mpi_free (tmp); + tmp = scanval (domain_parms[idx].a); + if (tmp->sign) { - mpi_free (tmp); - tmp = scanval (domain_parms[idx].a); - if (!mpi_cmp (tmp, E.a)) + if (!mpi_cmpabs (tmp, E.a)) + /* For backward compatibility to <= libgcrypt 1.8, we + allow this match to support existing keys in SEXP. 
*/ + ; + else { - mpi_free (tmp); - tmp = scanval (domain_parms[idx].b); - if (!mpi_cmp (tmp, E.b)) - { - mpi_free (tmp); - tmp = scanval (domain_parms[idx].n); - if (!mpi_cmp (tmp, E.n)) - { - mpi_free (tmp); - tmp = scanval (domain_parms[idx].h); - if (!mpi_cmp (tmp, E.h)) - { - mpi_free (tmp); - tmp = scanval (domain_parms[idx].g_x); - if (!mpi_cmp (tmp, E.G.x)) - { - mpi_free (tmp); - tmp = scanval (domain_parms[idx].g_y); - if (!mpi_cmp (tmp, E.G.y)) - { - result = domain_parms[idx].desc; - if (r_nbits) - *r_nbits = domain_parms[idx].nbits; - goto leave; - } - } - } - } - } + mpi_resize (tmp, E.p->nlimbs); + _gcry_mpih_sub_n (tmp->d, E.p->d, + tmp->d, E.p->nlimbs); + tmp->nlimbs = E.p->nlimbs; + tmp->sign = 0; + if (mpi_cmp (tmp, E.a)) + continue; } } + else if (mpi_cmp (tmp, E.a)) + continue; + + mpi_free (tmp); + tmp = scanval (domain_parms[idx].b); + if (tmp->sign) + { + if (!mpi_cmpabs (tmp, E.b)) + /* Same for backward compatibility, see above. */ + ; + else + { + mpi_resize (tmp, E.p->nlimbs); + _gcry_mpih_sub_n (tmp->d, E.p->d, + tmp->d, E.p->nlimbs); + tmp->nlimbs = E.p->nlimbs; + tmp->sign = 0; + if (mpi_cmp (tmp, E.b)) + continue; + } + } + else if (mpi_cmp (tmp, E.b)) + continue; + + mpi_free (tmp); + tmp = scanval (domain_parms[idx].n); + if (mpi_cmp (tmp, E.n)) + continue; + + mpi_free (tmp); + tmp = scanval (domain_parms[idx].g_x); + if (mpi_cmp (tmp, E.G.x)) + continue; + + mpi_free (tmp); + tmp = scanval (domain_parms[idx].g_y); + if (mpi_cmp (tmp, E.G.y)) + continue; + + result = domain_parms[idx].desc; + if (r_nbits) + *r_nbits = domain_parms[idx].nbits; + break; } leave: + _gcry_mpi_point_release (G); _gcry_mpi_release (tmp); _gcry_mpi_release (E.p); _gcry_mpi_release (E.a); _gcry_mpi_release (E.b); - _gcry_mpi_release (mpi_g); _gcry_mpi_point_free_parts (&E.G); _gcry_mpi_release (E.n); - _gcry_mpi_release (E.h); return result; } /* Helper to extract an MPI from key parameters. 
*/ static gpg_err_code_t -mpi_from_keyparam (gcry_mpi_t *r_a, gcry_sexp_t keyparam, const char *name) +mpi_from_keyparam (gcry_mpi_t *r_a, gcry_sexp_t keyparam, const char *name, + int opaque) { gcry_err_code_t ec = 0; gcry_sexp_t l1; @@ -664,7 +808,7 @@ mpi_from_keyparam (gcry_mpi_t *r_a, gcry_sexp_t keyparam, const char *name) l1 = sexp_find_token (keyparam, name, 0); if (l1) { - *r_a = sexp_nth_mpi (l1, 1, GCRYMPI_FMT_USG); + *r_a = sexp_nth_mpi (l1, 1, opaque? GCRYMPI_FMT_OPAQUE : GCRYMPI_FMT_USG); sexp_release (l1); if (!*r_a) ec = GPG_ERR_INV_OBJ; @@ -696,10 +840,7 @@ point_from_keyparam (gcry_mpi_point_t *r_a, return GPG_ERR_INV_OBJ; point = mpi_point_new (0); - if (ec && ec->dialect == ECC_DIALECT_ED25519) - rc = _gcry_ecc_eddsa_decodepoint (a, ec, point, NULL, NULL); - else - rc = _gcry_ecc_os2ec (point, a); + rc = _gcry_mpi_ec_decode_point (point, a, ec); mpi_free (a); if (rc) { @@ -718,14 +859,14 @@ point_from_keyparam (gcry_mpi_point_t *r_a, if (!tmpname) return gpg_err_code_from_syserror (); strcpy (stpcpy (tmpname, name), ".x"); - rc = mpi_from_keyparam (&x, keyparam, tmpname); + rc = mpi_from_keyparam (&x, keyparam, tmpname, 0); if (rc) { xfree (tmpname); return rc; } strcpy (stpcpy (tmpname, name), ".y"); - rc = mpi_from_keyparam (&y, keyparam, tmpname); + rc = mpi_from_keyparam (&y, keyparam, tmpname, 0); if (rc) { mpi_free (x); @@ -733,7 +874,7 @@ point_from_keyparam (gcry_mpi_point_t *r_a, return rc; } strcpy (stpcpy (tmpname, name), ".z"); - rc = mpi_from_keyparam (&z, keyparam, tmpname); + rc = mpi_from_keyparam (&z, keyparam, tmpname, 0); if (rc) { mpi_free (y); @@ -761,33 +902,22 @@ point_from_keyparam (gcry_mpi_point_t *r_a, } -/* This function creates a new context for elliptic curve operations. - Either KEYPARAM or CURVENAME must be given. If both are given and - KEYPARAM has no curve parameter, CURVENAME is used to add missing - parameters. On success 0 is returned and the new context stored at - R_CTX. 
On error NULL is stored at R_CTX and an error code is - returned. The context needs to be released using - gcry_ctx_release. */ -gpg_err_code_t -_gcry_mpi_ec_new (gcry_ctx_t *r_ctx, - gcry_sexp_t keyparam, const char *curvename) + +static gpg_err_code_t +mpi_ec_get_elliptic_curve (elliptic_curve_t *E, int *r_flags, + gcry_sexp_t keyparam, const char *curvename) { gpg_err_code_t errc; - gcry_ctx_t ctx = NULL; - enum gcry_mpi_ec_models model = MPI_EC_WEIERSTRASS; - enum ecc_dialects dialect = ECC_DIALECT_STANDARD; - gcry_mpi_t p = NULL; - gcry_mpi_t a = NULL; - gcry_mpi_t b = NULL; - gcry_mpi_point_t G = NULL; - gcry_mpi_t n = NULL; - gcry_mpi_t h = NULL; - gcry_mpi_point_t Q = NULL; - gcry_mpi_t d = NULL; - int flags = 0; + unsigned int nbits; gcry_sexp_t l1; - *r_ctx = NULL; + errc = _gcry_pk_util_get_nbits (keyparam, &nbits); + if (errc) + return errc; + + E->model = MPI_EC_WEIERSTRASS; + E->dialect = ECC_DIALECT_STANDARD; + E->h = 1; if (keyparam) { @@ -795,11 +925,23 @@ _gcry_mpi_ec_new (gcry_ctx_t *r_ctx, l1 = sexp_find_token (keyparam, "flags", 0); if (l1) { + int flags = 0; + errc = _gcry_pk_util_parse_flaglist (l1, &flags, NULL); sexp_release (l1); l1 = NULL; if (errc) goto leave; + + *r_flags |= flags; + } + + /* Parse the deprecated optional transient-key flag. */ + l1 = sexp_find_token (keyparam, "transient-key", 0); + if (l1) + { + *r_flags |= PUBKEY_FLAG_TRANSIENT_KEY; + sexp_release (l1); } /* Check whether a curve name was given. */ @@ -807,26 +949,41 @@ _gcry_mpi_ec_new (gcry_ctx_t *r_ctx, /* If we don't have a curve name or if override parameters have explicitly been requested, parse them. 
*/ - if (!l1 || (flags & PUBKEY_FLAG_PARAM)) + if (!l1 || (*r_flags & PUBKEY_FLAG_PARAM)) { - errc = mpi_from_keyparam (&p, keyparam, "p"); + gcry_mpi_point_t G = NULL; + gcry_mpi_t cofactor = NULL; + + errc = mpi_from_keyparam (&E->p, keyparam, "p", 0); if (errc) goto leave; - errc = mpi_from_keyparam (&a, keyparam, "a"); + errc = mpi_from_keyparam (&E->a, keyparam, "a", 0); if (errc) goto leave; - errc = mpi_from_keyparam (&b, keyparam, "b"); + errc = mpi_from_keyparam (&E->b, keyparam, "b", 0); if (errc) goto leave; errc = point_from_keyparam (&G, keyparam, "g", NULL); if (errc) goto leave; - errc = mpi_from_keyparam (&n, keyparam, "n"); + if (G) + { + _gcry_mpi_point_init (&E->G); + mpi_point_set (&E->G, G->x, G->y, G->z); + mpi_point_set (G, NULL, NULL, NULL); + mpi_point_release (G); + } + errc = mpi_from_keyparam (&E->n, keyparam, "n", 0); if (errc) goto leave; - errc = mpi_from_keyparam (&h, keyparam, "h"); + errc = mpi_from_keyparam (&cofactor, keyparam, "h", 0); if (errc) goto leave; + if (cofactor) + { + mpi_get_ui (&E->h, cofactor); + mpi_free (cofactor); + } } } else @@ -836,10 +993,9 @@ _gcry_mpi_ec_new (gcry_ctx_t *r_ctx, in missing values. If no curve parameter is available try an optional provided curvename. If only the curvename has been given use that one. */ - if (l1 || curvename) + if (l1 || curvename || nbits) { char *name; - elliptic_curve_t *E; if (l1) { @@ -854,129 +1010,213 @@ _gcry_mpi_ec_new (gcry_ctx_t *r_ctx, else name = NULL; - E = xtrycalloc (1, sizeof *E); - if (!E) - { - errc = gpg_err_code_from_syserror (); - xfree (name); - goto leave; - } - - errc = _gcry_ecc_fill_in_curve (0, name? name : curvename, E, NULL); + errc = _gcry_ecc_fill_in_curve (nbits, name? 
name : curvename, E, NULL); xfree (name); if (errc) - { - xfree (E); - goto leave; - } - - model = E->model; - dialect = E->dialect; - - if (!p) - { - p = E->p; - E->p = NULL; - } - if (!a) - { - a = E->a; - E->a = NULL; - } - if (!b) - { - b = E->b; - E->b = NULL; - } - if (!G) - { - G = mpi_point_snatch_set (NULL, E->G.x, E->G.y, E->G.z); - E->G.x = NULL; - E->G.y = NULL; - E->G.z = NULL; - } - if (!n) - { - n = E->n; - E->n = NULL; - } - if (!h) - { - h = E->h; - E->h = NULL; - } - _gcry_ecc_curve_free (E); - xfree (E); - } - - - errc = _gcry_mpi_ec_p_new (&ctx, model, dialect, flags, p, a, b); - if (!errc) - { - mpi_ec_t ec = _gcry_ctx_get_pointer (ctx, CONTEXT_TYPE_EC); - - if (b) - { - mpi_free (ec->b); - ec->b = b; - b = NULL; - } - if (G) - { - ec->G = G; - G = NULL; - } - if (n) - { - ec->n = n; - n = NULL; - } - if (h) - { - ec->h = h; - h = NULL; - } - - /* Now that we know the curve name we can look for the public key - Q. point_from_keyparam needs to know the curve parameters so - that it is able to use the correct decompression. Parsing - the private key D could have been done earlier but it is less - surprising if we do it here as well. */ - if (keyparam) - { - errc = point_from_keyparam (&Q, keyparam, "q", ec); - if (errc) - goto leave; - errc = mpi_from_keyparam (&d, keyparam, "d"); - if (errc) - goto leave; - } - - if (Q) - { - ec->Q = Q; - Q = NULL; - } - if (d) - { - ec->d = d; - d = NULL; - } - - *r_ctx = ctx; - ctx = NULL; + goto leave; } leave: + return errc; +} + +static gpg_err_code_t +mpi_ec_setup_elliptic_curve (mpi_ec_t ec, int flags, + elliptic_curve_t *E, gcry_sexp_t keyparam) +{ + gpg_err_code_t errc = 0; + + ec->G = mpi_point_snatch_set (NULL, E->G.x, E->G.y, E->G.z); + E->G.x = NULL; + E->G.y = NULL; + E->G.z = NULL; + ec->n = E->n; + E->n = NULL; + ec->h = E->h; + ec->name = E->name; + + /* Now that we know the curve name we can look for the public key + Q. 
point_from_keyparam needs to know the curve parameters so + that it is able to use the correct decompression. Parsing + the private key D could have been done earlier but it is less + surprising if we do it here as well. */ + if (keyparam) + { + int is_opaque_bytes = ((ec->dialect == ECC_DIALECT_ED25519 + && (flags & PUBKEY_FLAG_EDDSA)) + || (ec->dialect == ECC_DIALECT_SAFECURVE)); + + errc = point_from_keyparam (&ec->Q, keyparam, "q", ec); + if (errc) + return errc; + errc = mpi_from_keyparam (&ec->d, keyparam, "d", is_opaque_bytes); + + /* Size of opaque bytes should match size of P. */ + if (!errc && ec->d && is_opaque_bytes) + { + unsigned int n = mpi_get_nbits (ec->d); + unsigned int len; + + len = (ec->nbits+7)/8; + /* EdDSA requires additional bit for sign. */ + if ((ec->nbits%8) == 0 && ec->model == MPI_EC_EDWARDS) + len++; + + if ((n+7)/8 != len) + { + if ((n+7)/8 < len && ec->dialect == ECC_DIALECT_ED25519) + { + /* + * GnuPG (<= 2.2) or OpenPGP implementations with no + * SOS support may remove zeros at the beginning. + * Recover those zeros. 
+ */ + const unsigned char *buf; + unsigned char *value; + + buf = mpi_get_opaque (ec->d, &n); + if (!buf) + return GPG_ERR_INV_OBJ; + + value = xtrycalloc_secure (1, len); + if (!value) + return gpg_err_code_from_syserror (); + + memset (value, 0, len - (n+7)/8); + memcpy (value + len - (n+7)/8, buf, (n+7)/8); + mpi_set_opaque (ec->d, value, len); + } + else + { + if (DBG_CIPHER) + log_debug ("scalar size (%d) != prime size (%d)", + (n+7)/8, len); + + errc = GPG_ERR_INV_OBJ; + } + } + } + } + + return errc; +} + +gpg_err_code_t +_gcry_mpi_ec_internal_new (mpi_ec_t *r_ec, int *r_flags, const char *name_op, + gcry_sexp_t keyparam, const char *curvename) +{ + gpg_err_code_t errc; + elliptic_curve_t E; + mpi_ec_t ec; + + *r_ec = NULL; + + memset (&E, 0, sizeof E); + errc = mpi_ec_get_elliptic_curve (&E, r_flags, keyparam, curvename); + if (errc) + goto leave; + + ec = _gcry_mpi_ec_p_internal_new (E.model, E.dialect, *r_flags, + E.p, E.a, E.b); + if (!ec) + goto leave; + + errc = mpi_ec_setup_elliptic_curve (ec, *r_flags, &E, keyparam); + if (errc) + { + _gcry_mpi_ec_free (ec); + goto leave; + } + else + *r_ec = ec; + + if (!errc && DBG_CIPHER) + { + gcry_mpi_t mpi_q = NULL; + gcry_sexp_t l1; + char msg[80]; + + l1 = sexp_find_token (keyparam, "q", 0); + if (l1) + { + mpi_q = sexp_nth_mpi (l1, 1, GCRYMPI_FMT_OPAQUE); + sexp_release (l1); + } + + log_debug ("%s info: %s/%s%s\n", name_op, + _gcry_ecc_model2str (ec->model), + _gcry_ecc_dialect2str (ec->dialect), + (*r_flags & PUBKEY_FLAG_EDDSA)? 
"+EdDSA" : ""); + if (ec->name) + log_debug ("%s name: %s\n", name_op, ec->name); + snprintf (msg, sizeof msg, "%s p", name_op); + log_printmpi (msg, ec->p); + snprintf (msg, sizeof msg, "%s a", name_op); + log_printmpi (msg, ec->a); + snprintf (msg, sizeof msg, "%s b", name_op); + log_printmpi (msg, ec->b); + snprintf (msg, sizeof msg, "%s g", name_op); + log_printpnt (msg, ec->G, NULL); + snprintf (msg, sizeof msg, "%s n", name_op); + log_printmpi (msg, ec->n); + log_debug ("%s h:+%02x\n", name_op, ec->h); + if (mpi_q) + { + snprintf (msg, sizeof msg, "%s q", name_op); + log_printmpi (msg, mpi_q); + mpi_free (mpi_q); + } + if (!fips_mode () && ec->d) + { + snprintf (msg, sizeof msg, "%s d", name_op); + log_printmpi (msg, ec->d); + } + } + + leave: + _gcry_ecc_curve_free (&E); + return errc; +} + +/* This function creates a new context for elliptic curve operations. + Either KEYPARAM or CURVENAME must be given. If both are given and + KEYPARAM has no curve parameter, CURVENAME is used to add missing + parameters. On success 0 is returned and the new context stored at + R_CTX. On error NULL is stored at R_CTX and an error code is + returned. The context needs to be released using + gcry_ctx_release. 
*/ +gpg_err_code_t +_gcry_mpi_ec_new (gcry_ctx_t *r_ctx, + gcry_sexp_t keyparam, const char *curvename) +{ + gpg_err_code_t errc; + elliptic_curve_t E; + gcry_ctx_t ctx = NULL; + int flags = 0; + mpi_ec_t ec; + + *r_ctx = NULL; + + memset (&E, 0, sizeof E); + errc = mpi_ec_get_elliptic_curve (&E, &flags, keyparam, curvename); + if (errc) + goto leave; + + errc = _gcry_mpi_ec_p_new (&ctx, E.model, E.dialect, flags, E.p, E.a, E.b); + if (errc) + goto leave; + + ec = _gcry_ctx_get_pointer (ctx, CONTEXT_TYPE_EC); + errc = mpi_ec_setup_elliptic_curve (ec, flags, &E, keyparam); + if (errc) + goto leave; + + *r_ctx = ctx; + ctx = NULL; + + leave: + _gcry_ecc_curve_free (&E); _gcry_ctx_release (ctx); - mpi_free (p); - mpi_free (a); - mpi_free (b); - _gcry_mpi_point_release (G); - mpi_free (n); - mpi_free (h); - _gcry_mpi_point_release (Q); - mpi_free (d); return errc; } @@ -989,7 +1229,7 @@ _gcry_ecc_get_param_sexp (const char *name) elliptic_curve_t E; mpi_ec_t ctx; gcry_mpi_t g_x, g_y; - gcry_mpi_t pkey[7]; + gcry_mpi_t pkey[5]; gcry_sexp_t result; int i; @@ -999,10 +1239,10 @@ _gcry_ecc_get_param_sexp (const char *name) g_x = mpi_new (0); g_y = mpi_new (0); - ctx = _gcry_mpi_ec_p_internal_new (MPI_EC_WEIERSTRASS, - ECC_DIALECT_STANDARD, + ctx = _gcry_mpi_ec_p_internal_new (E.model, + E.dialect, 0, - E.p, E.a, NULL); + E.p, E.a, E.b); if (_gcry_mpi_ec_get_affine (g_x, g_y, &E.G, ctx)) log_fatal ("ecc get param: Failed to get affine coordinates\n"); _gcry_mpi_ec_free (ctx); @@ -1013,18 +1253,16 @@ _gcry_ecc_get_param_sexp (const char *name) pkey[2] = E.b; pkey[3] = _gcry_ecc_ec2os (g_x, g_y, E.p); pkey[4] = E.n; - pkey[5] = E.h; - pkey[6] = NULL; mpi_free (g_x); mpi_free (g_y); if (sexp_build (&result, NULL, - "(public-key(ecc(p%m)(a%m)(b%m)(g%m)(n%m)(h%m)))", - pkey[0], pkey[1], pkey[2], pkey[3], pkey[4], pkey[5])) + "(public-key(ecc(p%m)(a%m)(b%m)(g%m)(n%m)(h%u)))", + pkey[0], pkey[1], pkey[2], pkey[3], pkey[4], E.h)) result = NULL; - for (i=0; pkey[i]; i++) + for (i=0; 
i < DIM (pkey); i++) _gcry_mpi_release (pkey[i]); return result; @@ -1048,8 +1286,12 @@ _gcry_ecc_get_mpi (const char *name, mpi_ec_t ec, int copy) return mpi_is_const (ec->b) && !copy? ec->b : mpi_copy (ec->b); if (!strcmp (name, "n") && ec->n) return mpi_is_const (ec->n) && !copy? ec->n : mpi_copy (ec->n); - if (!strcmp (name, "h") && ec->h) - return mpi_is_const (ec->h) && !copy? ec->h : mpi_copy (ec->h); + if (!strcmp (name, "h")) + { + gcry_mpi_t h = _gcry_mpi_get_const (ec->h); + + return !copy? h : mpi_set (NULL, h); + } if (!strcmp (name, "d") && ec->d) return mpi_is_const (ec->d) && !copy? ec->d : mpi_copy (ec->d); @@ -1075,7 +1317,7 @@ _gcry_ecc_get_mpi (const char *name, mpi_ec_t ec, int copy) { /* If only the private key is given, compute the public key. */ if (!ec->Q) - ec->Q = _gcry_ecc_compute_public (NULL, ec, NULL, NULL); + ec->Q = _gcry_ecc_compute_public (NULL, ec); if (!ec->Q) return NULL; @@ -1108,7 +1350,7 @@ _gcry_ecc_get_point (const char *name, mpi_ec_t ec) { /* If only the private key is given, compute the public key. 
*/ if (!ec->Q) - ec->Q = _gcry_ecc_compute_public (NULL, ec, NULL, NULL); + ec->Q = _gcry_ecc_compute_public (NULL, ec); if (ec->Q) return point_copy (ec->Q); @@ -1150,8 +1392,7 @@ _gcry_ecc_set_mpi (const char *name, gcry_mpi_t newvalue, mpi_ec_t ec) } else if (!strcmp (name, "h")) { - mpi_free (ec->h); - ec->h = mpi_copy (newvalue); + mpi_get_ui (&ec->h, newvalue); } else if (*name == 'q' && (!name[1] || name[1] == '@')) { @@ -1159,10 +1400,7 @@ _gcry_ecc_set_mpi (const char *name, gcry_mpi_t newvalue, mpi_ec_t ec) { if (!ec->Q) ec->Q = mpi_point_new (0); - if (ec->dialect == ECC_DIALECT_ED25519) - rc = _gcry_ecc_eddsa_decodepoint (newvalue, ec, ec->Q, NULL, NULL); - else - rc = _gcry_ecc_os2ec (ec->Q, newvalue); + rc = _gcry_mpi_ec_decode_point (ec->Q, newvalue, ec); } if (rc || !newvalue) { diff --git a/libgcrypt-1.8.3-cmac-selftest.patch b/libgcrypt-1.8.3-cmac-selftest.patch deleted file mode 100644 index d480092..0000000 --- a/libgcrypt-1.8.3-cmac-selftest.patch +++ /dev/null @@ -1,322 +0,0 @@ -diff -up libgcrypt-1.8.3/cipher/cipher-cmac.c.cmac-selftest libgcrypt-1.8.3/cipher/cipher-cmac.c ---- libgcrypt-1.8.3/cipher/cipher-cmac.c.cmac-selftest 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.3/cipher/cipher-cmac.c 2019-05-31 17:33:35.594407152 +0200 -@@ -251,3 +251,246 @@ _gcry_cipher_cmac_set_subkeys (gcry_ciph - - return GPG_ERR_NO_ERROR; - } -+ -+/* CMAC selftests. -+ * Copyright (C) 2008 Free Software Foundation, Inc. -+ * Copyright (C) 2019 Red Hat, Inc. -+ */ -+ -+ -+ -+/* Check one MAC with MAC ALGO using the regular MAC -+ * API. (DATA,DATALEN) is the data to be MACed, (KEY,KEYLEN) the key -+ * and (EXPECT,EXPECTLEN) the expected result. If TRUNC is set, the -+ * EXPECTLEN may be less than the digest length. Returns NULL on -+ * success or a string describing the failure. 
*/ -+static const char * -+check_one (int algo, -+ const void *data, size_t datalen, -+ const void *key, size_t keylen, -+ const void *expect, size_t expectlen) -+{ -+ gcry_mac_hd_t hd; -+ unsigned char mac[512]; /* hardcoded to avoid allocation */ -+ size_t macoutlen = expectlen; -+ -+/* printf ("MAC algo %d\n", algo); */ -+ if (_gcry_mac_get_algo_maclen (algo) != expectlen || -+ expectlen > sizeof (mac)) -+ return "invalid tests data"; -+ if (_gcry_mac_open (&hd, algo, 0, NULL)) -+ return "gcry_mac_open failed"; -+ if (_gcry_mac_setkey (hd, key, keylen)) -+ { -+ _gcry_mac_close (hd); -+ return "gcry_md_setkey failed"; -+ } -+ if (_gcry_mac_write (hd, data, datalen)) -+ { -+ _gcry_mac_close (hd); -+ return "gcry_mac_write failed"; -+ } -+ if (_gcry_mac_read (hd, mac, &macoutlen)) -+ { -+ _gcry_mac_close (hd); -+ return "gcry_mac_read failed"; -+ } -+ _gcry_mac_close (hd); -+ if (macoutlen != expectlen || memcmp (mac, expect, expectlen)) -+ { -+/* int i; */ -+ -+/* fputs (" {", stdout); */ -+/* for (i=0; i < expectlen-1; i++) */ -+/* { */ -+/* if (i && !(i % 8)) */ -+/* fputs ("\n ", stdout); */ -+/* printf (" 0x%02x,", mac[i]); */ -+/* } */ -+/* printf (" 0x%02x } },\n", mac[i]); */ -+ -+ return "does not match"; -+ } -+ return NULL; -+} -+ -+ -+static gpg_err_code_t -+selftests_cmac_tdes (int extended, selftest_report_func_t report) -+{ -+ const char *what; -+ const char *errtxt; -+ -+ what = "Basic TDES"; -+ errtxt = check_one (GCRY_MAC_CMAC_3DES, -+ "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a" -+ "\xae\x2d\x8a\x57", 20, -+ "\x8a\xa8\x3b\xf8\xcb\xda\x10\x62\x0b\xc1\xbf\x19\xfb\xb6\xcd\x58" -+ "\xbc\x31\x3d\x4a\x37\x1c\xa8\xb5", 24, -+ "\x74\x3d\xdb\xe0\xce\x2d\xc2\xed", 8); -+ if (errtxt) -+ goto failed; -+ -+ if (extended) -+ { -+ what = "Extended TDES #1"; -+ errtxt = check_one (GCRY_MAC_CMAC_3DES, -+ "", 0, -+ "\x8a\xa8\x3b\xf8\xcb\xda\x10\x62\x0b\xc1\xbf\x19\xfb\xb6\xcd\x58" -+ "\xbc\x31\x3d\x4a\x37\x1c\xa8\xb5", 24, -+ 
"\xb7\xa6\x88\xe1\x22\xff\xaf\x95", 8); -+ if (errtxt) -+ goto failed; -+ -+ what = "Extended TDES #2"; -+ errtxt = check_one (GCRY_MAC_CMAC_3DES, -+ "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96", 8, -+ "\x8a\xa8\x3b\xf8\xcb\xda\x10\x62\x0b\xc1\xbf\x19\xfb\xb6\xcd\x58" -+ "\xbc\x31\x3d\x4a\x37\x1c\xa8\xb5", 24, -+ "\x8e\x8f\x29\x31\x36\x28\x37\x97", 8); -+ if (errtxt) -+ goto failed; -+ -+ what = "Extended TDES #3"; -+ errtxt = check_one (GCRY_MAC_CMAC_3DES, -+ "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a" -+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c\x9e\xb7\x6f\xac\x45\xaf\x8e\x51", 32, -+ "\x8a\xa8\x3b\xf8\xcb\xda\x10\x62\x0b\xc1\xbf\x19\xfb\xb6\xcd\x58" -+ "\xbc\x31\x3d\x4a\x37\x1c\xa8\xb5", 24, -+ "\x33\xe6\xb1\x09\x24\x00\xea\xe5", 8); -+ if (errtxt) -+ goto failed; -+ } -+ -+ return 0; /* Succeeded. */ -+ -+ failed: -+ if (report) -+ report ("cmac", GCRY_MAC_CMAC_3DES, what, errtxt); -+ return GPG_ERR_SELFTEST_FAILED; -+} -+ -+ -+ -+static gpg_err_code_t -+selftests_cmac_aes (int extended, selftest_report_func_t report) -+{ -+ const char *what; -+ const char *errtxt; -+ -+ what = "Basic AES128"; -+ errtxt = check_one (GCRY_MAC_CMAC_AES, -+ "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a" -+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" -+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11", 40, -+ "\x2b\x7e\x15\x16\x28\xae\xd2\xa6\xab\xf7\x15\x88\x09\xcf\x4f\x3c", 16, -+ "\xdf\xa6\x67\x47\xde\x9a\xe6\x30\x30\xca\x32\x61\x14\x97\xc8\x27", 16); -+ if (errtxt) -+ goto failed; -+ -+ what = "Basic AES192"; -+ errtxt = check_one (GCRY_MAC_CMAC_AES, -+ "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a" -+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" -+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11", 40, -+ "\x8e\x73\xb0\xf7\xda\x0e\x64\x52\xc8\x10\xf3\x2b\x80\x90\x79\xe5" -+ "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b", 24, -+ "\x8a\x1d\xe5\xbe\x2e\xb3\x1a\xad\x08\x9a\x82\xe6\xee\x90\x8b\x0e", 16); -+ if (errtxt) -+ goto 
failed; -+ -+ what = "Basic AES256"; -+ errtxt = check_one (GCRY_MAC_CMAC_AES, -+ "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a" -+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" -+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11", 40, -+ "\x60\x3d\xeb\x10\x15\xca\x71\xbe\x2b\x73\xae\xf0\x85\x7d\x77\x81" -+ "\x1f\x35\x2c\x07\x3b\x61\x08\xd7\x2d\x98\x10\xa3\x09\x14\xdf\xf4", 32, -+ "\xaa\xf3\xd8\xf1\xde\x56\x40\xc2\x32\xf5\xb1\x69\xb9\xc9\x11\xe6", 16); -+ if (errtxt) -+ goto failed; -+ if (extended) -+ { -+ what = "Extended AES #1"; -+ errtxt = check_one (GCRY_MAC_CMAC_AES, -+ "", 0, -+ "\x2b\x7e\x15\x16\x28\xae\xd2\xa6\xab\xf7\x15\x88\x09\xcf\x4f\x3c", 16, -+ "\xbb\x1d\x69\x29\xe9\x59\x37\x28\x7f\xa3\x7d\x12\x9b\x75\x67\x46", 16); -+ if (errtxt) -+ goto failed; -+ -+ what = "Extended AES #2"; -+ errtxt = check_one (GCRY_MAC_CMAC_AES, -+ "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a", 16, -+ "\x8e\x73\xb0\xf7\xda\x0e\x64\x52\xc8\x10\xf3\x2b\x80\x90\x79\xe5" -+ "\x62\xf8\xea\xd2\x52\x2c\x6b\x7b", 24, -+ "\x9e\x99\xa7\xbf\x31\xe7\x10\x90\x06\x62\xf6\x5e\x61\x7c\x51\x84", 16); -+ if (errtxt) -+ goto failed; -+ -+ what = "Extended AES #3"; -+ errtxt = check_one (GCRY_MAC_CMAC_AES, -+ "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a" -+ "\xae\x2d\x8a\x57\x1e\x03\xac\x9c\x9e\xb7\x6f\xac\x45\xaf\x8e\x51" -+ "\x30\xc8\x1c\x46\xa3\x5c\xe4\x11\xe5\xfb\xc1\x19\x1a\x0a\x52\xef" -+ "\xf6\x9f\x24\x45\xdf\x4f\x9b\x17\xad\x2b\x41\x7b\xe6\x6c\x37\x10", 64, -+ "\x60\x3d\xeb\x10\x15\xca\x71\xbe\x2b\x73\xae\xf0\x85\x7d\x77\x81" -+ "\x1f\x35\x2c\x07\x3b\x61\x08\xd7\x2d\x98\x10\xa3\x09\x14\xdf\xf4", 32, -+ "\xe1\x99\x21\x90\x54\x9f\x6e\xd5\x69\x6a\x2c\x05\x6c\x31\x54\x10", 16 ); -+ if (errtxt) -+ goto failed; -+ } -+ -+ return 0; /* Succeeded. 
*/ -+ -+ failed: -+ if (report) -+ report ("cmac", GCRY_MAC_CMAC_AES, what, errtxt); -+ return GPG_ERR_SELFTEST_FAILED; -+} -+ -+ -+/* Run a full self-test for ALGO and return 0 on success. */ -+static gpg_err_code_t -+run_cmac_selftests (int algo, int extended, selftest_report_func_t report) -+{ -+ gpg_err_code_t ec; -+ -+ switch (algo) -+ { -+ case GCRY_MAC_CMAC_3DES: -+ ec = selftests_cmac_tdes (extended, report); -+ break; -+ case GCRY_MAC_CMAC_AES: -+ ec = selftests_cmac_aes (extended, report); -+ break; -+ -+ default: -+ ec = GPG_ERR_MAC_ALGO; -+ break; -+ } -+ return ec; -+} -+ -+ -+ -+ -+/* Run the selftests for CMAC with CMAC algorithm ALGO with optional -+ reporting function REPORT. */ -+gpg_error_t -+_gcry_cmac_selftest (int algo, int extended, selftest_report_func_t report) -+{ -+ gcry_err_code_t ec = 0; -+ -+ if (!_gcry_mac_algo_info( algo, GCRYCTL_TEST_ALGO, NULL, NULL )) -+ { -+ ec = run_cmac_selftests (algo, extended, report); -+ } -+ else -+ { -+ ec = GPG_ERR_MAC_ALGO; -+ if (report) -+ report ("mac", algo, "module", "algorithm not available"); -+ } -+ return gpg_error (ec); -+} -diff -up libgcrypt-1.8.3/src/cipher-proto.h.cmac-selftest libgcrypt-1.8.3/src/cipher-proto.h ---- libgcrypt-1.8.3/src/cipher-proto.h.cmac-selftest 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.3/src/cipher-proto.h 2019-05-31 17:29:34.574588234 +0200 -@@ -256,6 +256,8 @@ gcry_error_t _gcry_pk_selftest (int algo - selftest_report_func_t report); - gcry_error_t _gcry_hmac_selftest (int algo, int extended, - selftest_report_func_t report); -+gcry_error_t _gcry_cmac_selftest (int algo, int extended, -+ selftest_report_func_t report); - - gcry_error_t _gcry_random_selftest (selftest_report_func_t report); - -diff -up libgcrypt-1.8.3/src/fips.c.cmac-selftest libgcrypt-1.8.3/src/fips.c ---- libgcrypt-1.8.3/src/fips.c.cmac-selftest 2018-11-01 15:40:36.051865535 +0100 -+++ libgcrypt-1.8.3/src/fips.c 2019-05-31 17:31:20.157756640 +0200 -@@ -521,29 +521,32 @@ 
run_digest_selftests (int extended) - - /* Run self-tests for all HMAC algorithms. Return 0 on success. */ - static int --run_hmac_selftests (int extended) -+run_mac_selftests (int extended) - { -- static int algos[] = -+ static int algos[][2] = - { -- GCRY_MD_SHA1, -- GCRY_MD_SHA224, -- GCRY_MD_SHA256, -- GCRY_MD_SHA384, -- GCRY_MD_SHA512, -- GCRY_MD_SHA3_224, -- GCRY_MD_SHA3_256, -- GCRY_MD_SHA3_384, -- GCRY_MD_SHA3_512, -- 0 -+ { GCRY_MD_SHA1, 0 }, -+ { GCRY_MD_SHA224, 0 }, -+ { GCRY_MD_SHA256, 0 }, -+ { GCRY_MD_SHA384, 0 }, -+ { GCRY_MD_SHA512, 0 }, -+ { GCRY_MD_SHA3_224, 0 }, -+ { GCRY_MD_SHA3_256, 0 }, -+ { GCRY_MD_SHA3_384, 0 }, -+ { GCRY_MD_SHA3_512, 0 }, -+ { GCRY_MAC_CMAC_3DES, 1 }, -+ { GCRY_MAC_CMAC_AES, 1 }, -+ { 0, 0 } - }; - int idx; - gpg_error_t err; - int anyerr = 0; - -- for (idx=0; algos[idx]; idx++) -+ for (idx=0; algos[idx][0]; idx++) - { -- err = _gcry_hmac_selftest (algos[idx], extended, reporter); -- reporter ("hmac", algos[idx], NULL, -+ err = algos[idx][1] ? _gcry_cmac_selftest (algos[idx][0], extended, reporter) : -+ _gcry_hmac_selftest (algos[idx][0], extended, reporter); -+ reporter (algos[idx][1] ? "cmac" : "hmac", algos[idx][0], NULL, - err? gpg_strerror (err):NULL); - if (err) - anyerr = 1; -@@ -747,7 +750,7 @@ _gcry_fips_run_selftests (int extended) - if (run_digest_selftests (extended)) - goto leave; - -- if (run_hmac_selftests (extended)) -+ if (run_mac_selftests (extended)) - goto leave; - - /* Run random tests before the pubkey tests because the latter diff --git a/libgcrypt-1.8.3-fips-ctor.patch b/libgcrypt-1.8.3-fips-ctor.patch index 66abd59..adecbe4 100644 --- a/libgcrypt-1.8.3-fips-ctor.patch +++ b/libgcrypt-1.8.3-fips-ctor.patch @@ -50,7 +50,7 @@ diff -up libgcrypt-1.8.3/src/global.c.fips-ctor libgcrypt-1.8.3/src/global.c break; case GCRYCTL_SET_ENFORCED_FIPS_FLAG: -- if (!any_init_done) +- if (!_gcry_global_any_init_done) + if (fips_mode ()) { - /* Not yet initialized at all. 
Set the enforced fips mode flag */ diff --git a/libgcrypt-1.8.3-fips-enttest.patch b/libgcrypt-1.8.3-fips-enttest.patch index b6b09ba..34e54bf 100644 --- a/libgcrypt-1.8.3-fips-enttest.patch +++ b/libgcrypt-1.8.3-fips-enttest.patch @@ -2,7 +2,7 @@ diff -up libgcrypt-1.8.3/random/random-drbg.c.fips-enttest libgcrypt-1.8.3/rando --- libgcrypt-1.8.3/random/random-drbg.c.fips-enttest 2017-11-23 19:16:58.000000000 +0100 +++ libgcrypt-1.8.3/random/random-drbg.c 2019-06-24 10:04:23.219547141 +0200 @@ -317,6 +317,7 @@ struct drbg_state_s - unsigned char *ctr_null; /* CTR mode zero buffer */ + gcry_cipher_hd_t ctr_handle; /* CTR mode cipher handle */ int seeded:1; /* DRBG fully seeded? */ int pr:1; /* Prediction resistance enabled? */ + int ent_primed:1; /* Previous entropy data primed? */ diff --git a/libgcrypt-1.8.4-tests-fipsmode.patch b/libgcrypt-1.8.4-tests-fipsmode.patch index 1442a0b..d6e29d1 100644 --- a/libgcrypt-1.8.4-tests-fipsmode.patch +++ b/libgcrypt-1.8.4-tests-fipsmode.patch @@ -2,7 +2,7 @@ diff -up libgcrypt-1.8.4/tests/basic.c.tests-fipsmode libgcrypt-1.8.4/tests/basi --- libgcrypt-1.8.4/tests/basic.c.tests-fipsmode 2018-04-17 17:29:40.000000000 +0200 +++ libgcrypt-1.8.4/tests/basic.c 2019-02-12 13:30:48.935791024 +0100 @@ -6964,7 +6964,7 @@ check_ciphers (void) - check_one_cipher (algos[i], GCRY_CIPHER_MODE_CTR, 0); + check_one_cipher (algos[i], GCRY_CIPHER_MODE_EAX, 0); if (gcry_cipher_get_algo_blklen (algos[i]) == GCRY_CCM_BLOCK_LEN) check_one_cipher (algos[i], GCRY_CIPHER_MODE_CCM, 0); - if (gcry_cipher_get_algo_blklen (algos[i]) == GCRY_GCM_BLOCK_LEN) @@ -10,7 +10,7 @@ diff -up libgcrypt-1.8.4/tests/basic.c.tests-fipsmode libgcrypt-1.8.4/tests/basi check_one_cipher (algos[i], GCRY_CIPHER_MODE_GCM, 0); if (gcry_cipher_get_algo_blklen (algos[i]) == GCRY_OCB_BLOCK_LEN) check_one_cipher (algos[i], GCRY_CIPHER_MODE_OCB, 0); -@@ -7010,11 +7010,17 @@ check_cipher_modes(void) +@@ -7010,12 +7010,18 @@ check_cipher_modes(void) check_cfb_cipher (); 
check_ofb_cipher (); check_ccm_cipher (); @@ -24,6 +24,7 @@ diff -up libgcrypt-1.8.4/tests/basic.c.tests-fipsmode libgcrypt-1.8.4/tests/basi + check_ocb_cipher (); + } check_xts_cipher (); + check_eax_cipher (); - check_gost28147_cipher (); + if (!in_fips_mode) + { @@ -46,7 +47,7 @@ diff -up libgcrypt-1.8.4/tests/basic.c.tests-fipsmode libgcrypt-1.8.4/tests/basi gcry_md_hd_t md; - /* First trigger a self-test. */ -- xgcry_control (GCRYCTL_FORCE_FIPS_MODE, 0); +- xgcry_control ((GCRYCTL_FORCE_FIPS_MODE, 0)); if (!gcry_control (GCRYCTL_OPERATIONAL_P, 0)) fail ("not in operational state after self-test\n"); @@ -58,7 +59,7 @@ diff -up libgcrypt-1.8.4/tests/basic.c.tests-fipsmode libgcrypt-1.8.4/tests/basi - { - /* Now run a self-test and to get back into - operational state. */ -- xgcry_control (GCRYCTL_FORCE_FIPS_MODE, 0); +- xgcry_control ((GCRYCTL_FORCE_FIPS_MODE, 0)); - if (!gcry_control (GCRYCTL_OPERATIONAL_P, 0)) - fail ("did not reach operational after error " - "and self-test\n"); @@ -70,7 +71,7 @@ diff -up libgcrypt-1.8.4/tests/benchmark.c.tests-fipsmode libgcrypt-1.8.4/tests/ --- libgcrypt-1.8.4/tests/benchmark.c.tests-fipsmode 2019-02-12 11:31:44.859603883 +0100 +++ libgcrypt-1.8.4/tests/benchmark.c 2019-02-12 14:10:40.271999352 +0100 @@ -872,8 +872,10 @@ cipher_bench ( const char *algoname ) - || (blklen == 1 && modes[modeidx].mode != GCRY_CIPHER_MODE_STREAM)) + && algo != GCRY_CIPHER_CHACHA20) continue; - if (modes[modeidx].req_blocksize > 0 @@ -150,9 +151,9 @@ diff -up libgcrypt-1.8.4/tests/t-cv25519.c.tests-fipsmode libgcrypt-1.8.4/tests/ --- libgcrypt-1.8.4/tests/t-cv25519.c.tests-fipsmode 2017-11-23 19:16:58.000000000 +0100 +++ libgcrypt-1.8.4/tests/t-cv25519.c 2019-02-12 14:02:35.935705390 +0100 @@ -560,6 +560,9 @@ main (int argc, char **argv) - xgcry_control (GCRYCTL_SET_DEBUG_FLAGS, 1u , 0); - xgcry_control (GCRYCTL_ENABLE_QUICK_RANDOM, 0); - xgcry_control (GCRYCTL_INITIALIZATION_FINISHED, 0); + xgcry_control ((GCRYCTL_SET_DEBUG_FLAGS, 1u , 0)); + 
xgcry_control ((GCRYCTL_ENABLE_QUICK_RANDOM, 0)); + xgcry_control ((GCRYCTL_INITIALIZATION_FINISHED, 0)); + /* Curve25519 isn't supported in fips mode */ + if (gcry_fips_mode_active()) + return 77; @@ -163,13 +164,13 @@ diff -up libgcrypt-1.8.4/tests/t-secmem.c.tests-fipsmode libgcrypt-1.8.4/tests/t --- libgcrypt-1.8.4/tests/t-secmem.c.tests-fipsmode 2017-11-23 19:19:54.000000000 +0100 +++ libgcrypt-1.8.4/tests/t-secmem.c 2019-02-12 11:51:02.462190538 +0100 @@ -174,7 +174,8 @@ main (int argc, char **argv) - xgcry_control (GCRYCTL_SET_DEBUG_FLAGS, 1u , 0); - xgcry_control (GCRYCTL_ENABLE_QUICK_RANDOM, 0); - xgcry_control (GCRYCTL_INIT_SECMEM, pool_size, 0); + xgcry_control ((GCRYCTL_SET_DEBUG_FLAGS, 1u , 0)); + xgcry_control ((GCRYCTL_ENABLE_QUICK_RANDOM, 0)); + xgcry_control ((GCRYCTL_INIT_SECMEM, pool_size, 0)); - gcry_set_outofcore_handler (outofcore_handler, NULL); + if (!gcry_fips_mode_active ()) + gcry_set_outofcore_handler (outofcore_handler, NULL); - xgcry_control (GCRYCTL_INITIALIZATION_FINISHED, 0); + xgcry_control ((GCRYCTL_INITIALIZATION_FINISHED, 0)); /* Libgcrypt prints a warning when the first overflow is allocated; @@ -184,7 +185,8 @@ main (int argc, char **argv) diff --git a/libgcrypt-1.8.4-use-poll.patch b/libgcrypt-1.8.4-use-poll.patch index b96c6ce..d55b6a3 100644 --- a/libgcrypt-1.8.4-use-poll.patch +++ b/libgcrypt-1.8.4-use-poll.patch @@ -6,9 +6,9 @@ diff -up libgcrypt-1.8.4/random/rndlinux.c.use-poll libgcrypt-1.8.4/random/rndli #include #include +#include - #if defined(__linux__) && defined(HAVE_SYSCALL) + #if defined(__linux__) || !defined(HAVE_GETENTROPY) + #ifdef HAVE_SYSCALL # include - #endif @@ -241,9 +242,8 @@ _gcry_rndlinux_gather_random (void (*add return with something we will actually use 100ms. */ while (length) @@ -18,8 +18,8 @@ diff -up libgcrypt-1.8.4/random/rndlinux.c.use-poll libgcrypt-1.8.4/random/rndli int rc; + struct pollfd pfd; - /* If we have a modern Linux kernel, we first try to use the new - * getrandom syscall. 
That call guarantees that the kernel's + /* If we have a modern operating system, we first try to use the new + * getentropy function. That call guarantees that the kernel's @@ -300,36 +300,25 @@ _gcry_rndlinux_gather_random (void (*add any_need_entropy = 1; } diff --git a/libgcrypt-1.8.5-aes-perf.patch b/libgcrypt-1.8.5-aes-perf.patch deleted file mode 100644 index 268ce70..0000000 --- a/libgcrypt-1.8.5-aes-perf.patch +++ /dev/null @@ -1,8156 +0,0 @@ -diff -up libgcrypt-1.8.5/cipher/arcfour.c.aes-perf libgcrypt-1.8.5/cipher/arcfour.c ---- libgcrypt-1.8.5/cipher/arcfour.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/arcfour.c 2020-04-22 18:29:41.662862382 +0200 -@@ -184,10 +184,12 @@ do_arcfour_setkey (void *context, const - } - - static gcry_err_code_t --arcfour_setkey ( void *context, const byte *key, unsigned int keylen ) -+arcfour_setkey ( void *context, const byte *key, unsigned int keylen, -+ gcry_cipher_hd_t hd ) - { - ARCFOUR_context *ctx = (ARCFOUR_context *) context; - gcry_err_code_t rc = do_arcfour_setkey (ctx, key, keylen ); -+ (void)hd; - return rc; - } - -@@ -207,11 +209,11 @@ selftest(void) - static const byte ciphertext_1[] = - { 0xF1, 0x38, 0x29, 0xC9, 0xDE }; - -- arcfour_setkey( &ctx, key_1, sizeof(key_1)); -+ arcfour_setkey( &ctx, key_1, sizeof(key_1), NULL); - encrypt_stream( &ctx, scratch, plaintext_1, sizeof(plaintext_1)); - if ( memcmp (scratch, ciphertext_1, sizeof (ciphertext_1))) - return "Arcfour encryption test 1 failed."; -- arcfour_setkey( &ctx, key_1, sizeof(key_1)); -+ arcfour_setkey( &ctx, key_1, sizeof(key_1), NULL); - encrypt_stream(&ctx, scratch, scratch, sizeof(plaintext_1)); /* decrypt */ - if ( memcmp (scratch, plaintext_1, sizeof (plaintext_1))) - return "Arcfour decryption test 1 failed."; -diff -up libgcrypt-1.8.5/cipher/blowfish.c.aes-perf libgcrypt-1.8.5/cipher/blowfish.c ---- libgcrypt-1.8.5/cipher/blowfish.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/blowfish.c 
2020-04-22 18:29:41.663862363 +0200 -@@ -37,6 +37,7 @@ - #include "g10lib.h" - #include "cipher.h" - #include "bufhelp.h" -+#include "cipher-internal.h" - #include "cipher-selftest.h" - - #define BLOWFISH_BLOCKSIZE 8 -@@ -67,7 +68,8 @@ typedef struct { - u32 p[BLOWFISH_ROUNDS+2]; - } BLOWFISH_context; - --static gcry_err_code_t bf_setkey (void *c, const byte *key, unsigned keylen); -+static gcry_err_code_t bf_setkey (void *c, const byte *key, unsigned keylen, -+ gcry_cipher_hd_t hd); - static unsigned int encrypt_block (void *bc, byte *outbuf, const byte *inbuf); - static unsigned int decrypt_block (void *bc, byte *outbuf, const byte *inbuf); - -@@ -703,7 +705,7 @@ _gcry_blowfish_ctr_enc(void *context, un - /* Encrypt the counter. */ - do_encrypt_block(ctx, tmpbuf, ctr); - /* XOR the input with the encrypted counter and store in output. */ -- buf_xor(outbuf, tmpbuf, inbuf, BLOWFISH_BLOCKSIZE); -+ cipher_block_xor(outbuf, tmpbuf, inbuf, BLOWFISH_BLOCKSIZE); - outbuf += BLOWFISH_BLOCKSIZE; - inbuf += BLOWFISH_BLOCKSIZE; - /* Increment the counter. */ -@@ -771,7 +773,7 @@ _gcry_blowfish_cbc_dec(void *context, un - the intermediate result to SAVEBUF. 
*/ - do_decrypt_block (ctx, savebuf, inbuf); - -- buf_xor_n_copy_2(outbuf, savebuf, iv, inbuf, BLOWFISH_BLOCKSIZE); -+ cipher_block_xor_n_copy_2(outbuf, savebuf, iv, inbuf, BLOWFISH_BLOCKSIZE); - inbuf += BLOWFISH_BLOCKSIZE; - outbuf += BLOWFISH_BLOCKSIZE; - } -@@ -828,7 +830,7 @@ _gcry_blowfish_cfb_dec(void *context, un - for ( ;nblocks; nblocks-- ) - { - do_encrypt_block(ctx, iv, iv); -- buf_xor_n_copy(outbuf, iv, inbuf, BLOWFISH_BLOCKSIZE); -+ cipher_block_xor_n_copy(outbuf, iv, inbuf, BLOWFISH_BLOCKSIZE); - outbuf += BLOWFISH_BLOCKSIZE; - inbuf += BLOWFISH_BLOCKSIZE; - } -@@ -897,7 +899,7 @@ selftest(void) - const char *r; - - bf_setkey( (void *) &c, -- (const unsigned char*)"abcdefghijklmnopqrstuvwxyz", 26 ); -+ (const unsigned char*)"abcdefghijklmnopqrstuvwxyz", 26, NULL ); - encrypt_block( (void *) &c, buffer, plain ); - if( memcmp( buffer, "\x32\x4E\xD0\xFE\xF4\x13\xA2\x03", 8 ) ) - return "Blowfish selftest failed (1)."; -@@ -905,7 +907,7 @@ selftest(void) - if( memcmp( buffer, plain, 8 ) ) - return "Blowfish selftest failed (2)."; - -- bf_setkey( (void *) &c, key3, 8 ); -+ bf_setkey( (void *) &c, key3, 8, NULL ); - encrypt_block( (void *) &c, buffer, plain3 ); - if( memcmp( buffer, cipher3, 8 ) ) - return "Blowfish selftest failed (3)."; -@@ -1095,10 +1097,12 @@ do_bf_setkey (BLOWFISH_context *c, const - - - static gcry_err_code_t --bf_setkey (void *context, const byte *key, unsigned keylen) -+bf_setkey (void *context, const byte *key, unsigned keylen, -+ gcry_cipher_hd_t hd) - { - BLOWFISH_context *c = (BLOWFISH_context *) context; - gcry_err_code_t rc = do_bf_setkey (c, key, keylen); -+ (void)hd; - return rc; - } - -diff -up libgcrypt-1.8.5/cipher/bufhelp.h.aes-perf libgcrypt-1.8.5/cipher/bufhelp.h ---- libgcrypt-1.8.5/cipher/bufhelp.h.aes-perf 2018-04-17 17:35:28.000000000 +0200 -+++ libgcrypt-1.8.5/cipher/bufhelp.h 2020-04-22 18:29:41.663862363 +0200 -@@ -450,7 +450,21 @@ static inline void buf_put_le64(void *_b - out->a = le_bswap64(val); - } - -- - 
#endif /*BUFHELP_UNALIGNED_ACCESS*/ - -+ -+/* Host-endian get/put macros */ -+#ifdef WORDS_BIGENDIAN -+# define buf_get_he32 buf_get_be32 -+# define buf_put_he32 buf_put_be32 -+# define buf_get_he64 buf_get_be64 -+# define buf_put_he64 buf_put_be64 -+#else -+# define buf_get_he32 buf_get_le32 -+# define buf_put_he32 buf_put_le32 -+# define buf_get_he64 buf_get_le64 -+# define buf_put_he64 buf_put_le64 -+#endif -+ -+ - #endif /*GCRYPT_BUFHELP_H*/ -diff -up libgcrypt-1.8.5/cipher/camellia-glue.c.aes-perf libgcrypt-1.8.5/cipher/camellia-glue.c ---- libgcrypt-1.8.5/cipher/camellia-glue.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/camellia-glue.c 2020-04-22 18:29:41.664862344 +0200 -@@ -204,7 +204,8 @@ extern void _gcry_camellia_aesni_avx2_oc - static const char *selftest(void); - - static gcry_err_code_t --camellia_setkey(void *c, const byte *key, unsigned keylen) -+camellia_setkey(void *c, const byte *key, unsigned keylen, -+ gcry_cipher_hd_t hd) - { - CAMELLIA_context *ctx=c; - static int initialized=0; -@@ -213,6 +214,8 @@ camellia_setkey(void *c, const byte *key - unsigned int hwf = _gcry_get_hw_features (); - #endif - -+ (void)hd; -+ - if(keylen!=16 && keylen!=24 && keylen!=32) - return GPG_ERR_INV_KEYLEN; - -@@ -427,7 +430,7 @@ _gcry_camellia_ctr_enc(void *context, un - /* Encrypt the counter. */ - Camellia_EncryptBlock(ctx->keybitlength, ctr, ctx->keytable, tmpbuf); - /* XOR the input with the encrypted counter and store in output. */ -- buf_xor(outbuf, tmpbuf, inbuf, CAMELLIA_BLOCK_SIZE); -+ cipher_block_xor(outbuf, tmpbuf, inbuf, CAMELLIA_BLOCK_SIZE); - outbuf += CAMELLIA_BLOCK_SIZE; - inbuf += CAMELLIA_BLOCK_SIZE; - /* Increment the counter. */ -@@ -520,7 +523,8 @@ _gcry_camellia_cbc_dec(void *context, un - the intermediate result to SAVEBUF. 
*/ - Camellia_DecryptBlock(ctx->keybitlength, inbuf, ctx->keytable, savebuf); - -- buf_xor_n_copy_2(outbuf, savebuf, iv, inbuf, CAMELLIA_BLOCK_SIZE); -+ cipher_block_xor_n_copy_2(outbuf, savebuf, iv, inbuf, -+ CAMELLIA_BLOCK_SIZE); - inbuf += CAMELLIA_BLOCK_SIZE; - outbuf += CAMELLIA_BLOCK_SIZE; - } -@@ -602,7 +606,7 @@ _gcry_camellia_cfb_dec(void *context, un - for ( ;nblocks; nblocks-- ) - { - Camellia_EncryptBlock(ctx->keybitlength, iv, ctx->keytable, iv); -- buf_xor_n_copy(outbuf, iv, inbuf, CAMELLIA_BLOCK_SIZE); -+ cipher_block_xor_n_copy(outbuf, iv, inbuf, CAMELLIA_BLOCK_SIZE); - outbuf += CAMELLIA_BLOCK_SIZE; - inbuf += CAMELLIA_BLOCK_SIZE; - } -@@ -991,7 +995,7 @@ selftest(void) - 0x20,0xef,0x7c,0x91,0x9e,0x3a,0x75,0x09 - }; - -- camellia_setkey(&ctx,key_128,sizeof(key_128)); -+ camellia_setkey(&ctx,key_128,sizeof(key_128),NULL); - camellia_encrypt(&ctx,scratch,plaintext); - if(memcmp(scratch,ciphertext_128,sizeof(ciphertext_128))!=0) - return "CAMELLIA-128 test encryption failed."; -@@ -999,7 +1003,7 @@ selftest(void) - if(memcmp(scratch,plaintext,sizeof(plaintext))!=0) - return "CAMELLIA-128 test decryption failed."; - -- camellia_setkey(&ctx,key_192,sizeof(key_192)); -+ camellia_setkey(&ctx,key_192,sizeof(key_192),NULL); - camellia_encrypt(&ctx,scratch,plaintext); - if(memcmp(scratch,ciphertext_192,sizeof(ciphertext_192))!=0) - return "CAMELLIA-192 test encryption failed."; -@@ -1007,7 +1011,7 @@ selftest(void) - if(memcmp(scratch,plaintext,sizeof(plaintext))!=0) - return "CAMELLIA-192 test decryption failed."; - -- camellia_setkey(&ctx,key_256,sizeof(key_256)); -+ camellia_setkey(&ctx,key_256,sizeof(key_256),NULL); - camellia_encrypt(&ctx,scratch,plaintext); - if(memcmp(scratch,ciphertext_256,sizeof(ciphertext_256))!=0) - return "CAMELLIA-256 test encryption failed."; -diff -up libgcrypt-1.8.5/cipher/cast5.c.aes-perf libgcrypt-1.8.5/cipher/cast5.c ---- libgcrypt-1.8.5/cipher/cast5.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ 
libgcrypt-1.8.5/cipher/cast5.c 2020-04-22 18:29:41.665862325 +0200 -@@ -44,6 +44,7 @@ - #include "cipher.h" - #include "bithelp.h" - #include "bufhelp.h" -+#include "cipher-internal.h" - #include "cipher-selftest.h" - - /* USE_AMD64_ASM indicates whether to use AMD64 assembly code. */ -@@ -72,7 +73,8 @@ typedef struct { - #endif - } CAST5_context; - --static gcry_err_code_t cast_setkey (void *c, const byte *key, unsigned keylen); -+static gcry_err_code_t cast_setkey (void *c, const byte *key, unsigned keylen, -+ gcry_cipher_hd_t hd); - static unsigned int encrypt_block (void *c, byte *outbuf, const byte *inbuf); - static unsigned int decrypt_block (void *c, byte *outbuf, const byte *inbuf); - -@@ -671,7 +673,7 @@ _gcry_cast5_ctr_enc(void *context, unsig - /* Encrypt the counter. */ - do_encrypt_block(ctx, tmpbuf, ctr); - /* XOR the input with the encrypted counter and store in output. */ -- buf_xor(outbuf, tmpbuf, inbuf, CAST5_BLOCKSIZE); -+ cipher_block_xor(outbuf, tmpbuf, inbuf, CAST5_BLOCKSIZE); - outbuf += CAST5_BLOCKSIZE; - inbuf += CAST5_BLOCKSIZE; - /* Increment the counter. */ -@@ -739,7 +741,7 @@ _gcry_cast5_cbc_dec(void *context, unsig - the intermediate result to SAVEBUF. 
*/ - do_decrypt_block (ctx, savebuf, inbuf); - -- buf_xor_n_copy_2(outbuf, savebuf, iv, inbuf, CAST5_BLOCKSIZE); -+ cipher_block_xor_n_copy_2(outbuf, savebuf, iv, inbuf, CAST5_BLOCKSIZE); - inbuf += CAST5_BLOCKSIZE; - outbuf += CAST5_BLOCKSIZE; - } -@@ -795,7 +797,7 @@ _gcry_cast5_cfb_dec(void *context, unsig - for ( ;nblocks; nblocks-- ) - { - do_encrypt_block(ctx, iv, iv); -- buf_xor_n_copy(outbuf, iv, inbuf, CAST5_BLOCKSIZE); -+ cipher_block_xor_n_copy(outbuf, iv, inbuf, CAST5_BLOCKSIZE); - outbuf += CAST5_BLOCKSIZE; - inbuf += CAST5_BLOCKSIZE; - } -@@ -863,7 +865,7 @@ selftest(void) - byte buffer[8]; - const char *r; - -- cast_setkey( &c, key, 16 ); -+ cast_setkey( &c, key, 16, NULL ); - encrypt_block( &c, buffer, plain ); - if( memcmp( buffer, cipher, 8 ) ) - return "1"; -@@ -884,10 +886,10 @@ selftest(void) - 0x80,0xAC,0x05,0xB8,0xE8,0x3D,0x69,0x6E }; - - for(i=0; i < 1000000; i++ ) { -- cast_setkey( &c, b0, 16 ); -+ cast_setkey( &c, b0, 16, NULL ); - encrypt_block( &c, a0, a0 ); - encrypt_block( &c, a0+8, a0+8 ); -- cast_setkey( &c, a0, 16 ); -+ cast_setkey( &c, a0, 16, NULL ); - encrypt_block( &c, b0, b0 ); - encrypt_block( &c, b0+8, b0+8 ); - } -@@ -1029,10 +1031,12 @@ do_cast_setkey( CAST5_context *c, const - } - - static gcry_err_code_t --cast_setkey (void *context, const byte *key, unsigned keylen ) -+cast_setkey (void *context, const byte *key, unsigned keylen, -+ gcry_cipher_hd_t hd ) - { - CAST5_context *c = (CAST5_context *) context; - gcry_err_code_t rc = do_cast_setkey (c, key, keylen); -+ (void)hd; - return rc; - } - -diff -up libgcrypt-1.8.5/cipher/chacha20.c.aes-perf libgcrypt-1.8.5/cipher/chacha20.c ---- libgcrypt-1.8.5/cipher/chacha20.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/chacha20.c 2020-04-22 18:29:41.665862325 +0200 -@@ -419,10 +419,12 @@ chacha20_do_setkey (CHACHA20_context_t * - - - static gcry_err_code_t --chacha20_setkey (void *context, const byte * key, unsigned int keylen) -+chacha20_setkey (void 
*context, const byte *key, unsigned int keylen, -+ gcry_cipher_hd_t hd) - { - CHACHA20_context_t *ctx = (CHACHA20_context_t *) context; - gcry_err_code_t rc = chacha20_do_setkey (ctx, key, keylen); -+ (void)hd; - _gcry_burn_stack (4 + sizeof (void *) + 4 * sizeof (void *)); - return rc; - } -@@ -569,7 +571,7 @@ selftest (void) - /* 16-byte alignment required for amd64 implementation. */ - ctx = (CHACHA20_context_t *)((uintptr_t)(ctxbuf + 15) & ~(uintptr_t)15); - -- chacha20_setkey (ctx, key_1, sizeof key_1); -+ chacha20_setkey (ctx, key_1, sizeof key_1, NULL); - chacha20_setiv (ctx, nonce_1, sizeof nonce_1); - scratch[sizeof (scratch) - 1] = 0; - chacha20_encrypt_stream (ctx, scratch, plaintext_1, sizeof plaintext_1); -@@ -577,7 +579,7 @@ selftest (void) - return "ChaCha20 encryption test 1 failed."; - if (scratch[sizeof (scratch) - 1]) - return "ChaCha20 wrote too much."; -- chacha20_setkey (ctx, key_1, sizeof (key_1)); -+ chacha20_setkey (ctx, key_1, sizeof (key_1), NULL); - chacha20_setiv (ctx, nonce_1, sizeof nonce_1); - chacha20_encrypt_stream (ctx, scratch, scratch, sizeof plaintext_1); - if (memcmp (scratch, plaintext_1, sizeof plaintext_1)) -@@ -585,12 +587,12 @@ selftest (void) - - for (i = 0; i < sizeof buf; i++) - buf[i] = i; -- chacha20_setkey (ctx, key_1, sizeof key_1); -+ chacha20_setkey (ctx, key_1, sizeof key_1, NULL); - chacha20_setiv (ctx, nonce_1, sizeof nonce_1); - /*encrypt */ - chacha20_encrypt_stream (ctx, buf, buf, sizeof buf); - /*decrypt */ -- chacha20_setkey (ctx, key_1, sizeof key_1); -+ chacha20_setkey (ctx, key_1, sizeof key_1, NULL); - chacha20_setiv (ctx, nonce_1, sizeof nonce_1); - chacha20_encrypt_stream (ctx, buf, buf, 1); - chacha20_encrypt_stream (ctx, buf + 1, buf + 1, (sizeof buf) - 1 - 1); -@@ -600,13 +602,13 @@ selftest (void) - if (buf[i] != (byte) i) - return "ChaCha20 encryption test 2 failed."; - -- chacha20_setkey (ctx, key_1, sizeof key_1); -+ chacha20_setkey (ctx, key_1, sizeof key_1, NULL); - chacha20_setiv (ctx, 
nonce_1, sizeof nonce_1); - /* encrypt */ - for (i = 0; i < sizeof buf; i++) - chacha20_encrypt_stream (ctx, &buf[i], &buf[i], 1); - /* decrypt */ -- chacha20_setkey (ctx, key_1, sizeof key_1); -+ chacha20_setkey (ctx, key_1, sizeof key_1, NULL); - chacha20_setiv (ctx, nonce_1, sizeof nonce_1); - chacha20_encrypt_stream (ctx, buf, buf, sizeof buf); - for (i = 0; i < sizeof buf; i++) -diff -up libgcrypt-1.8.5/cipher/cipher-aeswrap.c.aes-perf libgcrypt-1.8.5/cipher/cipher-aeswrap.c ---- libgcrypt-1.8.5/cipher/cipher-aeswrap.c.aes-perf 2018-04-17 17:49:00.000000000 +0200 -+++ libgcrypt-1.8.5/cipher/cipher-aeswrap.c 2020-04-22 18:29:41.665862325 +0200 -@@ -99,7 +99,7 @@ _gcry_cipher_aeswrap_encrypt (gcry_ciphe - break; - } - /* A := MSB_64(B) ^ t */ -- buf_xor(a, b, t, 8); -+ cipher_block_xor(a, b, t, 8); - /* R[i] := LSB_64(B) */ - memcpy (r+i*8, b+8, 8); - } -@@ -170,7 +170,7 @@ _gcry_cipher_aeswrap_decrypt (gcry_ciphe - for (i = n; i >= 1; i--) - { - /* B := AES_k^1( (A ^ t)| R[i] ) */ -- buf_xor(b, a, t, 8); -+ cipher_block_xor(b, a, t, 8); - memcpy (b+8, r+(i-1)*8, 8); - nburn = c->spec->decrypt (&c->context.c, b, b); - burn = nburn > burn ? nburn : burn; -diff -up libgcrypt-1.8.5/cipher/cipher.c.aes-perf libgcrypt-1.8.5/cipher/cipher.c ---- libgcrypt-1.8.5/cipher/cipher.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/cipher.c 2020-04-22 18:29:41.666862306 +0200 -@@ -92,6 +92,8 @@ static gcry_cipher_spec_t *cipher_list[] - - - -+static void _gcry_cipher_setup_mode_ops(gcry_cipher_hd_t c, int mode); -+ - - static int - map_algo (int algo) -@@ -532,6 +534,7 @@ _gcry_cipher_open_internal (gcry_cipher_ - h->bulk.ctr_enc = _gcry_aes_ctr_enc; - h->bulk.ocb_crypt = _gcry_aes_ocb_crypt; - h->bulk.ocb_auth = _gcry_aes_ocb_auth; -+ h->bulk.xts_crypt = _gcry_aes_xts_crypt; - break; - #endif /*USE_AES*/ - #ifdef USE_BLOWFISH -@@ -592,6 +595,9 @@ _gcry_cipher_open_internal (gcry_cipher_ - break; - } - -+ /* Setup mode routines. 
*/ -+ _gcry_cipher_setup_mode_ops(h, mode); -+ - /* Setup defaults depending on the mode. */ - switch (mode) - { -@@ -609,8 +615,7 @@ _gcry_cipher_open_internal (gcry_cipher_ - default: - break; - } -- -- } -+ } - } - - /* Done. */ -@@ -675,7 +680,7 @@ cipher_setkey (gcry_cipher_hd_t c, byte - } - } - -- rc = c->spec->setkey (&c->context.c, key, keylen); -+ rc = c->spec->setkey (&c->context.c, key, keylen, c); - if (!rc) - { - /* Duplicate initial context. */ -@@ -701,7 +706,7 @@ cipher_setkey (gcry_cipher_hd_t c, byte - case GCRY_CIPHER_MODE_XTS: - /* Setup tweak cipher with second part of XTS key. */ - rc = c->spec->setkey (c->u_mode.xts.tweak_context, key + keylen, -- keylen); -+ keylen, c); - if (!rc) - { - /* Duplicate initial tweak context. */ -@@ -872,85 +877,78 @@ do_ecb_decrypt (gcry_cipher_hd_t c, - } - - --/**************** -- * Encrypt INBUF to OUTBUF with the mode selected at open. -- * inbuf and outbuf may overlap or be the same. -- * Depending on the mode some constraints apply to INBUFLEN. 
-- */ - static gcry_err_code_t --cipher_encrypt (gcry_cipher_hd_t c, byte *outbuf, size_t outbuflen, -- const byte *inbuf, size_t inbuflen) -+do_stream_encrypt (gcry_cipher_hd_t c, -+ unsigned char *outbuf, size_t outbuflen, -+ const unsigned char *inbuf, size_t inbuflen) -+{ -+ (void)outbuflen; -+ c->spec->stencrypt (&c->context.c, outbuf, (void *)inbuf, inbuflen); -+ return 0; -+} -+ -+static gcry_err_code_t -+do_stream_decrypt (gcry_cipher_hd_t c, -+ unsigned char *outbuf, size_t outbuflen, -+ const unsigned char *inbuf, size_t inbuflen) -+{ -+ (void)outbuflen; -+ c->spec->stdecrypt (&c->context.c, outbuf, (void *)inbuf, inbuflen); -+ return 0; -+} -+ -+ -+static gcry_err_code_t -+do_encrypt_none_unknown (gcry_cipher_hd_t c, byte *outbuf, size_t outbuflen, -+ const byte *inbuf, size_t inbuflen) - { - gcry_err_code_t rc; - -- if (c->mode != GCRY_CIPHER_MODE_NONE && !c->marks.key) -- { -- log_error ("cipher_encrypt: key not set\n"); -- return GPG_ERR_MISSING_KEY; -- } -+ (void)outbuflen; - - switch (c->mode) - { -- case GCRY_CIPHER_MODE_ECB: -- rc = do_ecb_encrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_CBC: -- rc = _gcry_cipher_cbc_encrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_CFB: -- rc = _gcry_cipher_cfb_encrypt (c, outbuf, outbuflen, inbuf, inbuflen); -+ case GCRY_CIPHER_MODE_CMAC: -+ rc = GPG_ERR_INV_CIPHER_MODE; - break; - -- case GCRY_CIPHER_MODE_CFB8: -- rc = _gcry_cipher_cfb8_encrypt (c, outbuf, outbuflen, inbuf, inbuflen); -+ case GCRY_CIPHER_MODE_NONE: -+ if (fips_mode () || !_gcry_get_debug_flag (0)) -+ { -+ fips_signal_error ("cipher mode NONE used"); -+ rc = GPG_ERR_INV_CIPHER_MODE; -+ } -+ else -+ { -+ if (inbuf != outbuf) -+ memmove (outbuf, inbuf, inbuflen); -+ rc = 0; -+ } - break; - -- case GCRY_CIPHER_MODE_OFB: -- rc = _gcry_cipher_ofb_encrypt (c, outbuf, outbuflen, inbuf, inbuflen); -+ default: -+ log_fatal ("cipher_encrypt: invalid mode %d\n", c->mode ); -+ rc = 
GPG_ERR_INV_CIPHER_MODE; - break; -+ } - -- case GCRY_CIPHER_MODE_CTR: -- rc = _gcry_cipher_ctr_encrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -+ return rc; -+} - -- case GCRY_CIPHER_MODE_AESWRAP: -- rc = _gcry_cipher_aeswrap_encrypt (c, outbuf, outbuflen, -- inbuf, inbuflen); -- break; -+static gcry_err_code_t -+do_decrypt_none_unknown (gcry_cipher_hd_t c, byte *outbuf, size_t outbuflen, -+ const byte *inbuf, size_t inbuflen) -+{ -+ gcry_err_code_t rc; - -- case GCRY_CIPHER_MODE_CCM: -- rc = _gcry_cipher_ccm_encrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -+ (void)outbuflen; - -+ switch (c->mode) -+ { - case GCRY_CIPHER_MODE_CMAC: - rc = GPG_ERR_INV_CIPHER_MODE; - break; - -- case GCRY_CIPHER_MODE_GCM: -- rc = _gcry_cipher_gcm_encrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_POLY1305: -- rc = _gcry_cipher_poly1305_encrypt (c, outbuf, outbuflen, -- inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_OCB: -- rc = _gcry_cipher_ocb_encrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_XTS: -- rc = _gcry_cipher_xts_crypt (c, outbuf, outbuflen, inbuf, inbuflen, 1); -- break; -- -- case GCRY_CIPHER_MODE_STREAM: -- c->spec->stencrypt (&c->context.c, -- outbuf, (byte*)/*arggg*/inbuf, inbuflen); -- rc = 0; -- break; -- - case GCRY_CIPHER_MODE_NONE: - if (fips_mode () || !_gcry_get_debug_flag (0)) - { -@@ -966,7 +964,7 @@ cipher_encrypt (gcry_cipher_hd_t c, byte - break; - - default: -- log_fatal ("cipher_encrypt: invalid mode %d\n", c->mode ); -+ log_fatal ("cipher_decrypt: invalid mode %d\n", c->mode ); - rc = GPG_ERR_INV_CIPHER_MODE; - break; - } -@@ -991,7 +989,13 @@ _gcry_cipher_encrypt (gcry_cipher_hd_t h - inlen = outsize; - } - -- rc = cipher_encrypt (h, out, outsize, in, inlen); -+ if (h->mode != GCRY_CIPHER_MODE_NONE && !h->marks.key) -+ { -+ log_error ("cipher_decrypt: key not set\n"); -+ return GPG_ERR_MISSING_KEY; -+ } -+ -+ rc = h->mode_ops.encrypt (h, out, 
outsize, in, inlen); - - /* Failsafe: Make sure that the plaintext will never make it into - OUT if the encryption returned an error. */ -@@ -1002,110 +1006,10 @@ _gcry_cipher_encrypt (gcry_cipher_hd_t h - } - - -- - /**************** -- * Decrypt INBUF to OUTBUF with the mode selected at open. -- * inbuf and outbuf may overlap or be the same. -- * Depending on the mode some some constraints apply to INBUFLEN. -+ * Decrypt IN and write it to OUT. If IN is NULL, in-place encryption has -+ * been requested. - */ --static gcry_err_code_t --cipher_decrypt (gcry_cipher_hd_t c, byte *outbuf, size_t outbuflen, -- const byte *inbuf, size_t inbuflen) --{ -- gcry_err_code_t rc; -- -- if (c->mode != GCRY_CIPHER_MODE_NONE && !c->marks.key) -- { -- log_error ("cipher_decrypt: key not set\n"); -- return GPG_ERR_MISSING_KEY; -- } -- -- switch (c->mode) -- { -- case GCRY_CIPHER_MODE_ECB: -- rc = do_ecb_decrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_CBC: -- rc = _gcry_cipher_cbc_decrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_CFB: -- rc = _gcry_cipher_cfb_decrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_CFB8: -- rc = _gcry_cipher_cfb8_decrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_OFB: -- rc = _gcry_cipher_ofb_encrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_CTR: -- rc = _gcry_cipher_ctr_encrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_AESWRAP: -- rc = _gcry_cipher_aeswrap_decrypt (c, outbuf, outbuflen, -- inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_CCM: -- rc = _gcry_cipher_ccm_decrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_CMAC: -- rc = GPG_ERR_INV_CIPHER_MODE; -- break; -- -- case GCRY_CIPHER_MODE_GCM: -- rc = _gcry_cipher_gcm_decrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case 
GCRY_CIPHER_MODE_POLY1305: -- rc = _gcry_cipher_poly1305_decrypt (c, outbuf, outbuflen, -- inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_OCB: -- rc = _gcry_cipher_ocb_decrypt (c, outbuf, outbuflen, inbuf, inbuflen); -- break; -- -- case GCRY_CIPHER_MODE_XTS: -- rc = _gcry_cipher_xts_crypt (c, outbuf, outbuflen, inbuf, inbuflen, 0); -- break; -- -- case GCRY_CIPHER_MODE_STREAM: -- c->spec->stdecrypt (&c->context.c, -- outbuf, (byte*)/*arggg*/inbuf, inbuflen); -- rc = 0; -- break; -- -- case GCRY_CIPHER_MODE_NONE: -- if (fips_mode () || !_gcry_get_debug_flag (0)) -- { -- fips_signal_error ("cipher mode NONE used"); -- rc = GPG_ERR_INV_CIPHER_MODE; -- } -- else -- { -- if (inbuf != outbuf) -- memmove (outbuf, inbuf, inbuflen); -- rc = 0; -- } -- break; -- -- default: -- log_fatal ("cipher_decrypt: invalid mode %d\n", c->mode ); -- rc = GPG_ERR_INV_CIPHER_MODE; -- break; -- } -- -- return rc; --} -- -- - gcry_err_code_t - _gcry_cipher_decrypt (gcry_cipher_hd_t h, void *out, size_t outsize, - const void *in, size_t inlen) -@@ -1116,9 +1020,14 @@ _gcry_cipher_decrypt (gcry_cipher_hd_t h - inlen = outsize; - } - -- return cipher_decrypt (h, out, outsize, in, inlen); --} -+ if (h->mode != GCRY_CIPHER_MODE_NONE && !h->marks.key) -+ { -+ log_error ("cipher_decrypt: key not set\n"); -+ return GPG_ERR_MISSING_KEY; -+ } - -+ return h->mode_ops.decrypt (h, out, outsize, in, inlen); -+} - - - /**************** -@@ -1149,33 +1058,10 @@ _gcry_cipher_setkey (gcry_cipher_hd_t hd - gcry_err_code_t - _gcry_cipher_setiv (gcry_cipher_hd_t hd, const void *iv, size_t ivlen) - { -- gcry_err_code_t rc = 0; -- -- switch (hd->mode) -- { -- case GCRY_CIPHER_MODE_CCM: -- rc = _gcry_cipher_ccm_set_nonce (hd, iv, ivlen); -- break; -- -- case GCRY_CIPHER_MODE_GCM: -- rc = _gcry_cipher_gcm_setiv (hd, iv, ivlen); -- break; -- -- case GCRY_CIPHER_MODE_POLY1305: -- rc = _gcry_cipher_poly1305_setiv (hd, iv, ivlen); -- break; -- -- case GCRY_CIPHER_MODE_OCB: -- rc = _gcry_cipher_ocb_set_nonce 
(hd, iv, ivlen); -- break; -- -- default: -- rc = cipher_setiv (hd, iv, ivlen); -- break; -- } -- return rc; -+ return hd->mode_ops.setiv (hd, iv, ivlen); - } - -+ - /* Set counter for CTR mode. (CTR,CTRLEN) must denote a buffer of - block size length, or (NULL,0) to set the CTR to the all-zero - block. */ -@@ -1209,38 +1095,40 @@ _gcry_cipher_getctr (gcry_cipher_hd_t hd - return 0; - } - -+ - gcry_err_code_t - _gcry_cipher_authenticate (gcry_cipher_hd_t hd, const void *abuf, - size_t abuflen) - { - gcry_err_code_t rc; - -- switch (hd->mode) -+ if (hd->mode_ops.authenticate) - { -- case GCRY_CIPHER_MODE_CCM: -- rc = _gcry_cipher_ccm_authenticate (hd, abuf, abuflen); -- break; -- -- case GCRY_CIPHER_MODE_CMAC: -- rc = _gcry_cipher_cmac_authenticate (hd, abuf, abuflen); -- break; -+ rc = hd->mode_ops.authenticate (hd, abuf, abuflen); -+ } -+ else -+ { -+ log_error ("gcry_cipher_authenticate: invalid mode %d\n", hd->mode); -+ rc = GPG_ERR_INV_CIPHER_MODE; -+ } - -- case GCRY_CIPHER_MODE_GCM: -- rc = _gcry_cipher_gcm_authenticate (hd, abuf, abuflen); -- break; -+ return rc; -+} - -- case GCRY_CIPHER_MODE_POLY1305: -- rc = _gcry_cipher_poly1305_authenticate (hd, abuf, abuflen); -- break; - -- case GCRY_CIPHER_MODE_OCB: -- rc = _gcry_cipher_ocb_authenticate (hd, abuf, abuflen); -- break; -+gcry_err_code_t -+_gcry_cipher_gettag (gcry_cipher_hd_t hd, void *outtag, size_t taglen) -+{ -+ gcry_err_code_t rc; - -- default: -- log_error ("gcry_cipher_authenticate: invalid mode %d\n", hd->mode); -+ if (hd->mode_ops.get_tag) -+ { -+ rc = hd->mode_ops.get_tag (hd, outtag, taglen); -+ } -+ else -+ { -+ log_error ("gcry_cipher_gettag: invalid mode %d\n", hd->mode); - rc = GPG_ERR_INV_CIPHER_MODE; -- break; - } - - return rc; -@@ -1248,76 +1136,166 @@ _gcry_cipher_authenticate (gcry_cipher_h - - - gcry_err_code_t --_gcry_cipher_gettag (gcry_cipher_hd_t hd, void *outtag, size_t taglen) -+_gcry_cipher_checktag (gcry_cipher_hd_t hd, const void *intag, size_t taglen) - { - 
gcry_err_code_t rc; - -- switch (hd->mode) -+ if (hd->mode_ops.check_tag) - { -- case GCRY_CIPHER_MODE_CCM: -- rc = _gcry_cipher_ccm_get_tag (hd, outtag, taglen); -+ rc = hd->mode_ops.check_tag (hd, intag, taglen); -+ } -+ else -+ { -+ log_error ("gcry_cipher_checktag: invalid mode %d\n", hd->mode); -+ rc = GPG_ERR_INV_CIPHER_MODE; -+ } -+ -+ return rc; -+} -+ -+ -+ -+static void -+_gcry_cipher_setup_mode_ops(gcry_cipher_hd_t c, int mode) -+{ -+ /* Setup encryption and decryption routines. */ -+ switch (mode) -+ { -+ case GCRY_CIPHER_MODE_STREAM: -+ c->mode_ops.encrypt = do_stream_encrypt; -+ c->mode_ops.decrypt = do_stream_decrypt; - break; - -- case GCRY_CIPHER_MODE_CMAC: -- rc = _gcry_cipher_cmac_get_tag (hd, outtag, taglen); -+ case GCRY_CIPHER_MODE_ECB: -+ c->mode_ops.encrypt = do_ecb_encrypt; -+ c->mode_ops.decrypt = do_ecb_decrypt; -+ break; -+ -+ case GCRY_CIPHER_MODE_CBC: -+ c->mode_ops.encrypt = _gcry_cipher_cbc_encrypt; -+ c->mode_ops.decrypt = _gcry_cipher_cbc_decrypt; -+ break; -+ -+ case GCRY_CIPHER_MODE_CFB: -+ c->mode_ops.encrypt = _gcry_cipher_cfb_encrypt; -+ c->mode_ops.decrypt = _gcry_cipher_cfb_decrypt; -+ break; -+ -+ case GCRY_CIPHER_MODE_CFB8: -+ c->mode_ops.encrypt = _gcry_cipher_cfb8_encrypt; -+ c->mode_ops.decrypt = _gcry_cipher_cfb8_decrypt; -+ break; -+ -+ case GCRY_CIPHER_MODE_OFB: -+ c->mode_ops.encrypt = _gcry_cipher_ofb_encrypt; -+ c->mode_ops.decrypt = _gcry_cipher_ofb_encrypt; -+ break; -+ -+ case GCRY_CIPHER_MODE_CTR: -+ c->mode_ops.encrypt = _gcry_cipher_ctr_encrypt; -+ c->mode_ops.decrypt = _gcry_cipher_ctr_encrypt; -+ break; -+ -+ case GCRY_CIPHER_MODE_AESWRAP: -+ c->mode_ops.encrypt = _gcry_cipher_aeswrap_encrypt; -+ c->mode_ops.decrypt = _gcry_cipher_aeswrap_decrypt; -+ break; -+ -+ case GCRY_CIPHER_MODE_CCM: -+ c->mode_ops.encrypt = _gcry_cipher_ccm_encrypt; -+ c->mode_ops.decrypt = _gcry_cipher_ccm_decrypt; - break; - - case GCRY_CIPHER_MODE_GCM: -- rc = _gcry_cipher_gcm_get_tag (hd, outtag, taglen); -+ c->mode_ops.encrypt 
= _gcry_cipher_gcm_encrypt; -+ c->mode_ops.decrypt = _gcry_cipher_gcm_decrypt; - break; - - case GCRY_CIPHER_MODE_POLY1305: -- rc = _gcry_cipher_poly1305_get_tag (hd, outtag, taglen); -+ c->mode_ops.encrypt = _gcry_cipher_poly1305_encrypt; -+ c->mode_ops.decrypt = _gcry_cipher_poly1305_decrypt; - break; - - case GCRY_CIPHER_MODE_OCB: -- rc = _gcry_cipher_ocb_get_tag (hd, outtag, taglen); -+ c->mode_ops.encrypt = _gcry_cipher_ocb_encrypt; -+ c->mode_ops.decrypt = _gcry_cipher_ocb_decrypt; -+ break; -+ -+ case GCRY_CIPHER_MODE_XTS: -+ c->mode_ops.encrypt = _gcry_cipher_xts_encrypt; -+ c->mode_ops.decrypt = _gcry_cipher_xts_decrypt; - break; - - default: -- log_error ("gcry_cipher_gettag: invalid mode %d\n", hd->mode); -- rc = GPG_ERR_INV_CIPHER_MODE; -+ c->mode_ops.encrypt = do_encrypt_none_unknown; -+ c->mode_ops.decrypt = do_decrypt_none_unknown; - break; - } - -- return rc; --} -+ /* Setup IV setting routine. */ -+ switch (mode) -+ { -+ case GCRY_CIPHER_MODE_CCM: -+ c->mode_ops.setiv = _gcry_cipher_ccm_set_nonce; -+ break; -+ -+ case GCRY_CIPHER_MODE_GCM: -+ c->mode_ops.setiv = _gcry_cipher_gcm_setiv; -+ break; - -+ case GCRY_CIPHER_MODE_POLY1305: -+ c->mode_ops.setiv = _gcry_cipher_poly1305_setiv; -+ break; - --gcry_err_code_t --_gcry_cipher_checktag (gcry_cipher_hd_t hd, const void *intag, size_t taglen) --{ -- gcry_err_code_t rc; -+ case GCRY_CIPHER_MODE_OCB: -+ c->mode_ops.setiv = _gcry_cipher_ocb_set_nonce; -+ break; - -- switch (hd->mode) -+ default: -+ c->mode_ops.setiv = cipher_setiv; -+ break; -+ } -+ -+ -+ /* Setup authentication routines for AEAD modes. 
*/ -+ switch (mode) - { - case GCRY_CIPHER_MODE_CCM: -- rc = _gcry_cipher_ccm_check_tag (hd, intag, taglen); -+ c->mode_ops.authenticate = _gcry_cipher_ccm_authenticate; -+ c->mode_ops.get_tag = _gcry_cipher_ccm_get_tag; -+ c->mode_ops.check_tag = _gcry_cipher_ccm_check_tag; - break; - - case GCRY_CIPHER_MODE_CMAC: -- rc = _gcry_cipher_cmac_check_tag (hd, intag, taglen); -+ c->mode_ops.authenticate = _gcry_cipher_cmac_authenticate; -+ c->mode_ops.get_tag = _gcry_cipher_cmac_get_tag; -+ c->mode_ops.check_tag = _gcry_cipher_cmac_check_tag; - break; - - case GCRY_CIPHER_MODE_GCM: -- rc = _gcry_cipher_gcm_check_tag (hd, intag, taglen); -+ c->mode_ops.authenticate = _gcry_cipher_gcm_authenticate; -+ c->mode_ops.get_tag = _gcry_cipher_gcm_get_tag; -+ c->mode_ops.check_tag = _gcry_cipher_gcm_check_tag; - break; - - case GCRY_CIPHER_MODE_POLY1305: -- rc = _gcry_cipher_poly1305_check_tag (hd, intag, taglen); -+ c->mode_ops.authenticate = _gcry_cipher_poly1305_authenticate; -+ c->mode_ops.get_tag = _gcry_cipher_poly1305_get_tag; -+ c->mode_ops.check_tag = _gcry_cipher_poly1305_check_tag; - break; - - case GCRY_CIPHER_MODE_OCB: -- rc = _gcry_cipher_ocb_check_tag (hd, intag, taglen); -+ c->mode_ops.authenticate = _gcry_cipher_ocb_authenticate; -+ c->mode_ops.get_tag = _gcry_cipher_ocb_get_tag; -+ c->mode_ops.check_tag = _gcry_cipher_ocb_check_tag; - break; - - default: -- log_error ("gcry_cipher_checktag: invalid mode %d\n", hd->mode); -- rc = GPG_ERR_INV_CIPHER_MODE; -+ c->mode_ops.authenticate = NULL; -+ c->mode_ops.get_tag = NULL; -+ c->mode_ops.check_tag = NULL; - break; - } -- -- return rc; - } - - -diff -up libgcrypt-1.8.5/cipher/cipher-cbc.c.aes-perf libgcrypt-1.8.5/cipher/cipher-cbc.c ---- libgcrypt-1.8.5/cipher/cipher-cbc.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/cipher-cbc.c 2020-04-22 18:29:41.666862306 +0200 -@@ -79,7 +79,7 @@ _gcry_cipher_cbc_encrypt (gcry_cipher_hd - - for (n=0; n < nblocks; n++ ) - { -- buf_xor (outbuf, inbuf, 
ivp, blocksize); -+ cipher_block_xor (outbuf, inbuf, ivp, blocksize); - nburn = enc_fn ( &c->context.c, outbuf, outbuf ); - burn = nburn > burn ? nburn : burn; - ivp = outbuf; -@@ -116,7 +116,7 @@ _gcry_cipher_cbc_encrypt (gcry_cipher_hd - - nburn = enc_fn (&c->context.c, outbuf, outbuf); - burn = nburn > burn ? nburn : burn; -- buf_cpy (c->u_iv.iv, outbuf, blocksize); -+ cipher_block_cpy (c->u_iv.iv, outbuf, blocksize); - } - - if (burn > 0) -@@ -158,7 +158,7 @@ _gcry_cipher_cbc_decrypt (gcry_cipher_hd - nblocks--; - if ((inbuflen % blocksize) == 0) - nblocks--; -- buf_cpy (c->lastiv, c->u_iv.iv, blocksize); -+ cipher_block_cpy (c->lastiv, c->u_iv.iv, blocksize); - } - - if (c->bulk.cbc_dec) -@@ -176,7 +176,8 @@ _gcry_cipher_cbc_decrypt (gcry_cipher_hd - storage here because it is not used otherwise. */ - nburn = dec_fn ( &c->context.c, c->lastiv, inbuf ); - burn = nburn > burn ? nburn : burn; -- buf_xor_n_copy_2(outbuf, c->lastiv, c->u_iv.iv, inbuf, blocksize); -+ cipher_block_xor_n_copy_2 (outbuf, c->lastiv, c->u_iv.iv, inbuf, -+ blocksize); - inbuf += blocksize; - outbuf += blocksize; - } -@@ -191,7 +192,7 @@ _gcry_cipher_cbc_decrypt (gcry_cipher_hd - else - restbytes = inbuflen % blocksize; - -- buf_cpy (c->lastiv, c->u_iv.iv, blocksize ); /* Save Cn-2. */ -+ cipher_block_cpy (c->lastiv, c->u_iv.iv, blocksize ); /* Save Cn-2. */ - buf_cpy (c->u_iv.iv, inbuf + blocksize, restbytes ); /* Save Cn. */ - - nburn = dec_fn ( &c->context.c, outbuf, inbuf ); -@@ -203,7 +204,7 @@ _gcry_cipher_cbc_decrypt (gcry_cipher_hd - c->u_iv.iv[i] = outbuf[i]; - nburn = dec_fn (&c->context.c, outbuf, c->u_iv.iv); - burn = nburn > burn ? nburn : burn; -- buf_xor(outbuf, outbuf, c->lastiv, blocksize); -+ cipher_block_xor(outbuf, outbuf, c->lastiv, blocksize); - /* c->lastiv is now really lastlastiv, does this matter? 
*/ - } - -diff -up libgcrypt-1.8.5/cipher/cipher-ccm.c.aes-perf libgcrypt-1.8.5/cipher/cipher-ccm.c ---- libgcrypt-1.8.5/cipher/cipher-ccm.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/cipher-ccm.c 2020-04-22 18:29:41.666862306 +0200 -@@ -67,7 +67,8 @@ do_cbc_mac (gcry_cipher_hd_t c, const un - if (unused > 0) - { - /* Process one block from macbuf. */ -- buf_xor(c->u_iv.iv, c->u_iv.iv, c->u_mode.ccm.macbuf, blocksize); -+ cipher_block_xor(c->u_iv.iv, c->u_iv.iv, c->u_mode.ccm.macbuf, -+ blocksize); - set_burn (burn, enc_fn ( &c->context.c, c->u_iv.iv, c->u_iv.iv )); - - unused = 0; -@@ -86,7 +87,7 @@ do_cbc_mac (gcry_cipher_hd_t c, const un - { - while (inlen >= blocksize) - { -- buf_xor(c->u_iv.iv, c->u_iv.iv, inbuf, blocksize); -+ cipher_block_xor(c->u_iv.iv, c->u_iv.iv, inbuf, blocksize); - - set_burn (burn, enc_fn ( &c->context.c, c->u_iv.iv, c->u_iv.iv )); - -@@ -272,7 +273,7 @@ _gcry_cipher_ccm_tag (gcry_cipher_hd_t c - burn = do_cbc_mac (c, NULL, 0, 1); /* Perform final padding. */ - - /* Add S_0 */ -- buf_xor (c->u_iv.iv, c->u_iv.iv, c->u_mode.ccm.s0, 16); -+ cipher_block_xor (c->u_iv.iv, c->u_iv.iv, c->u_mode.ccm.s0, 16); - - wipememory (c->u_ctr.ctr, 16); - wipememory (c->u_mode.ccm.s0, 16); -diff -up libgcrypt-1.8.5/cipher/cipher-cfb.c.aes-perf libgcrypt-1.8.5/cipher/cipher-cfb.c ---- libgcrypt-1.8.5/cipher/cipher-cfb.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/cipher-cfb.c 2020-04-22 18:29:41.667862287 +0200 -@@ -91,7 +91,7 @@ _gcry_cipher_cfb_encrypt (gcry_cipher_hd - nburn = enc_fn ( &c->context.c, c->u_iv.iv, c->u_iv.iv ); - burn = nburn > burn ? nburn : burn; - /* XOR the input with the IV and store input into IV. 
*/ -- buf_xor_2dst(outbuf, c->u_iv.iv, inbuf, blocksize); -+ cipher_block_xor_2dst(outbuf, c->u_iv.iv, inbuf, blocksize); - outbuf += blocksize; - inbuf += blocksize; - inbuflen -= blocksize; -@@ -101,11 +101,11 @@ _gcry_cipher_cfb_encrypt (gcry_cipher_hd - if ( inbuflen >= blocksize ) - { - /* Save the current IV and then encrypt the IV. */ -- buf_cpy( c->lastiv, c->u_iv.iv, blocksize ); -+ cipher_block_cpy( c->lastiv, c->u_iv.iv, blocksize ); - nburn = enc_fn ( &c->context.c, c->u_iv.iv, c->u_iv.iv ); - burn = nburn > burn ? nburn : burn; - /* XOR the input with the IV and store input into IV */ -- buf_xor_2dst(outbuf, c->u_iv.iv, inbuf, blocksize); -+ cipher_block_xor_2dst(outbuf, c->u_iv.iv, inbuf, blocksize); - outbuf += blocksize; - inbuf += blocksize; - inbuflen -= blocksize; -@@ -113,7 +113,7 @@ _gcry_cipher_cfb_encrypt (gcry_cipher_hd - if ( inbuflen ) - { - /* Save the current IV and then encrypt the IV. */ -- buf_cpy( c->lastiv, c->u_iv.iv, blocksize ); -+ cipher_block_cpy( c->lastiv, c->u_iv.iv, blocksize ); - nburn = enc_fn ( &c->context.c, c->u_iv.iv, c->u_iv.iv ); - burn = nburn > burn ? nburn : burn; - c->unused = blocksize; -@@ -193,7 +193,7 @@ _gcry_cipher_cfb_decrypt (gcry_cipher_hd - nburn = enc_fn ( &c->context.c, c->u_iv.iv, c->u_iv.iv ); - burn = nburn > burn ? nburn : burn; - /* XOR the input with the IV and store input into IV. */ -- buf_xor_n_copy(outbuf, c->u_iv.iv, inbuf, blocksize); -+ cipher_block_xor_n_copy(outbuf, c->u_iv.iv, inbuf, blocksize); - outbuf += blocksize; - inbuf += blocksize; - inbuflen -= blocksize; -@@ -203,11 +203,11 @@ _gcry_cipher_cfb_decrypt (gcry_cipher_hd - if (inbuflen >= blocksize ) - { - /* Save the current IV and then encrypt the IV. */ -- buf_cpy ( c->lastiv, c->u_iv.iv, blocksize); -+ cipher_block_cpy ( c->lastiv, c->u_iv.iv, blocksize); - nburn = enc_fn ( &c->context.c, c->u_iv.iv, c->u_iv.iv ); - burn = nburn > burn ? 
nburn : burn; - /* XOR the input with the IV and store input into IV */ -- buf_xor_n_copy(outbuf, c->u_iv.iv, inbuf, blocksize); -+ cipher_block_xor_n_copy(outbuf, c->u_iv.iv, inbuf, blocksize); - outbuf += blocksize; - inbuf += blocksize; - inbuflen -= blocksize; -@@ -216,7 +216,7 @@ _gcry_cipher_cfb_decrypt (gcry_cipher_hd - if (inbuflen) - { - /* Save the current IV and then encrypt the IV. */ -- buf_cpy ( c->lastiv, c->u_iv.iv, blocksize ); -+ cipher_block_cpy ( c->lastiv, c->u_iv.iv, blocksize ); - nburn = enc_fn ( &c->context.c, c->u_iv.iv, c->u_iv.iv ); - burn = nburn > burn ? nburn : burn; - c->unused = blocksize; -diff -up libgcrypt-1.8.5/cipher/cipher-cmac.c.aes-perf libgcrypt-1.8.5/cipher/cipher-cmac.c ---- libgcrypt-1.8.5/cipher/cipher-cmac.c.aes-perf 2020-04-22 18:29:41.643862745 +0200 -+++ libgcrypt-1.8.5/cipher/cipher-cmac.c 2020-04-22 18:29:41.667862287 +0200 -@@ -63,7 +63,7 @@ cmac_write (gcry_cipher_hd_t c, const by - for (; inlen && c->unused < blocksize; inlen--) - c->lastiv[c->unused++] = *inbuf++; - -- buf_xor (c->u_iv.iv, c->u_iv.iv, c->lastiv, blocksize); -+ cipher_block_xor (c->u_iv.iv, c->u_iv.iv, c->lastiv, blocksize); - set_burn (burn, enc_fn (&c->context.c, c->u_iv.iv, c->u_iv.iv)); - - c->unused = 0; -@@ -83,7 +83,7 @@ cmac_write (gcry_cipher_hd_t c, const by - else - while (inlen > blocksize) - { -- buf_xor (c->u_iv.iv, c->u_iv.iv, inbuf, blocksize); -+ cipher_block_xor (c->u_iv.iv, c->u_iv.iv, inbuf, blocksize); - set_burn (burn, enc_fn (&c->context.c, c->u_iv.iv, c->u_iv.iv)); - inlen -= blocksize; - inbuf += blocksize; -@@ -174,9 +174,9 @@ cmac_final (gcry_cipher_hd_t c) - c->lastiv[count++] = 0; - } - -- buf_xor (c->lastiv, c->lastiv, subkey, blocksize); -+ cipher_block_xor (c->lastiv, c->lastiv, subkey, blocksize); - -- buf_xor (c->u_iv.iv, c->u_iv.iv, c->lastiv, blocksize); -+ cipher_block_xor (c->u_iv.iv, c->u_iv.iv, c->lastiv, blocksize); - burn = c->spec->encrypt (&c->context.c, c->u_iv.iv, c->u_iv.iv); - if (burn) - 
_gcry_burn_stack (burn + 4 * sizeof (void *)); -diff -up libgcrypt-1.8.5/cipher/cipher-ctr.c.aes-perf libgcrypt-1.8.5/cipher/cipher-ctr.c ---- libgcrypt-1.8.5/cipher/cipher-ctr.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/cipher-ctr.c 2020-04-22 18:29:41.667862287 +0200 -@@ -81,24 +81,34 @@ _gcry_cipher_ctr_encrypt (gcry_cipher_hd - { - unsigned char tmp[MAX_BLOCKSIZE]; - -- do { -- nburn = enc_fn (&c->context.c, tmp, c->u_ctr.ctr); -- burn = nburn > burn ? nburn : burn; -- -- for (i = blocksize; i > 0; i--) -- { -- c->u_ctr.ctr[i-1]++; -- if (c->u_ctr.ctr[i-1] != 0) -- break; -- } -- -- n = blocksize < inbuflen ? blocksize : inbuflen; -- buf_xor(outbuf, inbuf, tmp, n); -- -- inbuflen -= n; -- outbuf += n; -- inbuf += n; -- } while (inbuflen); -+ do -+ { -+ nburn = enc_fn (&c->context.c, tmp, c->u_ctr.ctr); -+ burn = nburn > burn ? nburn : burn; -+ -+ for (i = blocksize; i > 0; i--) -+ { -+ c->u_ctr.ctr[i-1]++; -+ if (c->u_ctr.ctr[i-1] != 0) -+ break; -+ } -+ -+ if (inbuflen < blocksize) -+ break; -+ n = blocksize; -+ cipher_block_xor(outbuf, inbuf, tmp, blocksize); -+ -+ inbuflen -= n; -+ outbuf += n; -+ inbuf += n; -+ } -+ while (inbuflen); -+ -+ if (inbuflen) -+ { -+ n = inbuflen; -+ buf_xor(outbuf, inbuf, tmp, inbuflen); -+ } - - /* Save the unused bytes of the counter. 
*/ - c->unused = blocksize - n; -diff -up libgcrypt-1.8.5/cipher/cipher-gcm.c.aes-perf libgcrypt-1.8.5/cipher/cipher-gcm.c ---- libgcrypt-1.8.5/cipher/cipher-gcm.c.aes-perf 2018-04-17 17:27:25.000000000 +0200 -+++ libgcrypt-1.8.5/cipher/cipher-gcm.c 2020-04-22 18:29:41.667862287 +0200 -@@ -150,7 +150,7 @@ do_ghash (unsigned char *result, const u - u32 A; - int i; - -- buf_xor (V, result, buf, 16); -+ cipher_block_xor (V, result, buf, 16); - V[0] = be_bswap64 (V[0]); - V[1] = be_bswap64 (V[1]); - -@@ -259,7 +259,7 @@ do_ghash (unsigned char *result, const u - u32 T[3]; - int i; - -- buf_xor (V, result, buf, 16); /* V is big-endian */ -+ cipher_block_xor (V, result, buf, 16); /* V is big-endian */ - - /* First round can be manually tweaked based on fact that 'tmp' is zero. */ - i = 15; -@@ -342,7 +342,7 @@ do_ghash (unsigned char *hsub, unsigned - #else - unsigned long T[4]; - -- buf_xor (V, result, buf, 16); -+ cipher_block_xor (V, result, buf, 16); - for (i = 0; i < 4; i++) - { - V[i] = (V[i] & 0x00ff00ff) << 8 | (V[i] & 0xff00ff00) >> 8; -@@ -358,7 +358,7 @@ do_ghash (unsigned char *hsub, unsigned - for (j = 0x80; j; j >>= 1) - { - if (hsub[i] & j) -- buf_xor (p, p, V, 16); -+ cipher_block_xor (p, p, V, 16); - if (bshift (V)) - V[0] ^= 0xe1000000; - } -@@ -598,7 +598,7 @@ gcm_ctr_encrypt (gcry_cipher_hd_t c, byt - } - - fix_ctr = 1; -- buf_cpy(ctr_copy, c->u_ctr.ctr, GCRY_GCM_BLOCK_LEN); -+ cipher_block_cpy(ctr_copy, c->u_ctr.ctr, GCRY_GCM_BLOCK_LEN); - } - } - -@@ -928,8 +928,8 @@ _gcry_cipher_gcm_tag (gcry_cipher_hd_t c - /* Add bitlengths to tag. 
*/ - do_ghash_buf(c, c->u_mode.gcm.u_tag.tag, (byte*)bitlengths, - GCRY_GCM_BLOCK_LEN, 1); -- buf_xor (c->u_mode.gcm.u_tag.tag, c->u_mode.gcm.tagiv, -- c->u_mode.gcm.u_tag.tag, GCRY_GCM_BLOCK_LEN); -+ cipher_block_xor (c->u_mode.gcm.u_tag.tag, c->u_mode.gcm.tagiv, -+ c->u_mode.gcm.u_tag.tag, GCRY_GCM_BLOCK_LEN); - c->marks.tag = 1; - - wipememory (bitlengths, sizeof (bitlengths)); -diff -up libgcrypt-1.8.5/cipher/cipher-gcm-intel-pclmul.c.aes-perf libgcrypt-1.8.5/cipher/cipher-gcm-intel-pclmul.c ---- libgcrypt-1.8.5/cipher/cipher-gcm-intel-pclmul.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/cipher-gcm-intel-pclmul.c 2020-04-22 18:29:41.668862268 +0200 -@@ -248,7 +248,8 @@ static inline void gfmul_pclmul_aggr4(vo - void - _gcry_ghash_setup_intel_pclmul (gcry_cipher_hd_t c) - { -- u64 tmp[2]; -+ static const unsigned char be_mask[16] __attribute__ ((aligned (16))) = -+ { 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 }; - #if defined(__x86_64__) && defined(__WIN64__) - char win64tmp[3 * 16]; - -@@ -262,15 +263,19 @@ _gcry_ghash_setup_intel_pclmul (gcry_cip - #endif - - /* Swap endianness of hsub. 
*/ -- tmp[0] = buf_get_be64(c->u_mode.gcm.u_ghash_key.key + 8); -- tmp[1] = buf_get_be64(c->u_mode.gcm.u_ghash_key.key + 0); -- buf_cpy (c->u_mode.gcm.u_ghash_key.key, tmp, GCRY_GCM_BLOCK_LEN); -+ asm volatile ("movdqu (%[key]), %%xmm0\n\t" -+ "pshufb %[be_mask], %%xmm0\n\t" -+ "movdqu %%xmm0, (%[key])\n\t" -+ : -+ : [key] "r" (c->u_mode.gcm.u_ghash_key.key), -+ [be_mask] "m" (*be_mask) -+ : "memory"); - - #ifdef __x86_64__ -- asm volatile ("movdqu %[h_1], %%xmm0\n\t" -- "movdqa %%xmm0, %%xmm1\n\t" -+ asm volatile ("movdqa %%xmm0, %%xmm1\n\t" -+ : - : -- : [h_1] "m" (*tmp)); -+ : "memory"); - - gfmul_pclmul (); /* H•H => H² */ - -@@ -324,8 +329,6 @@ _gcry_ghash_setup_intel_pclmul (gcry_cip - ::: "cc" ); - #endif - #endif -- -- wipememory (tmp, sizeof(tmp)); - } - - -diff -up libgcrypt-1.8.5/cipher/cipher-internal.h.aes-perf libgcrypt-1.8.5/cipher/cipher-internal.h ---- libgcrypt-1.8.5/cipher/cipher-internal.h.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/cipher-internal.h 2020-04-22 18:29:41.668862268 +0200 -@@ -121,6 +121,25 @@ struct gcry_cipher_handle - interface does not easily allow to retrieve this value. */ - int algo; - -+ /* A structure with function pointers for mode operations. 
*/ -+ struct { -+ gcry_err_code_t (*encrypt)(gcry_cipher_hd_t c, -+ unsigned char *outbuf, size_t outbuflen, -+ const unsigned char *inbuf, size_t inbuflen); -+ gcry_err_code_t (*decrypt)(gcry_cipher_hd_t c, -+ unsigned char *outbuf, size_t outbuflen, -+ const unsigned char *inbuf, size_t inbuflen); -+ gcry_err_code_t (*setiv)(gcry_cipher_hd_t c, const unsigned char *iv, -+ size_t ivlen); -+ -+ gcry_err_code_t (*authenticate)(gcry_cipher_hd_t c, -+ const unsigned char *abuf, size_t abuflen); -+ gcry_err_code_t (*get_tag)(gcry_cipher_hd_t c, unsigned char *outtag, -+ size_t taglen); -+ gcry_err_code_t (*check_tag)(gcry_cipher_hd_t c, const unsigned char *intag, -+ size_t taglen); -+ } mode_ops; -+ - /* A structure with function pointers for bulk operations. Due to - limitations of the module system (we don't want to change the - API) we need to keep these function pointers here. The cipher -@@ -146,7 +165,7 @@ struct gcry_cipher_handle - const void *inbuf_arg, size_t nblocks, int encrypt); - size_t (*ocb_auth)(gcry_cipher_hd_t c, const void *abuf_arg, - size_t nblocks); -- void (*xts_crypt)(gcry_cipher_hd_t c, unsigned char *tweak, -+ void (*xts_crypt)(void *context, unsigned char *tweak, - void *outbuf_arg, const void *inbuf_arg, - size_t nblocks, int encrypt); - } bulk; -@@ -479,9 +498,12 @@ gcry_err_code_t _gcry_cipher_ocb_check_t - - - /*-- cipher-xts.c --*/ --gcry_err_code_t _gcry_cipher_xts_crypt -+gcry_err_code_t _gcry_cipher_xts_encrypt - /* */ (gcry_cipher_hd_t c, unsigned char *outbuf, size_t outbuflen, -- const unsigned char *inbuf, size_t inbuflen, int encrypt); -+ const unsigned char *inbuf, size_t inbuflen); -+gcry_err_code_t _gcry_cipher_xts_decrypt -+/* */ (gcry_cipher_hd_t c, unsigned char *outbuf, size_t outbuflen, -+ const unsigned char *inbuf, size_t inbuflen); - - - /* Return the L-value for block N. 
Note: 'cipher_ocb.c' ensures that N -@@ -506,4 +528,145 @@ ocb_get_l (gcry_cipher_hd_t c, u64 n) - return c->u_mode.ocb.L[ntz]; - } - -+/* Optimized function for cipher block copying */ -+static inline void -+cipher_block_cpy(void *_dst, const void *_src, size_t blocksize) -+{ -+ byte *dst = _dst; -+ const byte *src = _src; -+ u64 s[2]; -+ -+ if (blocksize == 8) -+ { -+ buf_put_he64(dst + 0, buf_get_he64(src + 0)); -+ } -+ else /* blocksize == 16 */ -+ { -+ s[0] = buf_get_he64(src + 0); -+ s[1] = buf_get_he64(src + 8); -+ buf_put_he64(dst + 0, s[0]); -+ buf_put_he64(dst + 8, s[1]); -+ } -+} -+ -+ -+/* Optimized function for cipher block xoring */ -+static inline void -+cipher_block_xor(void *_dst, const void *_src1, const void *_src2, -+ size_t blocksize) -+{ -+ byte *dst = _dst; -+ const byte *src1 = _src1; -+ const byte *src2 = _src2; -+ u64 s1[2]; -+ u64 s2[2]; -+ -+ if (blocksize == 8) -+ { -+ buf_put_he64(dst + 0, buf_get_he64(src1 + 0) ^ buf_get_he64(src2 + 0)); -+ } -+ else /* blocksize == 16 */ -+ { -+ s1[0] = buf_get_he64(src1 + 0); -+ s1[1] = buf_get_he64(src1 + 8); -+ s2[0] = buf_get_he64(src2 + 0); -+ s2[1] = buf_get_he64(src2 + 8); -+ buf_put_he64(dst + 0, s1[0] ^ s2[0]); -+ buf_put_he64(dst + 8, s1[1] ^ s2[1]); -+ } -+} -+ -+ -+/* Optimized function for in-place cipher block xoring */ -+static inline void -+cipher_block_xor_1(void *_dst, const void *_src, size_t blocksize) -+{ -+ cipher_block_xor (_dst, _dst, _src, blocksize); -+} -+ -+ -+/* Optimized function for cipher block xoring with two destination cipher -+ blocks. Used mainly by CFB mode encryption. 
*/ -+static inline void -+cipher_block_xor_2dst(void *_dst1, void *_dst2, const void *_src, -+ size_t blocksize) -+{ -+ byte *dst1 = _dst1; -+ byte *dst2 = _dst2; -+ const byte *src = _src; -+ u64 d2[2]; -+ u64 s[2]; -+ -+ if (blocksize == 8) -+ { -+ d2[0] = buf_get_he64(dst2 + 0) ^ buf_get_he64(src + 0); -+ buf_put_he64(dst2 + 0, d2[0]); -+ buf_put_he64(dst1 + 0, d2[0]); -+ } -+ else /* blocksize == 16 */ -+ { -+ s[0] = buf_get_he64(src + 0); -+ s[1] = buf_get_he64(src + 8); -+ d2[0] = buf_get_he64(dst2 + 0); -+ d2[1] = buf_get_he64(dst2 + 8); -+ d2[0] = d2[0] ^ s[0]; -+ d2[1] = d2[1] ^ s[1]; -+ buf_put_he64(dst2 + 0, d2[0]); -+ buf_put_he64(dst2 + 8, d2[1]); -+ buf_put_he64(dst1 + 0, d2[0]); -+ buf_put_he64(dst1 + 8, d2[1]); -+ } -+} -+ -+ -+/* Optimized function for combined cipher block xoring and copying. -+ Used by mainly CBC mode decryption. */ -+static inline void -+cipher_block_xor_n_copy_2(void *_dst_xor, const void *_src_xor, -+ void *_srcdst_cpy, const void *_src_cpy, -+ size_t blocksize) -+{ -+ byte *dst_xor = _dst_xor; -+ byte *srcdst_cpy = _srcdst_cpy; -+ const byte *src_xor = _src_xor; -+ const byte *src_cpy = _src_cpy; -+ u64 sc[2]; -+ u64 sx[2]; -+ u64 sdc[2]; -+ -+ if (blocksize == 8) -+ { -+ sc[0] = buf_get_he64(src_cpy + 0); -+ buf_put_he64(dst_xor + 0, -+ buf_get_he64(srcdst_cpy + 0) ^ buf_get_he64(src_xor + 0)); -+ buf_put_he64(srcdst_cpy + 0, sc[0]); -+ } -+ else /* blocksize == 16 */ -+ { -+ sc[0] = buf_get_he64(src_cpy + 0); -+ sc[1] = buf_get_he64(src_cpy + 8); -+ sx[0] = buf_get_he64(src_xor + 0); -+ sx[1] = buf_get_he64(src_xor + 8); -+ sdc[0] = buf_get_he64(srcdst_cpy + 0); -+ sdc[1] = buf_get_he64(srcdst_cpy + 8); -+ sx[0] ^= sdc[0]; -+ sx[1] ^= sdc[1]; -+ buf_put_he64(dst_xor + 0, sx[0]); -+ buf_put_he64(dst_xor + 8, sx[1]); -+ buf_put_he64(srcdst_cpy + 0, sc[0]); -+ buf_put_he64(srcdst_cpy + 8, sc[1]); -+ } -+} -+ -+ -+/* Optimized function for combined cipher block xoring and copying. -+ Used by mainly CFB mode decryption. 
*/ -+static inline void -+cipher_block_xor_n_copy(void *_dst_xor, void *_srcdst_cpy, const void *_src, -+ size_t blocksize) -+{ -+ cipher_block_xor_n_copy_2(_dst_xor, _src, _srcdst_cpy, _src, blocksize); -+} -+ -+ - #endif /*G10_CIPHER_INTERNAL_H*/ -diff -up libgcrypt-1.8.5/cipher/cipher-ocb.c.aes-perf libgcrypt-1.8.5/cipher/cipher-ocb.c ---- libgcrypt-1.8.5/cipher/cipher-ocb.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/cipher-ocb.c 2020-04-22 18:29:41.668862268 +0200 -@@ -82,7 +82,7 @@ static void - double_block_cpy (unsigned char *d, const unsigned char *s) - { - if (d != s) -- buf_cpy (d, s, OCB_BLOCK_LEN); -+ cipher_block_cpy (d, s, OCB_BLOCK_LEN); - double_block (d); - } - -@@ -181,8 +181,8 @@ _gcry_cipher_ocb_set_nonce (gcry_cipher_ - nburn = c->spec->encrypt (&c->context.c, ktop, ktop); - burn = nburn > burn ? nburn : burn; - /* Stretch = Ktop || (Ktop[1..64] xor Ktop[9..72]) */ -- buf_cpy (stretch, ktop, OCB_BLOCK_LEN); -- buf_xor (stretch + OCB_BLOCK_LEN, ktop, ktop + 1, 8); -+ cipher_block_cpy (stretch, ktop, OCB_BLOCK_LEN); -+ cipher_block_xor (stretch + OCB_BLOCK_LEN, ktop, ktop + 1, 8); - /* Offset_0 = Stretch[1+bottom..128+bottom] - (We use the IV field to store the offset) */ - bit_copy (c->u_iv.iv, stretch, bottom, OCB_BLOCK_LEN); -@@ -267,18 +267,18 @@ _gcry_cipher_ocb_authenticate (gcry_ciph - } - else - { -- buf_cpy (l_tmp, ocb_get_l (c, c->u_mode.ocb.aad_nblocks), -- OCB_BLOCK_LEN); -+ cipher_block_cpy (l_tmp, ocb_get_l (c, c->u_mode.ocb.aad_nblocks), -+ OCB_BLOCK_LEN); - } - - /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -- buf_xor_1 (c->u_mode.ocb.aad_offset, l_tmp, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_mode.ocb.aad_offset, l_tmp, OCB_BLOCK_LEN); - /* Sum_i = Sum_{i-1} xor ENCIPHER(K, A_i xor Offset_i) */ -- buf_xor (l_tmp, c->u_mode.ocb.aad_offset, -- c->u_mode.ocb.aad_leftover, OCB_BLOCK_LEN); -+ cipher_block_xor (l_tmp, c->u_mode.ocb.aad_offset, -+ c->u_mode.ocb.aad_leftover, OCB_BLOCK_LEN); - nburn = 
c->spec->encrypt (&c->context.c, l_tmp, l_tmp); - burn = nburn > burn ? nburn : burn; -- buf_xor_1 (c->u_mode.ocb.aad_sum, l_tmp, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_mode.ocb.aad_sum, l_tmp, OCB_BLOCK_LEN); - - c->u_mode.ocb.aad_nleftover = 0; - } -@@ -309,12 +309,13 @@ _gcry_cipher_ocb_authenticate (gcry_ciph - ocb_get_L_big(c, c->u_mode.ocb.aad_nblocks, l_tmp); - - /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -- buf_xor_1 (c->u_mode.ocb.aad_offset, l_tmp, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_mode.ocb.aad_offset, l_tmp, OCB_BLOCK_LEN); - /* Sum_i = Sum_{i-1} xor ENCIPHER(K, A_i xor Offset_i) */ -- buf_xor (l_tmp, c->u_mode.ocb.aad_offset, abuf, OCB_BLOCK_LEN); -+ cipher_block_xor (l_tmp, c->u_mode.ocb.aad_offset, abuf, -+ OCB_BLOCK_LEN); - nburn = c->spec->encrypt (&c->context.c, l_tmp, l_tmp); - burn = nburn > burn ? nburn : burn; -- buf_xor_1 (c->u_mode.ocb.aad_sum, l_tmp, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_mode.ocb.aad_sum, l_tmp, OCB_BLOCK_LEN); - - abuf += OCB_BLOCK_LEN; - abuflen -= OCB_BLOCK_LEN; -@@ -349,14 +350,15 @@ _gcry_cipher_ocb_authenticate (gcry_ciph - gcry_assert(c->u_mode.ocb.aad_nblocks & table_size_mask); - - /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -- buf_xor_1 (c->u_mode.ocb.aad_offset, -- ocb_get_l (c, c->u_mode.ocb.aad_nblocks), -- OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_mode.ocb.aad_offset, -+ ocb_get_l (c, c->u_mode.ocb.aad_nblocks), -+ OCB_BLOCK_LEN); - /* Sum_i = Sum_{i-1} xor ENCIPHER(K, A_i xor Offset_i) */ -- buf_xor (l_tmp, c->u_mode.ocb.aad_offset, abuf, OCB_BLOCK_LEN); -+ cipher_block_xor (l_tmp, c->u_mode.ocb.aad_offset, abuf, -+ OCB_BLOCK_LEN); - nburn = c->spec->encrypt (&c->context.c, l_tmp, l_tmp); - burn = nburn > burn ? 
nburn : burn; -- buf_xor_1 (c->u_mode.ocb.aad_sum, l_tmp, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_mode.ocb.aad_sum, l_tmp, OCB_BLOCK_LEN); - - abuf += OCB_BLOCK_LEN; - abuflen -= OCB_BLOCK_LEN; -@@ -397,18 +399,18 @@ ocb_aad_finalize (gcry_cipher_hd_t c) - if (c->u_mode.ocb.aad_nleftover) - { - /* Offset_* = Offset_m xor L_* */ -- buf_xor_1 (c->u_mode.ocb.aad_offset, -- c->u_mode.ocb.L_star, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_mode.ocb.aad_offset, -+ c->u_mode.ocb.L_star, OCB_BLOCK_LEN); - /* CipherInput = (A_* || 1 || zeros(127-bitlen(A_*))) xor Offset_* */ - buf_cpy (l_tmp, c->u_mode.ocb.aad_leftover, c->u_mode.ocb.aad_nleftover); - memset (l_tmp + c->u_mode.ocb.aad_nleftover, 0, - OCB_BLOCK_LEN - c->u_mode.ocb.aad_nleftover); - l_tmp[c->u_mode.ocb.aad_nleftover] = 0x80; -- buf_xor_1 (l_tmp, c->u_mode.ocb.aad_offset, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (l_tmp, c->u_mode.ocb.aad_offset, OCB_BLOCK_LEN); - /* Sum = Sum_m xor ENCIPHER(K, CipherInput) */ - nburn = c->spec->encrypt (&c->context.c, l_tmp, l_tmp); - burn = nburn > burn ? nburn : burn; -- buf_xor_1 (c->u_mode.ocb.aad_sum, l_tmp, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_mode.ocb.aad_sum, l_tmp, OCB_BLOCK_LEN); - - c->u_mode.ocb.aad_nleftover = 0; - } -@@ -431,7 +433,7 @@ ocb_checksum (unsigned char *chksum, con - while (nblks > 0) - { - /* Checksum_i = Checksum_{i-1} xor P_i */ -- buf_xor_1(chksum, plainbuf, OCB_BLOCK_LEN); -+ cipher_block_xor_1(chksum, plainbuf, OCB_BLOCK_LEN); - - plainbuf += OCB_BLOCK_LEN; - nblks--; -@@ -491,12 +493,12 @@ ocb_crypt (gcry_cipher_hd_t c, int encry - } - - /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -- buf_xor_1 (c->u_iv.iv, l_tmp, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_iv.iv, l_tmp, OCB_BLOCK_LEN); - /* C_i = Offset_i xor ENCIPHER(K, P_i xor Offset_i) */ -- buf_xor (outbuf, c->u_iv.iv, inbuf, OCB_BLOCK_LEN); -+ cipher_block_xor (outbuf, c->u_iv.iv, inbuf, OCB_BLOCK_LEN); - nburn = crypt_fn (&c->context.c, outbuf, outbuf); - burn = nburn > burn ? 
nburn : burn; -- buf_xor_1 (outbuf, c->u_iv.iv, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (outbuf, c->u_iv.iv, OCB_BLOCK_LEN); - - if (!encrypt) - { -@@ -551,14 +553,14 @@ ocb_crypt (gcry_cipher_hd_t c, int encry - gcry_assert(c->u_mode.ocb.data_nblocks & table_size_mask); - - /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -- buf_xor_1 (c->u_iv.iv, -- ocb_get_l (c, c->u_mode.ocb.data_nblocks), -- OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_iv.iv, -+ ocb_get_l (c, c->u_mode.ocb.data_nblocks), -+ OCB_BLOCK_LEN); - /* C_i = Offset_i xor ENCIPHER(K, P_i xor Offset_i) */ -- buf_xor (outbuf, c->u_iv.iv, inbuf, OCB_BLOCK_LEN); -+ cipher_block_xor (outbuf, c->u_iv.iv, inbuf, OCB_BLOCK_LEN); - nburn = crypt_fn (&c->context.c, outbuf, outbuf); - burn = nburn > burn ? nburn : burn; -- buf_xor_1 (outbuf, c->u_iv.iv, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (outbuf, c->u_iv.iv, OCB_BLOCK_LEN); - - inbuf += OCB_BLOCK_LEN; - inbuflen -= OCB_BLOCK_LEN; -@@ -584,7 +586,7 @@ ocb_crypt (gcry_cipher_hd_t c, int encry - unsigned char pad[OCB_BLOCK_LEN]; - - /* Offset_* = Offset_m xor L_* */ -- buf_xor_1 (c->u_iv.iv, c->u_mode.ocb.L_star, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_iv.iv, c->u_mode.ocb.L_star, OCB_BLOCK_LEN); - /* Pad = ENCIPHER(K, Offset_*) */ - nburn = c->spec->encrypt (&c->context.c, pad, c->u_iv.iv); - burn = nburn > burn ? 
nburn : burn; -@@ -596,7 +598,7 @@ ocb_crypt (gcry_cipher_hd_t c, int encry - buf_cpy (l_tmp, inbuf, inbuflen); - memset (l_tmp + inbuflen, 0, OCB_BLOCK_LEN - inbuflen); - l_tmp[inbuflen] = 0x80; -- buf_xor_1 (c->u_ctr.ctr, l_tmp, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_ctr.ctr, l_tmp, OCB_BLOCK_LEN); - /* C_* = P_* xor Pad[1..bitlen(P_*)] */ - buf_xor (outbuf, inbuf, pad, inbuflen); - } -@@ -604,13 +606,13 @@ ocb_crypt (gcry_cipher_hd_t c, int encry - { - /* P_* = C_* xor Pad[1..bitlen(C_*)] */ - /* Checksum_* = Checksum_m xor (P_* || 1 || zeros(127-bitlen(P_*))) */ -- buf_cpy (l_tmp, pad, OCB_BLOCK_LEN); -+ cipher_block_cpy (l_tmp, pad, OCB_BLOCK_LEN); - buf_cpy (l_tmp, inbuf, inbuflen); -- buf_xor_1 (l_tmp, pad, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (l_tmp, pad, OCB_BLOCK_LEN); - l_tmp[inbuflen] = 0x80; - buf_cpy (outbuf, l_tmp, inbuflen); - -- buf_xor_1 (c->u_ctr.ctr, l_tmp, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_ctr.ctr, l_tmp, OCB_BLOCK_LEN); - } - } - -@@ -618,8 +620,10 @@ ocb_crypt (gcry_cipher_hd_t c, int encry - if (c->marks.finalize) - { - /* Tag = ENCIPHER(K, Checksum xor Offset xor L_$) xor HASH(K,A) */ -- buf_xor (c->u_mode.ocb.tag, c->u_ctr.ctr, c->u_iv.iv, OCB_BLOCK_LEN); -- buf_xor_1 (c->u_mode.ocb.tag, c->u_mode.ocb.L_dollar, OCB_BLOCK_LEN); -+ cipher_block_xor (c->u_mode.ocb.tag, c->u_ctr.ctr, c->u_iv.iv, -+ OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_mode.ocb.tag, c->u_mode.ocb.L_dollar, -+ OCB_BLOCK_LEN); - nburn = c->spec->encrypt (&c->context.c, - c->u_mode.ocb.tag, c->u_mode.ocb.tag); - burn = nburn > burn ? 
nburn : burn; -@@ -672,7 +676,8 @@ compute_tag_if_needed (gcry_cipher_hd_t - if (!c->marks.tag) - { - ocb_aad_finalize (c); -- buf_xor_1 (c->u_mode.ocb.tag, c->u_mode.ocb.aad_sum, OCB_BLOCK_LEN); -+ cipher_block_xor_1 (c->u_mode.ocb.tag, c->u_mode.ocb.aad_sum, -+ OCB_BLOCK_LEN); - c->marks.tag = 1; - } - } -diff -up libgcrypt-1.8.5/cipher/cipher-ofb.c.aes-perf libgcrypt-1.8.5/cipher/cipher-ofb.c ---- libgcrypt-1.8.5/cipher/cipher-ofb.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/cipher-ofb.c 2020-04-22 18:29:41.668862268 +0200 -@@ -76,7 +76,7 @@ _gcry_cipher_ofb_encrypt (gcry_cipher_hd - /* Encrypt the IV (and save the current one). */ - nburn = enc_fn ( &c->context.c, c->u_iv.iv, c->u_iv.iv ); - burn = nburn > burn ? nburn : burn; -- buf_xor(outbuf, c->u_iv.iv, inbuf, blocksize); -+ cipher_block_xor(outbuf, c->u_iv.iv, inbuf, blocksize); - outbuf += blocksize; - inbuf += blocksize; - inbuflen -= blocksize; -diff -up libgcrypt-1.8.5/cipher/cipher-selftest.c.aes-perf libgcrypt-1.8.5/cipher/cipher-selftest.c ---- libgcrypt-1.8.5/cipher/cipher-selftest.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/cipher-selftest.c 2020-04-22 18:29:41.669862248 +0200 -@@ -105,7 +105,7 @@ _gcry_selftest_helper_cbc (const char *c - ciphertext = plaintext2 + nblocks * blocksize; - - /* Initialize ctx */ -- if (setkey_func (ctx, key, sizeof(key)) != GPG_ERR_NO_ERROR) -+ if (setkey_func (ctx, key, sizeof(key), NULL) != GPG_ERR_NO_ERROR) - { - xfree(mem); - return "setkey failed"; -@@ -228,7 +228,7 @@ _gcry_selftest_helper_cfb (const char *c - ciphertext = plaintext2 + nblocks * blocksize; - - /* Initialize ctx */ -- if (setkey_func (ctx, key, sizeof(key)) != GPG_ERR_NO_ERROR) -+ if (setkey_func (ctx, key, sizeof(key), NULL) != GPG_ERR_NO_ERROR) - { - xfree(mem); - return "setkey failed"; -@@ -351,7 +351,7 @@ _gcry_selftest_helper_ctr (const char *c - ciphertext2 = ciphertext + nblocks * blocksize; - - /* Initialize ctx */ -- if 
(setkey_func (ctx, key, sizeof(key)) != GPG_ERR_NO_ERROR) -+ if (setkey_func (ctx, key, sizeof(key), NULL) != GPG_ERR_NO_ERROR) - { - xfree(mem); - return "setkey failed"; -diff -up libgcrypt-1.8.5/cipher/cipher-xts.c.aes-perf libgcrypt-1.8.5/cipher/cipher-xts.c ---- libgcrypt-1.8.5/cipher/cipher-xts.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/cipher-xts.c 2020-04-22 18:29:41.669862248 +0200 -@@ -93,7 +93,8 @@ _gcry_cipher_xts_crypt (gcry_cipher_hd_t - /* Use a bulk method if available. */ - if (nblocks && c->bulk.xts_crypt) - { -- c->bulk.xts_crypt (c, c->u_ctr.ctr, outbuf, inbuf, nblocks, encrypt); -+ c->bulk.xts_crypt (&c->context.c, c->u_ctr.ctr, outbuf, inbuf, nblocks, -+ encrypt); - inbuf += nblocks * GCRY_XTS_BLOCK_LEN; - outbuf += nblocks * GCRY_XTS_BLOCK_LEN; - inbuflen -= nblocks * GCRY_XTS_BLOCK_LEN; -@@ -106,10 +107,10 @@ _gcry_cipher_xts_crypt (gcry_cipher_hd_t - while (nblocks) - { - /* Xor-Encrypt/Decrypt-Xor block. */ -- buf_xor (tmp.x64, inbuf, c->u_ctr.ctr, GCRY_XTS_BLOCK_LEN); -+ cipher_block_xor (tmp.x64, inbuf, c->u_ctr.ctr, GCRY_XTS_BLOCK_LEN); - nburn = crypt_fn (&c->context.c, tmp.x1, tmp.x1); - burn = nburn > burn ? nburn : burn; -- buf_xor (outbuf, tmp.x64, c->u_ctr.ctr, GCRY_XTS_BLOCK_LEN); -+ cipher_block_xor (outbuf, tmp.x64, c->u_ctr.ctr, GCRY_XTS_BLOCK_LEN); - - outbuf += GCRY_XTS_BLOCK_LEN; - inbuf += GCRY_XTS_BLOCK_LEN; -@@ -132,10 +133,10 @@ _gcry_cipher_xts_crypt (gcry_cipher_hd_t - xts_gfmul_byA (tmp.x1, c->u_ctr.ctr); - - /* Decrypt last block first. */ -- buf_xor (outbuf, inbuf, tmp.x64, GCRY_XTS_BLOCK_LEN); -+ cipher_block_xor (outbuf, inbuf, tmp.x64, GCRY_XTS_BLOCK_LEN); - nburn = crypt_fn (&c->context.c, outbuf, outbuf); - burn = nburn > burn ? 
nburn : burn; -- buf_xor (outbuf, outbuf, tmp.x64, GCRY_XTS_BLOCK_LEN); -+ cipher_block_xor (outbuf, outbuf, tmp.x64, GCRY_XTS_BLOCK_LEN); - - inbuflen -= GCRY_XTS_BLOCK_LEN; - inbuf += GCRY_XTS_BLOCK_LEN; -@@ -146,15 +147,15 @@ _gcry_cipher_xts_crypt (gcry_cipher_hd_t - outbuf -= GCRY_XTS_BLOCK_LEN; - - /* Steal ciphertext from previous block. */ -- buf_cpy (tmp.x64, outbuf, GCRY_XTS_BLOCK_LEN); -+ cipher_block_cpy (tmp.x64, outbuf, GCRY_XTS_BLOCK_LEN); - buf_cpy (tmp.x64, inbuf, inbuflen); - buf_cpy (outbuf + GCRY_XTS_BLOCK_LEN, outbuf, inbuflen); - - /* Decrypt/Encrypt last block. */ -- buf_xor (tmp.x64, tmp.x64, c->u_ctr.ctr, GCRY_XTS_BLOCK_LEN); -+ cipher_block_xor (tmp.x64, tmp.x64, c->u_ctr.ctr, GCRY_XTS_BLOCK_LEN); - nburn = crypt_fn (&c->context.c, tmp.x1, tmp.x1); - burn = nburn > burn ? nburn : burn; -- buf_xor (outbuf, tmp.x64, c->u_ctr.ctr, GCRY_XTS_BLOCK_LEN); -+ cipher_block_xor (outbuf, tmp.x64, c->u_ctr.ctr, GCRY_XTS_BLOCK_LEN); - } - - /* Auto-increment data-unit sequence number */ -@@ -168,3 +169,21 @@ _gcry_cipher_xts_crypt (gcry_cipher_hd_t - - return 0; - } -+ -+ -+gcry_err_code_t -+_gcry_cipher_xts_encrypt (gcry_cipher_hd_t c, -+ unsigned char *outbuf, size_t outbuflen, -+ const unsigned char *inbuf, size_t inbuflen) -+{ -+ return _gcry_cipher_xts_crypt (c, outbuf, outbuflen, inbuf, inbuflen, 1); -+} -+ -+ -+gcry_err_code_t -+_gcry_cipher_xts_decrypt (gcry_cipher_hd_t c, -+ unsigned char *outbuf, size_t outbuflen, -+ const unsigned char *inbuf, size_t inbuflen) -+{ -+ return _gcry_cipher_xts_crypt (c, outbuf, outbuflen, inbuf, inbuflen, 0); -+} -diff -up libgcrypt-1.8.5/cipher/des.c.aes-perf libgcrypt-1.8.5/cipher/des.c ---- libgcrypt-1.8.5/cipher/des.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/des.c 2020-04-22 18:29:41.669862248 +0200 -@@ -119,6 +119,7 @@ - #include "g10lib.h" - #include "cipher.h" - #include "bufhelp.h" -+#include "cipher-internal.h" - #include "cipher-selftest.h" - - -@@ -197,7 +198,8 @@ 
static unsigned int do_tripledes_encrypt - static unsigned int do_tripledes_decrypt(void *context, byte *outbuf, - const byte *inbuf ); - static gcry_err_code_t do_tripledes_setkey(void *context, const byte *key, -- unsigned keylen); -+ unsigned keylen, -+ gcry_cipher_hd_t hd); - - static int initialized; - -@@ -940,7 +942,7 @@ _gcry_3des_ctr_enc(void *context, unsign - /* Encrypt the counter. */ - tripledes_ecb_encrypt (ctx, ctr, tmpbuf); - /* XOR the input with the encrypted counter and store in output. */ -- buf_xor(outbuf, tmpbuf, inbuf, DES_BLOCKSIZE); -+ cipher_block_xor(outbuf, tmpbuf, inbuf, DES_BLOCKSIZE); - outbuf += DES_BLOCKSIZE; - inbuf += DES_BLOCKSIZE; - /* Increment the counter. */ -@@ -996,7 +998,7 @@ _gcry_3des_cbc_dec(void *context, unsign - the intermediate result to SAVEBUF. */ - tripledes_ecb_decrypt (ctx, inbuf, savebuf); - -- buf_xor_n_copy_2(outbuf, savebuf, iv, inbuf, DES_BLOCKSIZE); -+ cipher_block_xor_n_copy_2(outbuf, savebuf, iv, inbuf, DES_BLOCKSIZE); - inbuf += DES_BLOCKSIZE; - outbuf += DES_BLOCKSIZE; - } -@@ -1041,7 +1043,7 @@ _gcry_3des_cfb_dec(void *context, unsign - for ( ;nblocks; nblocks-- ) - { - tripledes_ecb_encrypt (ctx, iv, iv); -- buf_xor_n_copy(outbuf, iv, inbuf, DES_BLOCKSIZE); -+ cipher_block_xor_n_copy(outbuf, iv, inbuf, DES_BLOCKSIZE); - outbuf += DES_BLOCKSIZE; - inbuf += DES_BLOCKSIZE; - } -@@ -1086,7 +1088,8 @@ is_weak_key ( const byte *key ) - - /* Alternative setkey for selftests; need larger key than default. 
*/ - static gcry_err_code_t --bulk_selftest_setkey (void *context, const byte *__key, unsigned __keylen) -+bulk_selftest_setkey (void *context, const byte *__key, unsigned __keylen, -+ gcry_cipher_hd_t hd) - { - static const unsigned char key[24] ATTR_ALIGNED_16 = { - 0x66,0x9A,0x00,0x7F,0xC7,0x6A,0x45,0x9F, -@@ -1094,10 +1097,11 @@ bulk_selftest_setkey (void *context, con - 0x18,0x2A,0x39,0x47,0x5E,0x6F,0x75,0x82 - }; - -+ (void)hd; - (void)__key; - (void)__keylen; - -- return do_tripledes_setkey(context, key, sizeof(key)); -+ return do_tripledes_setkey(context, key, sizeof(key), NULL); - } - - -@@ -1349,10 +1353,13 @@ selftest (void) - - - static gcry_err_code_t --do_tripledes_setkey ( void *context, const byte *key, unsigned keylen ) -+do_tripledes_setkey ( void *context, const byte *key, unsigned keylen, -+ gcry_cipher_hd_t hd ) - { - struct _tripledes_ctx *ctx = (struct _tripledes_ctx *) context; - -+ (void)hd; -+ - if( keylen != 24 ) - return GPG_ERR_INV_KEYLEN; - -@@ -1413,10 +1420,13 @@ do_tripledes_decrypt( void *context, byt - } - - static gcry_err_code_t --do_des_setkey (void *context, const byte *key, unsigned keylen) -+do_des_setkey (void *context, const byte *key, unsigned keylen, -+ gcry_cipher_hd_t hd) - { - struct _des_ctx *ctx = (struct _des_ctx *) context; - -+ (void)hd; -+ - if (keylen != 8) - return GPG_ERR_INV_KEYLEN; - -diff -up libgcrypt-1.8.5/cipher/gost28147.c.aes-perf libgcrypt-1.8.5/cipher/gost28147.c ---- libgcrypt-1.8.5/cipher/gost28147.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/gost28147.c 2020-04-22 18:29:41.669862248 +0200 -@@ -39,11 +39,14 @@ - #include "gost-sb.h" - - static gcry_err_code_t --gost_setkey (void *c, const byte *key, unsigned keylen) -+gost_setkey (void *c, const byte *key, unsigned keylen, -+ gcry_cipher_hd_t hd) - { - int i; - GOST28147_context *ctx = c; - -+ (void)hd; -+ - if (keylen != 256 / 8) - return GPG_ERR_INV_KEYLEN; - -diff -up libgcrypt-1.8.5/cipher/idea.c.aes-perf 
libgcrypt-1.8.5/cipher/idea.c ---- libgcrypt-1.8.5/cipher/idea.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/idea.c 2020-04-22 18:29:41.670862229 +0200 -@@ -258,10 +258,12 @@ do_setkey( IDEA_context *c, const byte * - } - - static gcry_err_code_t --idea_setkey (void *context, const byte *key, unsigned int keylen) -+idea_setkey (void *context, const byte *key, unsigned int keylen, -+ gcry_cipher_hd_t hd) - { - IDEA_context *ctx = context; - int rc = do_setkey (ctx, key, keylen); -+ (void)hd; - _gcry_burn_stack (23+6*sizeof(void*)); - return rc; - } -diff -up libgcrypt-1.8.5/cipher/Makefile.am.aes-perf libgcrypt-1.8.5/cipher/Makefile.am ---- libgcrypt-1.8.5/cipher/Makefile.am.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/Makefile.am 2020-04-22 18:29:41.670862229 +0200 -@@ -83,7 +83,8 @@ rijndael.c rijndael-internal.h rijndael- - rijndael-padlock.c rijndael-amd64.S rijndael-arm.S \ - rijndael-ssse3-amd64.c rijndael-ssse3-amd64-asm.S \ - rijndael-armv8-ce.c rijndael-armv8-aarch32-ce.S rijndael-armv8-aarch64-ce.S \ -- rijndael-aarch64.S \ -+ rijndael-aarch64.S rijndael-ppc.c rijndael-ppc9le.c \ -+ rijndael-ppc-common.h rijndael-ppc-functions.h \ - rmd160.c \ - rsa.c \ - salsa20.c salsa20-amd64.S salsa20-armv7-neon.S \ -@@ -128,3 +129,23 @@ tiger.o: $(srcdir)/tiger.c - - tiger.lo: $(srcdir)/tiger.c - `echo $(LTCOMPILE) -c $(srcdir)/tiger.c | $(o_flag_munging) ` -+ -+if ENABLE_PPC_VCRYPTO_EXTRA_CFLAGS -+ppc_vcrypto_cflags = -maltivec -mvsx -mcrypto -+else -+ppc_vcrypto_cflags = -+endif -+ -+rijndael-ppc.o: $(srcdir)/rijndael-ppc.c Makefile -+ `echo $(COMPILE) $(ppc_vcrypto_cflags) -c $< ` -+ -+rijndael-ppc.lo: $(srcdir)/rijndael-ppc.c Makefile -+ `echo $(LTCOMPILE) $(ppc_vcrypto_cflags) -c $< ` -+ -+rijndael-ppc9le.o: $(srcdir)/rijndael-ppc9le.c Makefile -+ `echo $(COMPILE) $(ppc_vcrypto_cflags) -c $< ` -+ -+rijndael-ppc9le.lo: $(srcdir)/rijndael-ppc9le.c Makefile -+ `echo $(LTCOMPILE) $(ppc_vcrypto_cflags) -c $< ` 
-+ -+ -diff -up libgcrypt-1.8.5/cipher/rfc2268.c.aes-perf libgcrypt-1.8.5/cipher/rfc2268.c ---- libgcrypt-1.8.5/cipher/rfc2268.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/rfc2268.c 2020-04-22 18:29:41.670862229 +0200 -@@ -262,8 +262,10 @@ setkey_core (void *context, const unsign - } - - static gpg_err_code_t --do_setkey (void *context, const unsigned char *key, unsigned int keylen) -+do_setkey (void *context, const unsigned char *key, unsigned int keylen, -+ gcry_cipher_hd_t hd) - { -+ (void)hd; - return setkey_core (context, key, keylen, 1); - } - -diff -up libgcrypt-1.8.5/cipher/rijndael-aesni.c.aes-perf libgcrypt-1.8.5/cipher/rijndael-aesni.c ---- libgcrypt-1.8.5/cipher/rijndael-aesni.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/rijndael-aesni.c 2020-04-28 11:22:26.025519954 +0200 -@@ -327,8 +327,8 @@ _gcry_aes_aesni_do_setkey (RIJNDAEL_cont - - - /* Make a decryption key from an encryption key. */ --void --_gcry_aes_aesni_prepare_decryption (RIJNDAEL_context *ctx) -+static inline void -+do_aesni_prepare_decryption (RIJNDAEL_context *ctx) - { - /* The AES-NI decrypt instructions use the Equivalent Inverse - Cipher, thus we can't use the the standard decrypt key -@@ -338,8 +338,6 @@ _gcry_aes_aesni_prepare_decryption (RIJN - int rr; - int r; - -- aesni_prepare(); -- - #define DO_AESNI_AESIMC() \ - asm volatile ("movdqa %[ekey], %%xmm1\n\t" \ - /*"aesimc %%xmm1, %%xmm1\n\t"*/ \ -@@ -375,7 +373,13 @@ _gcry_aes_aesni_prepare_decryption (RIJN - dkey[r] = ekey[0]; - - #undef DO_AESNI_AESIMC -+} - -+void -+_gcry_aes_aesni_prepare_decryption (RIJNDAEL_context *ctx) -+{ -+ aesni_prepare(); -+ do_aesni_prepare_decryption (ctx); - aesni_cleanup(); - } - -@@ -1023,8 +1027,8 @@ _gcry_aes_aesni_encrypt (const RIJNDAEL_ - - - void --_gcry_aes_aesni_cfb_enc (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -+_gcry_aes_aesni_cfb_enc (RIJNDAEL_context *ctx, unsigned char 
*iv, -+ unsigned char *outbuf, const unsigned char *inbuf, - size_t nblocks) - { - aesni_prepare (); -@@ -1059,8 +1063,8 @@ _gcry_aes_aesni_cfb_enc (RIJNDAEL_contex - - - void --_gcry_aes_aesni_cbc_enc (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -+_gcry_aes_aesni_cbc_enc (RIJNDAEL_context *ctx, unsigned char *iv, -+ unsigned char *outbuf, const unsigned char *inbuf, - size_t nblocks, int cbc_mac) - { - aesni_prepare_2_6_variable; -@@ -1105,8 +1109,8 @@ _gcry_aes_aesni_cbc_enc (RIJNDAEL_contex - - - void --_gcry_aes_aesni_ctr_enc (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *ctr, -+_gcry_aes_aesni_ctr_enc (RIJNDAEL_context *ctx, unsigned char *ctr, -+ unsigned char *outbuf, const unsigned char *inbuf, - size_t nblocks) - { - static const unsigned char be_mask[16] __attribute__ ((aligned (16))) = -@@ -1160,8 +1164,8 @@ _gcry_aes_aesni_decrypt (const RIJNDAEL_ - - - void --_gcry_aes_aesni_cfb_dec (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -+_gcry_aes_aesni_cfb_dec (RIJNDAEL_context *ctx, unsigned char *iv, -+ unsigned char *outbuf, const unsigned char *inbuf, - size_t nblocks) - { - aesni_prepare_2_6_variable; -@@ -1245,15 +1249,21 @@ _gcry_aes_aesni_cfb_dec (RIJNDAEL_contex - - - void --_gcry_aes_aesni_cbc_dec (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -- size_t nblocks) -+_gcry_aes_aesni_cbc_dec (RIJNDAEL_context *ctx, unsigned char *iv, -+ unsigned char *outbuf, const unsigned char *inbuf, -+ size_t nblocks) - { - aesni_prepare_2_6_variable; - - aesni_prepare (); - aesni_prepare_2_6(); - -+ if ( !ctx->decryption_prepared ) -+ { -+ do_aesni_prepare_decryption ( ctx ); -+ ctx->decryption_prepared = 1; -+ } -+ - asm volatile - ("movdqu %[iv], %%xmm5\n\t" /* use xmm5 as fast IV storage */ - : /* No output */ -@@ -1514,6 +1524,12 @@ aesni_ocb_dec (gcry_cipher_hd_t 
c, void - aesni_prepare (); - aesni_prepare_2_6 (); - -+ if ( !ctx->decryption_prepared ) -+ { -+ do_aesni_prepare_decryption ( ctx ); -+ ctx->decryption_prepared = 1; -+ } -+ - /* Preload Offset and Checksum */ - asm volatile ("movdqu %[iv], %%xmm5\n\t" - "movdqu %[ctr], %%xmm6\n\t" -@@ -1665,7 +1681,7 @@ aesni_ocb_dec (gcry_cipher_hd_t c, void - } - - --void -+size_t - _gcry_aes_aesni_ocb_crypt(gcry_cipher_hd_t c, void *outbuf_arg, - const void *inbuf_arg, size_t nblocks, int encrypt) - { -@@ -1673,10 +1689,12 @@ _gcry_aes_aesni_ocb_crypt(gcry_cipher_hd - aesni_ocb_enc(c, outbuf_arg, inbuf_arg, nblocks); - else - aesni_ocb_dec(c, outbuf_arg, inbuf_arg, nblocks); -+ -+ return 0; - } - - --void -+size_t - _gcry_aes_aesni_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, - size_t nblocks) - { -@@ -1810,7 +1828,306 @@ _gcry_aes_aesni_ocb_auth (gcry_cipher_hd - - aesni_cleanup (); - aesni_cleanup_2_6 (); -+ -+ return 0; - } - - -+static const u64 xts_gfmul_const[16] __attribute__ ((aligned (16))) = -+ { 0x87, 0x01 }; -+ -+ -+static void -+_gcry_aes_aesni_xts_enc (RIJNDAEL_context *ctx, unsigned char *tweak, -+ unsigned char *outbuf, const unsigned char *inbuf, -+ size_t nblocks) -+{ -+ aesni_prepare_2_6_variable; -+ -+ aesni_prepare (); -+ aesni_prepare_2_6 (); -+ -+ /* Preload Tweak */ -+ asm volatile ("movdqu %[tweak], %%xmm5\n\t" -+ "movdqa %[gfmul], %%xmm6\n\t" -+ : -+ : [tweak] "m" (*tweak), -+ [gfmul] "m" (*xts_gfmul_const) -+ : "memory" ); -+ -+ for ( ;nblocks >= 4; nblocks -= 4 ) -+ { -+ asm volatile ("pshufd $0x13, %%xmm5, %%xmm4\n\t" -+ "movdqu %[inbuf0], %%xmm1\n\t" -+ "pxor %%xmm5, %%xmm1\n\t" -+ "movdqu %%xmm5, %[outbuf0]\n\t" -+ -+ "movdqa %%xmm4, %%xmm0\n\t" -+ "paddd %%xmm4, %%xmm4\n\t" -+ "psrad $31, %%xmm0\n\t" -+ "paddq %%xmm5, %%xmm5\n\t" -+ "pand %%xmm6, %%xmm0\n\t" -+ "pxor %%xmm0, %%xmm5\n\t" -+ : [outbuf0] "=m" (*(outbuf + 0 * 16)) -+ : [inbuf0] "m" (*(inbuf + 0 * 16)) -+ : "memory" ); -+ -+ asm volatile ("movdqu %[inbuf1], %%xmm2\n\t" -+ 
"pxor %%xmm5, %%xmm2\n\t" -+ "movdqu %%xmm5, %[outbuf1]\n\t" -+ -+ "movdqa %%xmm4, %%xmm0\n\t" -+ "paddd %%xmm4, %%xmm4\n\t" -+ "psrad $31, %%xmm0\n\t" -+ "paddq %%xmm5, %%xmm5\n\t" -+ "pand %%xmm6, %%xmm0\n\t" -+ "pxor %%xmm0, %%xmm5\n\t" -+ : [outbuf1] "=m" (*(outbuf + 1 * 16)) -+ : [inbuf1] "m" (*(inbuf + 1 * 16)) -+ : "memory" ); -+ -+ asm volatile ("movdqu %[inbuf2], %%xmm3\n\t" -+ "pxor %%xmm5, %%xmm3\n\t" -+ "movdqu %%xmm5, %[outbuf2]\n\t" -+ -+ "movdqa %%xmm4, %%xmm0\n\t" -+ "paddd %%xmm4, %%xmm4\n\t" -+ "psrad $31, %%xmm0\n\t" -+ "paddq %%xmm5, %%xmm5\n\t" -+ "pand %%xmm6, %%xmm0\n\t" -+ "pxor %%xmm0, %%xmm5\n\t" -+ : [outbuf2] "=m" (*(outbuf + 2 * 16)) -+ : [inbuf2] "m" (*(inbuf + 2 * 16)) -+ : "memory" ); -+ -+ asm volatile ("movdqa %%xmm4, %%xmm0\n\t" -+ "movdqu %[inbuf3], %%xmm4\n\t" -+ "pxor %%xmm5, %%xmm4\n\t" -+ "movdqu %%xmm5, %[outbuf3]\n\t" -+ -+ "psrad $31, %%xmm0\n\t" -+ "paddq %%xmm5, %%xmm5\n\t" -+ "pand %%xmm6, %%xmm0\n\t" -+ "pxor %%xmm0, %%xmm5\n\t" -+ : [outbuf3] "=m" (*(outbuf + 3 * 16)) -+ : [inbuf3] "m" (*(inbuf + 3 * 16)) -+ : "memory" ); -+ -+ do_aesni_enc_vec4 (ctx); -+ -+ asm volatile ("movdqu %[outbuf0], %%xmm0\n\t" -+ "pxor %%xmm0, %%xmm1\n\t" -+ "movdqu %[outbuf1], %%xmm0\n\t" -+ "movdqu %%xmm1, %[outbuf0]\n\t" -+ "movdqu %[outbuf2], %%xmm1\n\t" -+ "pxor %%xmm0, %%xmm2\n\t" -+ "movdqu %[outbuf3], %%xmm0\n\t" -+ "pxor %%xmm1, %%xmm3\n\t" -+ "pxor %%xmm0, %%xmm4\n\t" -+ "movdqu %%xmm2, %[outbuf1]\n\t" -+ "movdqu %%xmm3, %[outbuf2]\n\t" -+ "movdqu %%xmm4, %[outbuf3]\n\t" -+ : [outbuf0] "+m" (*(outbuf + 0 * 16)), -+ [outbuf1] "+m" (*(outbuf + 1 * 16)), -+ [outbuf2] "+m" (*(outbuf + 2 * 16)), -+ [outbuf3] "+m" (*(outbuf + 3 * 16)) -+ : -+ : "memory" ); -+ -+ outbuf += BLOCKSIZE * 4; -+ inbuf += BLOCKSIZE * 4; -+ } -+ -+ for ( ;nblocks; nblocks-- ) -+ { -+ asm volatile ("movdqu %[inbuf], %%xmm0\n\t" -+ "pxor %%xmm5, %%xmm0\n\t" -+ "movdqa %%xmm5, %%xmm4\n\t" -+ -+ "pshufd $0x13, %%xmm5, %%xmm1\n\t" -+ "psrad $31, %%xmm1\n\t" -+ "paddq 
%%xmm5, %%xmm5\n\t" -+ "pand %%xmm6, %%xmm1\n\t" -+ "pxor %%xmm1, %%xmm5\n\t" -+ : -+ : [inbuf] "m" (*inbuf) -+ : "memory" ); -+ -+ do_aesni_enc (ctx); -+ -+ asm volatile ("pxor %%xmm4, %%xmm0\n\t" -+ "movdqu %%xmm0, %[outbuf]\n\t" -+ : [outbuf] "=m" (*outbuf) -+ : -+ : "memory" ); -+ -+ outbuf += BLOCKSIZE; -+ inbuf += BLOCKSIZE; -+ } -+ -+ asm volatile ("movdqu %%xmm5, %[tweak]\n\t" -+ : [tweak] "=m" (*tweak) -+ : -+ : "memory" ); -+ -+ aesni_cleanup (); -+ aesni_cleanup_2_6 (); -+} -+ -+ -+static void -+_gcry_aes_aesni_xts_dec (RIJNDAEL_context *ctx, unsigned char *tweak, -+ unsigned char *outbuf, const unsigned char *inbuf, -+ size_t nblocks) -+{ -+ aesni_prepare_2_6_variable; -+ -+ aesni_prepare (); -+ aesni_prepare_2_6 (); -+ -+ if ( !ctx->decryption_prepared ) -+ { -+ do_aesni_prepare_decryption ( ctx ); -+ ctx->decryption_prepared = 1; -+ } -+ -+ /* Preload Tweak */ -+ asm volatile ("movdqu %[tweak], %%xmm5\n\t" -+ "movdqa %[gfmul], %%xmm6\n\t" -+ : -+ : [tweak] "m" (*tweak), -+ [gfmul] "m" (*xts_gfmul_const) -+ : "memory" ); -+ -+ for ( ;nblocks >= 4; nblocks -= 4 ) -+ { -+ asm volatile ("pshufd $0x13, %%xmm5, %%xmm4\n\t" -+ "movdqu %[inbuf0], %%xmm1\n\t" -+ "pxor %%xmm5, %%xmm1\n\t" -+ "movdqu %%xmm5, %[outbuf0]\n\t" -+ -+ "movdqa %%xmm4, %%xmm0\n\t" -+ "paddd %%xmm4, %%xmm4\n\t" -+ "psrad $31, %%xmm0\n\t" -+ "paddq %%xmm5, %%xmm5\n\t" -+ "pand %%xmm6, %%xmm0\n\t" -+ "pxor %%xmm0, %%xmm5\n\t" -+ : [outbuf0] "=m" (*(outbuf + 0 * 16)) -+ : [inbuf0] "m" (*(inbuf + 0 * 16)) -+ : "memory" ); -+ -+ asm volatile ("movdqu %[inbuf1], %%xmm2\n\t" -+ "pxor %%xmm5, %%xmm2\n\t" -+ "movdqu %%xmm5, %[outbuf1]\n\t" -+ -+ "movdqa %%xmm4, %%xmm0\n\t" -+ "paddd %%xmm4, %%xmm4\n\t" -+ "psrad $31, %%xmm0\n\t" -+ "paddq %%xmm5, %%xmm5\n\t" -+ "pand %%xmm6, %%xmm0\n\t" -+ "pxor %%xmm0, %%xmm5\n\t" -+ : [outbuf1] "=m" (*(outbuf + 1 * 16)) -+ : [inbuf1] "m" (*(inbuf + 1 * 16)) -+ : "memory" ); -+ -+ asm volatile ("movdqu %[inbuf2], %%xmm3\n\t" -+ "pxor %%xmm5, %%xmm3\n\t" -+ 
"movdqu %%xmm5, %[outbuf2]\n\t" -+ -+ "movdqa %%xmm4, %%xmm0\n\t" -+ "paddd %%xmm4, %%xmm4\n\t" -+ "psrad $31, %%xmm0\n\t" -+ "paddq %%xmm5, %%xmm5\n\t" -+ "pand %%xmm6, %%xmm0\n\t" -+ "pxor %%xmm0, %%xmm5\n\t" -+ : [outbuf2] "=m" (*(outbuf + 2 * 16)) -+ : [inbuf2] "m" (*(inbuf + 2 * 16)) -+ : "memory" ); -+ -+ asm volatile ("movdqa %%xmm4, %%xmm0\n\t" -+ "movdqu %[inbuf3], %%xmm4\n\t" -+ "pxor %%xmm5, %%xmm4\n\t" -+ "movdqu %%xmm5, %[outbuf3]\n\t" -+ -+ "psrad $31, %%xmm0\n\t" -+ "paddq %%xmm5, %%xmm5\n\t" -+ "pand %%xmm6, %%xmm0\n\t" -+ "pxor %%xmm0, %%xmm5\n\t" -+ : [outbuf3] "=m" (*(outbuf + 3 * 16)) -+ : [inbuf3] "m" (*(inbuf + 3 * 16)) -+ : "memory" ); -+ -+ do_aesni_dec_vec4 (ctx); -+ -+ asm volatile ("movdqu %[outbuf0], %%xmm0\n\t" -+ "pxor %%xmm0, %%xmm1\n\t" -+ "movdqu %[outbuf1], %%xmm0\n\t" -+ "movdqu %%xmm1, %[outbuf0]\n\t" -+ "movdqu %[outbuf2], %%xmm1\n\t" -+ "pxor %%xmm0, %%xmm2\n\t" -+ "movdqu %[outbuf3], %%xmm0\n\t" -+ "pxor %%xmm1, %%xmm3\n\t" -+ "pxor %%xmm0, %%xmm4\n\t" -+ "movdqu %%xmm2, %[outbuf1]\n\t" -+ "movdqu %%xmm3, %[outbuf2]\n\t" -+ "movdqu %%xmm4, %[outbuf3]\n\t" -+ : [outbuf0] "+m" (*(outbuf + 0 * 16)), -+ [outbuf1] "+m" (*(outbuf + 1 * 16)), -+ [outbuf2] "+m" (*(outbuf + 2 * 16)), -+ [outbuf3] "+m" (*(outbuf + 3 * 16)) -+ : -+ : "memory" ); -+ -+ outbuf += BLOCKSIZE * 4; -+ inbuf += BLOCKSIZE * 4; -+ } -+ -+ for ( ;nblocks; nblocks-- ) -+ { -+ asm volatile ("movdqu %[inbuf], %%xmm0\n\t" -+ "pxor %%xmm5, %%xmm0\n\t" -+ "movdqa %%xmm5, %%xmm4\n\t" -+ -+ "pshufd $0x13, %%xmm5, %%xmm1\n\t" -+ "psrad $31, %%xmm1\n\t" -+ "paddq %%xmm5, %%xmm5\n\t" -+ "pand %%xmm6, %%xmm1\n\t" -+ "pxor %%xmm1, %%xmm5\n\t" -+ : -+ : [inbuf] "m" (*inbuf) -+ : "memory" ); -+ -+ do_aesni_dec (ctx); -+ -+ asm volatile ("pxor %%xmm4, %%xmm0\n\t" -+ "movdqu %%xmm0, %[outbuf]\n\t" -+ : [outbuf] "=m" (*outbuf) -+ : -+ : "memory" ); -+ -+ outbuf += BLOCKSIZE; -+ inbuf += BLOCKSIZE; -+ } -+ -+ asm volatile ("movdqu %%xmm5, %[tweak]\n\t" -+ : [tweak] "=m" (*tweak) -+ 
: -+ : "memory" ); -+ -+ aesni_cleanup (); -+ aesni_cleanup_2_6 (); -+} -+ -+ -+void -+_gcry_aes_aesni_xts_crypt (RIJNDAEL_context *ctx, unsigned char *tweak, -+ unsigned char *outbuf, const unsigned char *inbuf, -+ size_t nblocks, int encrypt) -+{ -+ if (encrypt) -+ _gcry_aes_aesni_xts_enc(ctx, tweak, outbuf, inbuf, nblocks); -+ else -+ _gcry_aes_aesni_xts_dec(ctx, tweak, outbuf, inbuf, nblocks); -+} -+ - #endif /* USE_AESNI */ -diff -up libgcrypt-1.8.5/cipher/rijndael-armv8-aarch32-ce.S.aes-perf libgcrypt-1.8.5/cipher/rijndael-armv8-aarch32-ce.S ---- libgcrypt-1.8.5/cipher/rijndael-armv8-aarch32-ce.S.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/rijndael-armv8-aarch32-ce.S 2020-04-22 18:29:41.673862172 +0200 -@@ -1517,6 +1517,317 @@ _gcry_aes_ocb_auth_armv8_ce: - .size _gcry_aes_ocb_auth_armv8_ce,.-_gcry_aes_ocb_auth_armv8_ce; - - -+ -+/* -+ * void _gcry_aes_xts_enc_armv8_ce (const void *keysched, -+ * unsigned char *outbuf, -+ * const unsigned char *inbuf, -+ * unsigned char *iv, unsigned int nrounds); -+ */ -+ -+.align 3 -+.globl _gcry_aes_xts_enc_armv8_ce -+.type _gcry_aes_xts_enc_armv8_ce,%function; -+_gcry_aes_xts_enc_armv8_ce: -+ /* input: -+ * r0: keysched -+ * r1: outbuf -+ * r2: inbuf -+ * r3: iv -+ * %st+0: nblocks => r4 -+ * %st+4: nrounds => r5 -+ */ -+ -+ vpush {q4-q7} -+ push {r4-r12,lr} /* 4*16 + 4*10 = 104b */ -+ ldr r4, [sp, #(104+0)] -+ ldr r5, [sp, #(104+4)] -+ cmp r4, #0 -+ beq .Lxts_enc_skip -+ -+ cmp r5, #12 -+ -+ vld1.8 {q0}, [r3] /* load tweak */ -+ mov r7, #0x87; -+ -+ aes_preload_keys(r0, r6); -+ -+ beq .Lxts_enc_entry_192 -+ bhi .Lxts_enc_entry_256 -+ -+#define CTR_XTS(bits, ...) 
\ -+ .Lxts_enc_entry_##bits: \ -+ cmp r4, #4; \ -+ blo .Lxts_enc_loop_##bits; \ -+ \ -+ .Lxts_enc_loop4_##bits: \ -+ sub r4, r4, #4; \ -+ veor q9, q9, q9; \ -+ \ -+ vld1.8 {q1-q2}, [r2]!; /* load plaintext */ \ -+ veor q1, q1, q0; \ -+ cmp r4, #4; \ -+ vmov.u32 d18[0], r7; \ -+ vst1.8 {q0}, [r1]!; /* store tweak0 to temp */ \ -+ \ -+ vshr.s64 d16, d1, #63; \ -+ vshr.u64 d17, d0, #63; \ -+ vadd.u64 q0, q0, q0; \ -+ vand d16, d16, d18; \ -+ veor q0, q0, q8; \ -+ \ -+ vld1.8 {q3-q4}, [r2]!; /* load plaintext */ \ -+ veor q2, q2, q0; \ -+ vst1.8 {q0}, [r1]!; /* store tweak1 to temp */ \ -+ \ -+ vshr.s64 d16, d1, #63; \ -+ vshr.u64 d17, d0, #63; \ -+ vadd.u64 q0, q0, q0; \ -+ vand d16, d16, d18; \ -+ veor q0, q0, q8; \ -+ \ -+ veor q3, q3, q0; \ -+ vst1.8 {q0}, [r1]!; /* store tweak2 to temp */ \ -+ \ -+ vshr.s64 d16, d1, #63; \ -+ vshr.u64 d17, d0, #63; \ -+ vadd.u64 q0, q0, q0; \ -+ vand d16, d16, d18; \ -+ veor q0, q0, q8; \ -+ \ -+ veor q4, q4, q0; \ -+ vst1.8 {q0}, [r1]; /* store tweak3 to temp */ \ -+ sub r1, r1, #48; \ -+ \ -+ vshr.s64 d16, d1, #63; \ -+ vshr.u64 d17, d0, #63; \ -+ vadd.u64 q0, q0, q0; \ -+ vand d16, d16, d18; \ -+ veor q0, q0, q8; \ -+ \ -+ do_aes_4_##bits(e, mc, q1, q2, q3, q4, ##__VA_ARGS__); \ -+ \ -+ vld1.8 {q8-q9}, [r1]!; /* load tweak from temp */ \ -+ veor q1, q1, q8; \ -+ veor q2, q2, q9; \ -+ vld1.8 {q8-q9}, [r1]; /* load tweak from temp */ \ -+ sub r1, r1, #32; \ -+ veor q3, q3, q8; \ -+ veor q4, q4, q9; \ -+ vst1.8 {q1-q2}, [r1]!; /* store plaintext */ \ -+ vst1.8 {q3-q4}, [r1]!; /* store plaintext */ \ -+ \ -+ bhs .Lxts_enc_loop4_##bits; \ -+ cmp r4, #0; \ -+ beq .Lxts_enc_done; \ -+ \ -+ .Lxts_enc_loop_##bits: \ -+ \ -+ vld1.8 {q1}, [r2]!; /* load ciphertext */ \ -+ \ -+ veor q9, q9, q9; \ -+ veor q1, q1, q0; \ -+ vmov.u32 d18[0], r7; \ -+ vmov q2, q0; \ -+ \ -+ vshr.s64 d16, d1, #63; \ -+ vshr.u64 d17, d0, #63; \ -+ vadd.u64 q0, q0, q0; \ -+ vand d16, d16, d18; \ -+ veor q0, q0, q8; \ -+ subs r4, r4, #1; \ -+ \ -+ 
do_aes_one##bits(e, mc, q1, q1, ##__VA_ARGS__); \ -+ \ -+ veor q1, q1, q2; \ -+ vst1.8 {q1}, [r1]!; /* store plaintext */ \ -+ \ -+ bne .Lxts_enc_loop_##bits; \ -+ b .Lxts_enc_done; -+ -+ CTR_XTS(128re, r0, r6) -+ CTR_XTS(192, r0, r6) -+ CTR_XTS(256, r0, r6) -+ -+#undef CTR_XTS -+ -+.Lxts_enc_done: -+ vst1.8 {q0}, [r3] /* store tweak */ -+ -+ CLEAR_REG(q0) -+ CLEAR_REG(q1) -+ CLEAR_REG(q2) -+ CLEAR_REG(q3) -+ CLEAR_REG(q8) -+ CLEAR_REG(q9) -+ CLEAR_REG(q10) -+ CLEAR_REG(q11) -+ CLEAR_REG(q12) -+ CLEAR_REG(q13) -+ CLEAR_REG(q14) -+ -+.Lxts_enc_skip: -+ pop {r4-r12,lr} -+ vpop {q4-q7} -+ bx lr -+.size _gcry_aes_xts_enc_armv8_ce,.-_gcry_aes_xts_enc_armv8_ce; -+ -+ -+/* -+ * void _gcry_aes_xts_dec_armv8_ce (const void *keysched, -+ * unsigned char *outbuf, -+ * const unsigned char *inbuf, -+ * unsigned char *iv, unsigned int nrounds); -+ */ -+ -+.align 3 -+.globl _gcry_aes_xts_dec_armv8_ce -+.type _gcry_aes_xts_dec_armv8_ce,%function; -+_gcry_aes_xts_dec_armv8_ce: -+ /* input: -+ * r0: keysched -+ * r1: outbuf -+ * r2: inbuf -+ * r3: iv -+ * %st+0: nblocks => r4 -+ * %st+4: nrounds => r5 -+ */ -+ -+ vpush {q4-q7} -+ push {r4-r12,lr} /* 4*16 + 4*10 = 104b */ -+ ldr r4, [sp, #(104+0)] -+ ldr r5, [sp, #(104+4)] -+ cmp r4, #0 -+ beq .Lxts_dec_skip -+ -+ cmp r5, #12 -+ -+ vld1.8 {q0}, [r3] /* load tweak */ -+ mov r7, #0x87; -+ -+ aes_preload_keys(r0, r6); -+ -+ beq .Lxts_dec_entry_192 -+ bhi .Lxts_dec_entry_256 -+ -+#define CTR_XTS(bits, ...) 
\ -+ .Lxts_dec_entry_##bits: \ -+ cmp r4, #4; \ -+ blo .Lxts_dec_loop_##bits; \ -+ \ -+ .Lxts_dec_loop4_##bits: \ -+ sub r4, r4, #4; \ -+ veor q9, q9, q9; \ -+ \ -+ vld1.8 {q1-q2}, [r2]!; /* load plaintext */ \ -+ veor q1, q1, q0; \ -+ cmp r4, #4; \ -+ vmov.u32 d18[0], r7; \ -+ vst1.8 {q0}, [r1]!; /* store tweak0 to temp */ \ -+ \ -+ vshr.s64 d16, d1, #63; \ -+ vshr.u64 d17, d0, #63; \ -+ vadd.u64 q0, q0, q0; \ -+ vand d16, d16, d18; \ -+ veor q0, q0, q8; \ -+ \ -+ vld1.8 {q3-q4}, [r2]!; /* load plaintext */ \ -+ veor q2, q2, q0; \ -+ vst1.8 {q0}, [r1]!; /* store tweak1 to temp */ \ -+ \ -+ vshr.s64 d16, d1, #63; \ -+ vshr.u64 d17, d0, #63; \ -+ vadd.u64 q0, q0, q0; \ -+ vand d16, d16, d18; \ -+ veor q0, q0, q8; \ -+ \ -+ veor q3, q3, q0; \ -+ vst1.8 {q0}, [r1]!; /* store tweak2 to temp */ \ -+ \ -+ vshr.s64 d16, d1, #63; \ -+ vshr.u64 d17, d0, #63; \ -+ vadd.u64 q0, q0, q0; \ -+ vand d16, d16, d18; \ -+ veor q0, q0, q8; \ -+ \ -+ veor q4, q4, q0; \ -+ vst1.8 {q0}, [r1]; /* store tweak3 to temp */ \ -+ sub r1, r1, #48; \ -+ \ -+ vshr.s64 d16, d1, #63; \ -+ vshr.u64 d17, d0, #63; \ -+ vadd.u64 q0, q0, q0; \ -+ vand d16, d16, d18; \ -+ veor q0, q0, q8; \ -+ \ -+ do_aes_4_##bits(d, imc, q1, q2, q3, q4, ##__VA_ARGS__); \ -+ \ -+ vld1.8 {q8-q9}, [r1]!; /* load tweak from temp */ \ -+ veor q1, q1, q8; \ -+ veor q2, q2, q9; \ -+ vld1.8 {q8-q9}, [r1]; /* load tweak from temp */ \ -+ sub r1, r1, #32; \ -+ veor q3, q3, q8; \ -+ veor q4, q4, q9; \ -+ vst1.8 {q1-q2}, [r1]!; /* store plaintext */ \ -+ vst1.8 {q3-q4}, [r1]!; /* store plaintext */ \ -+ \ -+ bhs .Lxts_dec_loop4_##bits; \ -+ cmp r4, #0; \ -+ beq .Lxts_dec_done; \ -+ \ -+ .Lxts_dec_loop_##bits: \ -+ \ -+ vld1.8 {q1}, [r2]!; /* load ciphertext */ \ -+ \ -+ veor q9, q9, q9; \ -+ veor q1, q1, q0; \ -+ vmov.u32 d18[0], r7; \ -+ vmov q2, q0; \ -+ \ -+ vshr.s64 d16, d1, #63; \ -+ vshr.u64 d17, d0, #63; \ -+ vadd.u64 q0, q0, q0; \ -+ vand d16, d16, d18; \ -+ veor q0, q0, q8; \ -+ subs r4, r4, #1; \ -+ \ -+ 
do_aes_one##bits(d, imc, q1, q1, ##__VA_ARGS__); \ -+ \ -+ veor q1, q1, q2; \ -+ vst1.8 {q1}, [r1]!; /* store plaintext */ \ -+ \ -+ bne .Lxts_dec_loop_##bits; \ -+ b .Lxts_dec_done; -+ -+ CTR_XTS(128re, r0, r6) -+ CTR_XTS(192, r0, r6) -+ CTR_XTS(256, r0, r6) -+ -+#undef CTR_XTS -+ -+.Lxts_dec_done: -+ vst1.8 {q0}, [r3] /* store tweak */ -+ -+ CLEAR_REG(q0) -+ CLEAR_REG(q1) -+ CLEAR_REG(q2) -+ CLEAR_REG(q3) -+ CLEAR_REG(q8) -+ CLEAR_REG(q9) -+ CLEAR_REG(q10) -+ CLEAR_REG(q11) -+ CLEAR_REG(q12) -+ CLEAR_REG(q13) -+ CLEAR_REG(q14) -+ -+.Lxts_dec_skip: -+ pop {r4-r12,lr} -+ vpop {q4-q7} -+ bx lr -+.size _gcry_aes_xts_dec_armv8_ce,.-_gcry_aes_xts_dec_armv8_ce; -+ -+ - /* - * u32 _gcry_aes_sbox4_armv8_ce(u32 in4b); - */ -diff -up libgcrypt-1.8.5/cipher/rijndael-armv8-aarch64-ce.S.aes-perf libgcrypt-1.8.5/cipher/rijndael-armv8-aarch64-ce.S ---- libgcrypt-1.8.5/cipher/rijndael-armv8-aarch64-ce.S.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/rijndael-armv8-aarch64-ce.S 2020-04-22 18:29:41.674862153 +0200 -@@ -1157,8 +1157,8 @@ _gcry_aes_ocb_auth_armv8_ce: - * w6: nrounds => w7 - * w7: blkn => w12 - */ -- mov x12, x7 -- mov x7, x6 -+ mov w12, w7 -+ mov w7, w6 - mov x6, x5 - mov x5, x4 - mov x4, x3 -@@ -1277,6 +1277,284 @@ _gcry_aes_ocb_auth_armv8_ce: - - - /* -+ * void _gcry_aes_xts_enc_armv8_ce (const void *keysched, -+ * unsigned char *outbuf, -+ * const unsigned char *inbuf, -+ * unsigned char *tweak, -+ * size_t nblocks, -+ * unsigned int nrounds); -+ */ -+ -+.align 3 -+.globl _gcry_aes_xts_enc_armv8_ce -+.type _gcry_aes_xts_enc_armv8_ce,%function; -+_gcry_aes_xts_enc_armv8_ce: -+ /* input: -+ * r0: keysched -+ * r1: outbuf -+ * r2: inbuf -+ * r3: tweak -+ * x4: nblocks -+ * w5: nrounds -+ */ -+ -+ cbz x4, .Lxts_enc_skip -+ -+ /* load tweak */ -+ ld1 {v0.16b}, [x3] -+ -+ /* load gfmul mask */ -+ mov x6, #0x87 -+ mov x7, #0x01 -+ mov v16.D[0], x6 -+ mov v16.D[1], x7 -+ -+ aes_preload_keys(x0, w5); -+ -+ b.eq .Lxts_enc_entry_192 -+ b.hi 
.Lxts_enc_entry_256 -+ -+#define XTS_ENC(bits) \ -+ .Lxts_enc_entry_##bits: \ -+ cmp x4, #4; \ -+ b.lo .Lxts_enc_loop_##bits; \ -+ \ -+ .Lxts_enc_loop4_##bits: \ -+ \ -+ ext v4.16b, v0.16b, v0.16b, #8; \ -+ \ -+ sshr v2.2d, v4.2d, #63; \ -+ add v5.2d, v0.2d, v0.2d; \ -+ and v2.16b, v2.16b, v16.16b; \ -+ add v4.2d, v4.2d, v4.2d; \ -+ eor v5.16b, v5.16b, v2.16b; \ -+ \ -+ sshr v2.2d, v4.2d, #63; \ -+ add v6.2d, v5.2d, v5.2d; \ -+ and v2.16b, v2.16b, v16.16b; \ -+ add v4.2d, v4.2d, v4.2d; \ -+ eor v6.16b, v6.16b, v2.16b; \ -+ \ -+ sshr v2.2d, v4.2d, #63; \ -+ add v7.2d, v6.2d, v6.2d; \ -+ and v2.16b, v2.16b, v16.16b; \ -+ add v4.2d, v4.2d, v4.2d; \ -+ eor v7.16b, v7.16b, v2.16b; \ -+ \ -+ sshr v2.2d, v4.2d, #63; \ -+ add v3.2d, v7.2d, v7.2d; \ -+ and v2.16b, v2.16b, v16.16b; \ -+ add v4.2d, v4.2d, v4.2d; \ -+ eor v3.16b, v3.16b, v2.16b; \ -+ ld1 {v1.16b-v2.16b}, [x2], #32; /* load plaintext */ \ -+ st1 {v3.16b}, [x3]; \ -+ sub x4, x4, #4; \ -+ eor v1.16b, v1.16b, v0.16b; \ -+ \ -+ ld1 {v3.16b-v4.16b}, [x2], #32; /* load plaintext */ \ -+ cmp x4, #4; \ -+ eor v2.16b, v2.16b, v5.16b; \ -+ eor v3.16b, v3.16b, v6.16b; \ -+ eor v4.16b, v4.16b, v7.16b; \ -+ \ -+ do_aes_4_##bits(e, mc, v1, v2, v3, v4); \ -+ \ -+ eor v1.16b, v1.16b, v0.16b; \ -+ ld1 {v0.16b}, [x3]; \ -+ eor v2.16b, v2.16b, v5.16b; \ -+ eor v3.16b, v3.16b, v6.16b; \ -+ eor v4.16b, v4.16b, v7.16b; \ -+ st1 {v1.16b-v4.16b}, [x1], #64; /* store plaintext */ \ -+ \ -+ b.hs .Lxts_enc_loop4_##bits; \ -+ CLEAR_REG(v3); \ -+ CLEAR_REG(v4); \ -+ CLEAR_REG(v5); \ -+ CLEAR_REG(v6); \ -+ CLEAR_REG(v7); \ -+ cbz x4, .Lxts_enc_done; \ -+ \ -+ .Lxts_enc_loop_##bits: \ -+ \ -+ ld1 {v1.16b}, [x2], #16; /* load plaintext */ \ -+ ext v3.16b, v0.16b, v0.16b, #8; \ -+ mov v2.16b, v0.16b; \ -+ sshr v3.2d, v3.2d, #63; \ -+ add v0.2d, v0.2d, v0.2d; \ -+ and v3.16b, v3.16b, v16.16b; \ -+ eor v1.16b, v1.16b, v2.16b; \ -+ eor v0.16b, v0.16b, v3.16b; \ -+ sub x4, x4, #1; \ -+ \ -+ do_aes_one##bits(e, mc, v1, v1); \ -+ \ -+ eor v1.16b, 
v1.16b, v2.16b; \ -+ st1 {v1.16b}, [x1], #16; /* store ciphertext */ \ -+ \ -+ cbnz x4, .Lxts_enc_loop_##bits; \ -+ b .Lxts_enc_done; -+ -+ XTS_ENC(128) -+ XTS_ENC(192) -+ XTS_ENC(256) -+ -+#undef XTS_ENC -+ -+.Lxts_enc_done: -+ aes_clear_keys(w5) -+ -+ st1 {v0.16b}, [x3] /* store tweak */ -+ -+ CLEAR_REG(v0) -+ CLEAR_REG(v1) -+ CLEAR_REG(v2) -+ -+.Lxts_enc_skip: -+ ret -+ -+.size _gcry_aes_xts_enc_armv8_ce,.-_gcry_aes_xts_enc_armv8_ce; -+ -+ -+/* -+ * void _gcry_aes_xts_dec_armv8_ce (const void *keysched, -+ * unsigned char *outbuf, -+ * const unsigned char *inbuf, -+ * unsigned char *tweak, -+ * size_t nblocks, -+ * unsigned int nrounds); -+ */ -+ -+.align 3 -+.globl _gcry_aes_xts_dec_armv8_ce -+.type _gcry_aes_xts_dec_armv8_ce,%function; -+_gcry_aes_xts_dec_armv8_ce: -+ /* input: -+ * r0: keysched -+ * r1: outbuf -+ * r2: inbuf -+ * r3: tweak -+ * x4: nblocks -+ * w5: nrounds -+ */ -+ -+ cbz x4, .Lxts_dec_skip -+ -+ /* load tweak */ -+ ld1 {v0.16b}, [x3] -+ -+ /* load gfmul mask */ -+ mov x6, #0x87 -+ mov x7, #0x01 -+ mov v16.D[0], x6 -+ mov v16.D[1], x7 -+ -+ aes_preload_keys(x0, w5); -+ -+ b.eq .Lxts_dec_entry_192 -+ b.hi .Lxts_dec_entry_256 -+ -+#define XTS_DEC(bits) \ -+ .Lxts_dec_entry_##bits: \ -+ cmp x4, #4; \ -+ b.lo .Lxts_dec_loop_##bits; \ -+ \ -+ .Lxts_dec_loop4_##bits: \ -+ \ -+ ext v4.16b, v0.16b, v0.16b, #8; \ -+ \ -+ sshr v2.2d, v4.2d, #63; \ -+ add v5.2d, v0.2d, v0.2d; \ -+ and v2.16b, v2.16b, v16.16b; \ -+ add v4.2d, v4.2d, v4.2d; \ -+ eor v5.16b, v5.16b, v2.16b; \ -+ \ -+ sshr v2.2d, v4.2d, #63; \ -+ add v6.2d, v5.2d, v5.2d; \ -+ and v2.16b, v2.16b, v16.16b; \ -+ add v4.2d, v4.2d, v4.2d; \ -+ eor v6.16b, v6.16b, v2.16b; \ -+ \ -+ sshr v2.2d, v4.2d, #63; \ -+ add v7.2d, v6.2d, v6.2d; \ -+ and v2.16b, v2.16b, v16.16b; \ -+ add v4.2d, v4.2d, v4.2d; \ -+ eor v7.16b, v7.16b, v2.16b; \ -+ \ -+ sshr v2.2d, v4.2d, #63; \ -+ add v3.2d, v7.2d, v7.2d; \ -+ and v2.16b, v2.16b, v16.16b; \ -+ add v4.2d, v4.2d, v4.2d; \ -+ eor v3.16b, v3.16b, v2.16b; \ -+ ld1 
{v1.16b-v2.16b}, [x2], #32; /* load plaintext */ \ -+ st1 {v3.16b}, [x3]; \ -+ sub x4, x4, #4; \ -+ eor v1.16b, v1.16b, v0.16b; \ -+ \ -+ ld1 {v3.16b-v4.16b}, [x2], #32; /* load plaintext */ \ -+ cmp x4, #4; \ -+ eor v2.16b, v2.16b, v5.16b; \ -+ eor v3.16b, v3.16b, v6.16b; \ -+ eor v4.16b, v4.16b, v7.16b; \ -+ \ -+ do_aes_4_##bits(d, imc, v1, v2, v3, v4); \ -+ \ -+ eor v1.16b, v1.16b, v0.16b; \ -+ ld1 {v0.16b}, [x3]; \ -+ eor v2.16b, v2.16b, v5.16b; \ -+ eor v3.16b, v3.16b, v6.16b; \ -+ eor v4.16b, v4.16b, v7.16b; \ -+ st1 {v1.16b-v4.16b}, [x1], #64; /* store plaintext */ \ -+ \ -+ b.hs .Lxts_dec_loop4_##bits; \ -+ CLEAR_REG(v3); \ -+ CLEAR_REG(v4); \ -+ CLEAR_REG(v5); \ -+ CLEAR_REG(v6); \ -+ CLEAR_REG(v7); \ -+ cbz x4, .Lxts_dec_done; \ -+ \ -+ .Lxts_dec_loop_##bits: \ -+ \ -+ ld1 {v1.16b}, [x2], #16; /* load plaintext */ \ -+ ext v3.16b, v0.16b, v0.16b, #8; \ -+ mov v2.16b, v0.16b; \ -+ sshr v3.2d, v3.2d, #63; \ -+ add v0.2d, v0.2d, v0.2d; \ -+ and v3.16b, v3.16b, v16.16b; \ -+ eor v1.16b, v1.16b, v2.16b; \ -+ eor v0.16b, v0.16b, v3.16b; \ -+ sub x4, x4, #1; \ -+ \ -+ do_aes_one##bits(d, imc, v1, v1); \ -+ \ -+ eor v1.16b, v1.16b, v2.16b; \ -+ st1 {v1.16b}, [x1], #16; /* store ciphertext */ \ -+ \ -+ cbnz x4, .Lxts_dec_loop_##bits; \ -+ b .Lxts_dec_done; -+ -+ XTS_DEC(128) -+ XTS_DEC(192) -+ XTS_DEC(256) -+ -+#undef XTS_DEC -+ -+.Lxts_dec_done: -+ aes_clear_keys(w5) -+ -+ st1 {v0.16b}, [x3] /* store tweak */ -+ -+ CLEAR_REG(v0) -+ CLEAR_REG(v1) -+ CLEAR_REG(v2) -+ -+.Lxts_dec_skip: -+ ret -+ -+.size _gcry_aes_xts_dec_armv8_ce,.-_gcry_aes_xts_dec_armv8_ce; -+ -+ -+/* - * u32 _gcry_aes_sbox4_armv8_ce(u32 in4b); - */ - .align 3 -diff -up libgcrypt-1.8.5/cipher/rijndael-armv8-ce.c.aes-perf libgcrypt-1.8.5/cipher/rijndael-armv8-ce.c ---- libgcrypt-1.8.5/cipher/rijndael-armv8-ce.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/rijndael-armv8-ce.c 2020-04-22 18:29:41.675862134 +0200 -@@ -101,6 +101,16 @@ extern void _gcry_aes_ocb_auth_armv8_ce 
- size_t nblocks, - unsigned int nrounds, - unsigned int blkn); -+extern void _gcry_aes_xts_enc_armv8_ce (const void *keysched, -+ unsigned char *outbuf, -+ const unsigned char *inbuf, -+ unsigned char *tweak, -+ size_t nblocks, unsigned int nrounds); -+extern void _gcry_aes_xts_dec_armv8_ce (const void *keysched, -+ unsigned char *outbuf, -+ const unsigned char *inbuf, -+ unsigned char *tweak, -+ size_t nblocks, unsigned int nrounds); - - typedef void (*ocb_crypt_fn_t) (const void *keysched, unsigned char *outbuf, - const unsigned char *inbuf, -@@ -108,6 +118,11 @@ typedef void (*ocb_crypt_fn_t) (const vo - unsigned char *L_table, size_t nblocks, - unsigned int nrounds, unsigned int blkn); - -+typedef void (*xts_crypt_fn_t) (const void *keysched, unsigned char *outbuf, -+ const unsigned char *inbuf, -+ unsigned char *tweak, size_t nblocks, -+ unsigned int nrounds); -+ - void - _gcry_aes_armv8_ce_setkey (RIJNDAEL_context *ctx, const byte *key) - { -@@ -269,8 +284,8 @@ _gcry_aes_armv8_ce_decrypt (const RIJNDA - } - - void --_gcry_aes_armv8_ce_cbc_enc (const RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -+_gcry_aes_armv8_ce_cbc_enc (const RIJNDAEL_context *ctx, unsigned char *iv, -+ unsigned char *outbuf, const unsigned char *inbuf, - size_t nblocks, int cbc_mac) - { - const void *keysched = ctx->keyschenc32; -@@ -281,19 +296,25 @@ _gcry_aes_armv8_ce_cbc_enc (const RIJNDA - } - - void --_gcry_aes_armv8_ce_cbc_dec (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -+_gcry_aes_armv8_ce_cbc_dec (RIJNDAEL_context *ctx, unsigned char *iv, -+ unsigned char *outbuf, const unsigned char *inbuf, - size_t nblocks) - { - const void *keysched = ctx->keyschdec32; - unsigned int nrounds = ctx->rounds; - -+ if ( !ctx->decryption_prepared ) -+ { -+ _gcry_aes_armv8_ce_prepare_decryption ( ctx ); -+ ctx->decryption_prepared = 1; -+ } -+ - _gcry_aes_cbc_dec_armv8_ce(keysched, outbuf, inbuf, 
iv, nblocks, nrounds); - } - - void --_gcry_aes_armv8_ce_cfb_enc (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -+_gcry_aes_armv8_ce_cfb_enc (RIJNDAEL_context *ctx, unsigned char *iv, -+ unsigned char *outbuf, const unsigned char *inbuf, - size_t nblocks) - { - const void *keysched = ctx->keyschenc32; -@@ -303,8 +324,8 @@ _gcry_aes_armv8_ce_cfb_enc (RIJNDAEL_con - } - - void --_gcry_aes_armv8_ce_cfb_dec (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -+_gcry_aes_armv8_ce_cfb_dec (RIJNDAEL_context *ctx, unsigned char *iv, -+ unsigned char *outbuf, const unsigned char *inbuf, - size_t nblocks) - { - const void *keysched = ctx->keyschenc32; -@@ -314,8 +335,8 @@ _gcry_aes_armv8_ce_cfb_dec (RIJNDAEL_con - } - - void --_gcry_aes_armv8_ce_ctr_enc (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -+_gcry_aes_armv8_ce_ctr_enc (RIJNDAEL_context *ctx, unsigned char *iv, -+ unsigned char *outbuf, const unsigned char *inbuf, - size_t nblocks) - { - const void *keysched = ctx->keyschenc32; -@@ -324,7 +345,7 @@ _gcry_aes_armv8_ce_ctr_enc (RIJNDAEL_con - _gcry_aes_ctr_enc_armv8_ce(keysched, outbuf, inbuf, iv, nblocks, nrounds); - } - --void -+size_t - _gcry_aes_armv8_ce_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, - const void *inbuf_arg, size_t nblocks, - int encrypt) -@@ -338,13 +359,21 @@ _gcry_aes_armv8_ce_ocb_crypt (gcry_ciphe - unsigned int nrounds = ctx->rounds; - u64 blkn = c->u_mode.ocb.data_nblocks; - -+ if ( !encrypt && !ctx->decryption_prepared ) -+ { -+ _gcry_aes_armv8_ce_prepare_decryption ( ctx ); -+ ctx->decryption_prepared = 1; -+ } -+ - c->u_mode.ocb.data_nblocks = blkn + nblocks; - - crypt_fn(keysched, outbuf, inbuf, c->u_iv.iv, c->u_ctr.ctr, - c->u_mode.ocb.L[0], nblocks, nrounds, (unsigned int)blkn); -+ -+ return 0; - } - --void -+size_t - _gcry_aes_armv8_ce_ocb_auth (gcry_cipher_hd_t c, void *abuf_arg, - size_t 
nblocks) - { -@@ -359,6 +388,27 @@ _gcry_aes_armv8_ce_ocb_auth (gcry_cipher - _gcry_aes_ocb_auth_armv8_ce(keysched, abuf, c->u_mode.ocb.aad_offset, - c->u_mode.ocb.aad_sum, c->u_mode.ocb.L[0], - nblocks, nrounds, (unsigned int)blkn); -+ -+ return 0; -+} -+ -+void -+_gcry_aes_armv8_ce_xts_crypt (RIJNDAEL_context *ctx, unsigned char *tweak, -+ unsigned char *outbuf, const unsigned char *inbuf, -+ size_t nblocks, int encrypt) -+{ -+ const void *keysched = encrypt ? ctx->keyschenc32 : ctx->keyschdec32; -+ xts_crypt_fn_t crypt_fn = encrypt ? _gcry_aes_xts_enc_armv8_ce -+ : _gcry_aes_xts_dec_armv8_ce; -+ unsigned int nrounds = ctx->rounds; -+ -+ if ( !encrypt && !ctx->decryption_prepared ) -+ { -+ _gcry_aes_armv8_ce_prepare_decryption ( ctx ); -+ ctx->decryption_prepared = 1; -+ } -+ -+ crypt_fn(keysched, outbuf, inbuf, tweak, nblocks, nrounds); - } - - #endif /* USE_ARM_CE */ -diff -up libgcrypt-1.8.5/cipher/rijndael.c.aes-perf libgcrypt-1.8.5/cipher/rijndael.c ---- libgcrypt-1.8.5/cipher/rijndael.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/rijndael.c 2020-04-22 18:29:41.676862114 +0200 -@@ -77,32 +77,29 @@ extern unsigned int _gcry_aes_aesni_encr - extern unsigned int _gcry_aes_aesni_decrypt (const RIJNDAEL_context *ctx, - unsigned char *dst, - const unsigned char *src); --extern void _gcry_aes_aesni_cfb_enc (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *iv, size_t nblocks); --extern void _gcry_aes_aesni_cbc_enc (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *iv, size_t nblocks, -- int cbc_mac); --extern void _gcry_aes_aesni_ctr_enc (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *ctr, size_t nblocks); --extern void _gcry_aes_aesni_cfb_dec (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *iv, size_t nblocks); --extern void 
_gcry_aes_aesni_cbc_dec (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *iv, size_t nblocks); --extern void _gcry_aes_aesni_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, -- const void *inbuf_arg, size_t nblocks, -- int encrypt); --extern void _gcry_aes_aesni_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, -- size_t nblocks); -+extern void _gcry_aes_aesni_cfb_enc (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_aesni_cbc_enc (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks, int cbc_mac); -+extern void _gcry_aes_aesni_ctr_enc (void *context, unsigned char *ctr, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_aesni_cfb_dec (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_aesni_cbc_dec (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern size_t _gcry_aes_aesni_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, -+ const void *inbuf_arg, size_t nblocks, -+ int encrypt); -+extern size_t _gcry_aes_aesni_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_aesni_xts_crypt (void *context, unsigned char *tweak, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks, int encrypt); - #endif - - #ifdef USE_SSSE3 -@@ -116,32 +113,27 @@ extern unsigned int _gcry_aes_ssse3_encr - extern unsigned int _gcry_aes_ssse3_decrypt (const RIJNDAEL_context *ctx, - unsigned char *dst, - const unsigned char *src); --extern void _gcry_aes_ssse3_cfb_enc (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *iv, size_t nblocks); --extern void _gcry_aes_ssse3_cbc_enc (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char 
*iv, size_t nblocks, -+extern void _gcry_aes_ssse3_cfb_enc (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_ssse3_cbc_enc (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks, - int cbc_mac); --extern void _gcry_aes_ssse3_ctr_enc (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *ctr, size_t nblocks); --extern void _gcry_aes_ssse3_cfb_dec (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *iv, size_t nblocks); --extern void _gcry_aes_ssse3_cbc_dec (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *iv, size_t nblocks); --extern void _gcry_aes_ssse3_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, -- const void *inbuf_arg, size_t nblocks, -- int encrypt); --extern void _gcry_aes_ssse3_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, -- size_t nblocks); -+extern void _gcry_aes_ssse3_ctr_enc (void *context, unsigned char *ctr, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_ssse3_cfb_dec (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_ssse3_cbc_dec (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern size_t _gcry_aes_ssse3_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, -+ const void *inbuf_arg, size_t nblocks, -+ int encrypt); -+extern size_t _gcry_aes_ssse3_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, -+ size_t nblocks); - #endif - - #ifdef USE_PADLOCK -@@ -180,34 +172,110 @@ extern unsigned int _gcry_aes_armv8_ce_d - unsigned char *dst, - const unsigned char *src); - --extern void _gcry_aes_armv8_ce_cfb_enc (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *iv, size_t nblocks); 
--extern void _gcry_aes_armv8_ce_cbc_enc (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *iv, size_t nblocks, -+extern void _gcry_aes_armv8_ce_cfb_enc (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_armv8_ce_cbc_enc (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks, - int cbc_mac); --extern void _gcry_aes_armv8_ce_ctr_enc (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *ctr, size_t nblocks); --extern void _gcry_aes_armv8_ce_cfb_dec (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *iv, size_t nblocks); --extern void _gcry_aes_armv8_ce_cbc_dec (RIJNDAEL_context *ctx, -- unsigned char *outbuf, -- const unsigned char *inbuf, -- unsigned char *iv, size_t nblocks); --extern void _gcry_aes_armv8_ce_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, -- const void *inbuf_arg, size_t nblocks, -- int encrypt); --extern void _gcry_aes_armv8_ce_ocb_auth (gcry_cipher_hd_t c, -- const void *abuf_arg, size_t nblocks); -+extern void _gcry_aes_armv8_ce_ctr_enc (void *context, unsigned char *ctr, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_armv8_ce_cfb_dec (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_armv8_ce_cbc_dec (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern size_t _gcry_aes_armv8_ce_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, -+ const void *inbuf_arg, size_t nblocks, -+ int encrypt); -+extern size_t _gcry_aes_armv8_ce_ocb_auth (gcry_cipher_hd_t c, -+ const void *abuf_arg, size_t nblocks); -+extern void _gcry_aes_armv8_ce_xts_crypt (void *context, unsigned char *tweak, -+ void *outbuf_arg, -+ const void *inbuf_arg, -+ size_t 
nblocks, int encrypt); - #endif /*USE_ARM_ASM*/ - -+#ifdef USE_PPC_CRYPTO -+/* PowerPC Crypto implementations of AES */ -+extern void _gcry_aes_ppc8_setkey(RIJNDAEL_context *ctx, const byte *key); -+extern void _gcry_aes_ppc8_prepare_decryption(RIJNDAEL_context *ctx); -+ -+extern unsigned int _gcry_aes_ppc8_encrypt(const RIJNDAEL_context *ctx, -+ unsigned char *dst, -+ const unsigned char *src); -+extern unsigned int _gcry_aes_ppc8_decrypt(const RIJNDAEL_context *ctx, -+ unsigned char *dst, -+ const unsigned char *src); -+ -+extern void _gcry_aes_ppc8_cfb_enc (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_ppc8_cbc_enc (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks, int cbc_mac); -+extern void _gcry_aes_ppc8_ctr_enc (void *context, unsigned char *ctr, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_ppc8_cfb_dec (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_ppc8_cbc_dec (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+ -+extern size_t _gcry_aes_ppc8_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, -+ const void *inbuf_arg, size_t nblocks, -+ int encrypt); -+extern size_t _gcry_aes_ppc8_ocb_auth (gcry_cipher_hd_t c, -+ const void *abuf_arg, size_t nblocks); -+ -+extern void _gcry_aes_ppc8_xts_crypt (void *context, unsigned char *tweak, -+ void *outbuf_arg, -+ const void *inbuf_arg, -+ size_t nblocks, int encrypt); -+#endif /*USE_PPC_CRYPTO*/ -+ -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+/* Power9 little-endian crypto implementations of AES */ -+extern unsigned int _gcry_aes_ppc9le_encrypt(const RIJNDAEL_context *ctx, -+ unsigned char *dst, -+ const unsigned char *src); -+extern unsigned int _gcry_aes_ppc9le_decrypt(const RIJNDAEL_context *ctx, -+ unsigned char *dst, -+ const unsigned 
char *src); -+ -+extern void _gcry_aes_ppc9le_cfb_enc (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_ppc9le_cbc_enc (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks, int cbc_mac); -+extern void _gcry_aes_ppc9le_ctr_enc (void *context, unsigned char *ctr, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_ppc9le_cfb_dec (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+extern void _gcry_aes_ppc9le_cbc_dec (void *context, unsigned char *iv, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks); -+ -+extern size_t _gcry_aes_ppc9le_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, -+ const void *inbuf_arg, size_t nblocks, -+ int encrypt); -+extern size_t _gcry_aes_ppc9le_ocb_auth (gcry_cipher_hd_t c, -+ const void *abuf_arg, size_t nblocks); -+ -+extern void _gcry_aes_ppc9le_xts_crypt (void *context, unsigned char *tweak, -+ void *outbuf_arg, -+ const void *inbuf_arg, -+ size_t nblocks, int encrypt); -+#endif /*USE_PPC_CRYPTO_WITH_PPC9LE*/ -+ - static unsigned int do_encrypt (const RIJNDAEL_context *ctx, unsigned char *bx, - const unsigned char *ax); - static unsigned int do_decrypt (const RIJNDAEL_context *ctx, unsigned char *bx, -@@ -260,7 +328,8 @@ static void prefetch_dec(void) - - /* Perform the key setup. 
*/ - static gcry_err_code_t --do_setkey (RIJNDAEL_context *ctx, const byte *key, const unsigned keylen) -+do_setkey (RIJNDAEL_context *ctx, const byte *key, const unsigned keylen, -+ gcry_cipher_hd_t hd) - { - static int initialized = 0; - static const char *selftest_failed = 0; -@@ -268,7 +337,7 @@ do_setkey (RIJNDAEL_context *ctx, const - int i,j, r, t, rconpointer = 0; - int KC; - #if defined(USE_AESNI) || defined(USE_PADLOCK) || defined(USE_SSSE3) \ -- || defined(USE_ARM_CE) -+ || defined(USE_ARM_CE) || defined(USE_PPC_CRYPTO) - unsigned int hwfeatures; - #endif - -@@ -310,7 +379,7 @@ do_setkey (RIJNDAEL_context *ctx, const - ctx->rounds = rounds; - - #if defined(USE_AESNI) || defined(USE_PADLOCK) || defined(USE_SSSE3) \ -- || defined(USE_ARM_CE) -+ || defined(USE_ARM_CE) || defined(USE_PPC_CRYPTO) - hwfeatures = _gcry_get_hw_features (); - #endif - -@@ -327,6 +396,12 @@ do_setkey (RIJNDAEL_context *ctx, const - #ifdef USE_ARM_CE - ctx->use_arm_ce = 0; - #endif -+#ifdef USE_PPC_CRYPTO -+ ctx->use_ppc_crypto = 0; -+#endif -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+ ctx->use_ppc9le_crypto = 0; -+#endif - - if (0) - { -@@ -340,6 +415,17 @@ do_setkey (RIJNDAEL_context *ctx, const - ctx->prefetch_enc_fn = NULL; - ctx->prefetch_dec_fn = NULL; - ctx->use_aesni = 1; -+ if (hd) -+ { -+ hd->bulk.cfb_enc = _gcry_aes_aesni_cfb_enc; -+ hd->bulk.cfb_dec = _gcry_aes_aesni_cfb_dec; -+ hd->bulk.cbc_enc = _gcry_aes_aesni_cbc_enc; -+ hd->bulk.cbc_dec = _gcry_aes_aesni_cbc_dec; -+ hd->bulk.ctr_enc = _gcry_aes_aesni_ctr_enc; -+ hd->bulk.ocb_crypt = _gcry_aes_aesni_ocb_crypt; -+ hd->bulk.ocb_auth = _gcry_aes_aesni_ocb_auth; -+ hd->bulk.xts_crypt = _gcry_aes_aesni_xts_crypt; -+ } - } - #endif - #ifdef USE_PADLOCK -@@ -361,6 +447,16 @@ do_setkey (RIJNDAEL_context *ctx, const - ctx->prefetch_enc_fn = NULL; - ctx->prefetch_dec_fn = NULL; - ctx->use_ssse3 = 1; -+ if (hd) -+ { -+ hd->bulk.cfb_enc = _gcry_aes_ssse3_cfb_enc; -+ hd->bulk.cfb_dec = _gcry_aes_ssse3_cfb_dec; -+ hd->bulk.cbc_enc = 
_gcry_aes_ssse3_cbc_enc; -+ hd->bulk.cbc_dec = _gcry_aes_ssse3_cbc_dec; -+ hd->bulk.ctr_enc = _gcry_aes_ssse3_ctr_enc; -+ hd->bulk.ocb_crypt = _gcry_aes_ssse3_ocb_crypt; -+ hd->bulk.ocb_auth = _gcry_aes_ssse3_ocb_auth; -+ } - } - #endif - #ifdef USE_ARM_CE -@@ -371,6 +467,60 @@ do_setkey (RIJNDAEL_context *ctx, const - ctx->prefetch_enc_fn = NULL; - ctx->prefetch_dec_fn = NULL; - ctx->use_arm_ce = 1; -+ if (hd) -+ { -+ hd->bulk.cfb_enc = _gcry_aes_armv8_ce_cfb_enc; -+ hd->bulk.cfb_dec = _gcry_aes_armv8_ce_cfb_dec; -+ hd->bulk.cbc_enc = _gcry_aes_armv8_ce_cbc_enc; -+ hd->bulk.cbc_dec = _gcry_aes_armv8_ce_cbc_dec; -+ hd->bulk.ctr_enc = _gcry_aes_armv8_ce_ctr_enc; -+ hd->bulk.ocb_crypt = _gcry_aes_armv8_ce_ocb_crypt; -+ hd->bulk.ocb_auth = _gcry_aes_armv8_ce_ocb_auth; -+ hd->bulk.xts_crypt = _gcry_aes_armv8_ce_xts_crypt; -+ } -+ } -+#endif -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+ else if ((hwfeatures & HWF_PPC_VCRYPTO) && (hwfeatures & HWF_PPC_ARCH_3_00)) -+ { -+ ctx->encrypt_fn = _gcry_aes_ppc9le_encrypt; -+ ctx->decrypt_fn = _gcry_aes_ppc9le_decrypt; -+ ctx->prefetch_enc_fn = NULL; -+ ctx->prefetch_dec_fn = NULL; -+ ctx->use_ppc_crypto = 1; /* same key-setup as USE_PPC_CRYPTO */ -+ ctx->use_ppc9le_crypto = 1; -+ if (hd) -+ { -+ hd->bulk.cfb_enc = _gcry_aes_ppc9le_cfb_enc; -+ hd->bulk.cfb_dec = _gcry_aes_ppc9le_cfb_dec; -+ hd->bulk.cbc_enc = _gcry_aes_ppc9le_cbc_enc; -+ hd->bulk.cbc_dec = _gcry_aes_ppc9le_cbc_dec; -+ hd->bulk.ctr_enc = _gcry_aes_ppc9le_ctr_enc; -+ hd->bulk.ocb_crypt = _gcry_aes_ppc9le_ocb_crypt; -+ hd->bulk.ocb_auth = _gcry_aes_ppc9le_ocb_auth; -+ hd->bulk.xts_crypt = _gcry_aes_ppc9le_xts_crypt; -+ } -+ } -+#endif -+#ifdef USE_PPC_CRYPTO -+ else if (hwfeatures & HWF_PPC_VCRYPTO) -+ { -+ ctx->encrypt_fn = _gcry_aes_ppc8_encrypt; -+ ctx->decrypt_fn = _gcry_aes_ppc8_decrypt; -+ ctx->prefetch_enc_fn = NULL; -+ ctx->prefetch_dec_fn = NULL; -+ ctx->use_ppc_crypto = 1; -+ if (hd) -+ { -+ hd->bulk.cfb_enc = _gcry_aes_ppc8_cfb_enc; -+ hd->bulk.cfb_dec = 
_gcry_aes_ppc8_cfb_dec; -+ hd->bulk.cbc_enc = _gcry_aes_ppc8_cbc_enc; -+ hd->bulk.cbc_dec = _gcry_aes_ppc8_cbc_dec; -+ hd->bulk.ctr_enc = _gcry_aes_ppc8_ctr_enc; -+ hd->bulk.ocb_crypt = _gcry_aes_ppc8_ocb_crypt; -+ hd->bulk.ocb_auth = _gcry_aes_ppc8_ocb_auth; -+ hd->bulk.xts_crypt = _gcry_aes_ppc8_xts_crypt; -+ } - } - #endif - else -@@ -399,6 +549,10 @@ do_setkey (RIJNDAEL_context *ctx, const - else if (ctx->use_arm_ce) - _gcry_aes_armv8_ce_setkey (ctx, key); - #endif -+#ifdef USE_PPC_CRYPTO -+ else if (ctx->use_ppc_crypto) -+ _gcry_aes_ppc8_setkey (ctx, key); -+#endif - else - { - const byte *sbox = ((const byte *)encT) + 1; -@@ -503,10 +657,11 @@ do_setkey (RIJNDAEL_context *ctx, const - - - static gcry_err_code_t --rijndael_setkey (void *context, const byte *key, const unsigned keylen) -+rijndael_setkey (void *context, const byte *key, const unsigned keylen, -+ gcry_cipher_hd_t hd) - { - RIJNDAEL_context *ctx = context; -- return do_setkey (ctx, key, keylen); -+ return do_setkey (ctx, key, keylen, hd); - } - - -@@ -535,7 +690,13 @@ prepare_decryption( RIJNDAEL_context *ct - { - _gcry_aes_armv8_ce_prepare_decryption (ctx); - } --#endif /*USE_SSSE3*/ -+#endif /*USE_ARM_CE*/ -+#ifdef USE_PPC_CRYPTO -+ else if (ctx->use_ppc_crypto) -+ { -+ _gcry_aes_ppc8_prepare_decryption (ctx); -+ } -+#endif - #ifdef USE_PADLOCK - else if (ctx->use_padlock) - { -@@ -790,42 +957,56 @@ _gcry_aes_cfb_enc (void *context, unsign - const unsigned char *inbuf = inbuf_arg; - unsigned int burn_depth = 0; - -- if (ctx->prefetch_enc_fn) -- ctx->prefetch_enc_fn(); -- - if (0) - ; - #ifdef USE_AESNI - else if (ctx->use_aesni) - { -- _gcry_aes_aesni_cfb_enc (ctx, outbuf, inbuf, iv, nblocks); -- burn_depth = 0; -+ _gcry_aes_aesni_cfb_enc (ctx, iv, outbuf, inbuf, nblocks); -+ return; - } - #endif /*USE_AESNI*/ - #ifdef USE_SSSE3 - else if (ctx->use_ssse3) - { --
_gcry_aes_ssse3_cfb_enc (ctx, outbuf, inbuf, iv, nblocks); -- burn_depth = 0; -+ _gcry_aes_ssse3_cfb_enc (ctx, iv, outbuf, inbuf, nblocks); -+ return; - } - #endif /*USE_SSSE3*/ - #ifdef USE_ARM_CE - else if (ctx->use_arm_ce) - { -- _gcry_aes_armv8_ce_cfb_enc (ctx, outbuf, inbuf, iv, nblocks); -- burn_depth = 0; -+ _gcry_aes_armv8_ce_cfb_enc (ctx, iv, outbuf, inbuf, nblocks); -+ return; - } - #endif /*USE_ARM_CE*/ -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+ else if (ctx->use_ppc9le_crypto) -+ { -+ _gcry_aes_ppc9le_cfb_enc (ctx, iv, outbuf, inbuf, nblocks); -+ return; -+ } -+#endif /*USE_PPC_CRYPTO_WITH_PPC9LE*/ -+#ifdef USE_PPC_CRYPTO -+ else if (ctx->use_ppc_crypto) -+ { -+ _gcry_aes_ppc8_cfb_enc (ctx, iv, outbuf, inbuf, nblocks); -+ return; -+ } -+#endif /*USE_PPC_CRYPTO*/ - else - { - rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; - -+ if (ctx->prefetch_enc_fn) -+ ctx->prefetch_enc_fn(); -+ - for ( ;nblocks; nblocks-- ) - { - /* Encrypt the IV. */ - burn_depth = encrypt_fn (ctx, iv, iv); - /* XOR the input with the IV and store input into IV. 
*/ -- buf_xor_2dst(outbuf, iv, inbuf, BLOCKSIZE); -+ cipher_block_xor_2dst(outbuf, iv, inbuf, BLOCKSIZE); - outbuf += BLOCKSIZE; - inbuf += BLOCKSIZE; - } -@@ -851,41 +1032,55 @@ _gcry_aes_cbc_enc (void *context, unsign - unsigned char *last_iv; - unsigned int burn_depth = 0; - -- if (ctx->prefetch_enc_fn) -- ctx->prefetch_enc_fn(); -- - if (0) - ; - #ifdef USE_AESNI - else if (ctx->use_aesni) - { -- _gcry_aes_aesni_cbc_enc (ctx, outbuf, inbuf, iv, nblocks, cbc_mac); -- burn_depth = 0; -+ _gcry_aes_aesni_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); -+ return; - } - #endif /*USE_AESNI*/ - #ifdef USE_SSSE3 - else if (ctx->use_ssse3) - { -- _gcry_aes_ssse3_cbc_enc (ctx, outbuf, inbuf, iv, nblocks, cbc_mac); -- burn_depth = 0; -+ _gcry_aes_ssse3_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); -+ return; - } - #endif /*USE_SSSE3*/ - #ifdef USE_ARM_CE - else if (ctx->use_arm_ce) - { -- _gcry_aes_armv8_ce_cbc_enc (ctx, outbuf, inbuf, iv, nblocks, cbc_mac); -- burn_depth = 0; -+ _gcry_aes_armv8_ce_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); -+ return; - } - #endif /*USE_ARM_CE*/ -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+ else if (ctx->use_ppc9le_crypto) -+ { -+ _gcry_aes_ppc9le_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); -+ return; -+ } -+#endif /*USE_PPC_CRYPTO_WITH_PPC9LE*/ -+#ifdef USE_PPC_CRYPTO -+ else if (ctx->use_ppc_crypto) -+ { -+ _gcry_aes_ppc8_cbc_enc (ctx, iv, outbuf, inbuf, nblocks, cbc_mac); -+ return; -+ } -+#endif /*USE_PPC_CRYPTO*/ - else - { - rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; - -+ if (ctx->prefetch_enc_fn) -+ ctx->prefetch_enc_fn(); -+ - last_iv = iv; - - for ( ;nblocks; nblocks-- ) - { -- buf_xor(outbuf, inbuf, last_iv, BLOCKSIZE); -+ cipher_block_xor(outbuf, inbuf, last_iv, BLOCKSIZE); - - burn_depth = encrypt_fn (ctx, outbuf, outbuf); - -@@ -896,7 +1091,7 @@ _gcry_aes_cbc_enc (void *context, unsign - } - - if (last_iv != iv) -- buf_cpy (iv, last_iv, BLOCKSIZE); -+ cipher_block_cpy (iv, last_iv, BLOCKSIZE); - } - - 
if (burn_depth) -@@ -920,43 +1115,57 @@ _gcry_aes_ctr_enc (void *context, unsign - unsigned int burn_depth = 0; - int i; - -- if (ctx->prefetch_enc_fn) -- ctx->prefetch_enc_fn(); -- - if (0) - ; - #ifdef USE_AESNI - else if (ctx->use_aesni) - { -- _gcry_aes_aesni_ctr_enc (ctx, outbuf, inbuf, ctr, nblocks); -- burn_depth = 0; -+ _gcry_aes_aesni_ctr_enc (ctx, ctr, outbuf, inbuf, nblocks); -+ return; - } - #endif /*USE_AESNI*/ - #ifdef USE_SSSE3 - else if (ctx->use_ssse3) - { -- _gcry_aes_ssse3_ctr_enc (ctx, outbuf, inbuf, ctr, nblocks); -- burn_depth = 0; -+ _gcry_aes_ssse3_ctr_enc (ctx, ctr, outbuf, inbuf, nblocks); -+ return; - } - #endif /*USE_SSSE3*/ - #ifdef USE_ARM_CE - else if (ctx->use_arm_ce) - { -- _gcry_aes_armv8_ce_ctr_enc (ctx, outbuf, inbuf, ctr, nblocks); -- burn_depth = 0; -+ _gcry_aes_armv8_ce_ctr_enc (ctx, ctr, outbuf, inbuf, nblocks); -+ return; - } - #endif /*USE_ARM_CE*/ -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+ else if (ctx->use_ppc9le_crypto) -+ { -+ _gcry_aes_ppc9le_ctr_enc (ctx, ctr, outbuf, inbuf, nblocks); -+ return; -+ } -+#endif /*USE_PPC_CRYPTO_WITH_PPC9LE*/ -+#ifdef USE_PPC_CRYPTO -+ else if (ctx->use_ppc_crypto) -+ { -+ _gcry_aes_ppc8_ctr_enc (ctx, ctr, outbuf, inbuf, nblocks); -+ return; -+ } -+#endif /*USE_PPC_CRYPTO*/ - else - { - union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } tmp; - rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; - -+ if (ctx->prefetch_enc_fn) -+ ctx->prefetch_enc_fn(); -+ - for ( ;nblocks; nblocks-- ) - { - /* Encrypt the counter. */ - burn_depth = encrypt_fn (ctx, tmp.x1, ctr); - /* XOR the input with the encrypted counter and store in output. */ -- buf_xor(outbuf, tmp.x1, inbuf, BLOCKSIZE); -+ cipher_block_xor(outbuf, tmp.x1, inbuf, BLOCKSIZE); - outbuf += BLOCKSIZE; - inbuf += BLOCKSIZE; - /* Increment the counter. 
*/ -@@ -1187,40 +1396,54 @@ _gcry_aes_cfb_dec (void *context, unsign - const unsigned char *inbuf = inbuf_arg; - unsigned int burn_depth = 0; - -- if (ctx->prefetch_enc_fn) -- ctx->prefetch_enc_fn(); -- - if (0) - ; - #ifdef USE_AESNI - else if (ctx->use_aesni) - { -- _gcry_aes_aesni_cfb_dec (ctx, outbuf, inbuf, iv, nblocks); -- burn_depth = 0; -+ _gcry_aes_aesni_cfb_dec (ctx, iv, outbuf, inbuf, nblocks); -+ return; - } - #endif /*USE_AESNI*/ - #ifdef USE_SSSE3 - else if (ctx->use_ssse3) - { -- _gcry_aes_ssse3_cfb_dec (ctx, outbuf, inbuf, iv, nblocks); -- burn_depth = 0; -+ _gcry_aes_ssse3_cfb_dec (ctx, iv, outbuf, inbuf, nblocks); -+ return; - } - #endif /*USE_SSSE3*/ - #ifdef USE_ARM_CE - else if (ctx->use_arm_ce) - { -- _gcry_aes_armv8_ce_cfb_dec (ctx, outbuf, inbuf, iv, nblocks); -- burn_depth = 0; -+ _gcry_aes_armv8_ce_cfb_dec (ctx, iv, outbuf, inbuf, nblocks); -+ return; - } - #endif /*USE_ARM_CE*/ -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+ else if (ctx->use_ppc9le_crypto) -+ { -+ _gcry_aes_ppc9le_cfb_dec (ctx, iv, outbuf, inbuf, nblocks); -+ return; -+ } -+#endif /*USE_PPC_CRYPTO_WITH_PPC9LE*/ -+#ifdef USE_PPC_CRYPTO -+ else if (ctx->use_ppc_crypto) -+ { -+ _gcry_aes_ppc8_cfb_dec (ctx, iv, outbuf, inbuf, nblocks); -+ return; -+ } -+#endif /*USE_PPC_CRYPTO*/ - else - { - rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; - -+ if (ctx->prefetch_enc_fn) -+ ctx->prefetch_enc_fn(); -+ - for ( ;nblocks; nblocks-- ) - { - burn_depth = encrypt_fn (ctx, iv, iv); -- buf_xor_n_copy(outbuf, iv, inbuf, BLOCKSIZE); -+ cipher_block_xor_n_copy(outbuf, iv, inbuf, BLOCKSIZE); - outbuf += BLOCKSIZE; - inbuf += BLOCKSIZE; - } -@@ -1245,39 +1468,53 @@ _gcry_aes_cbc_dec (void *context, unsign - const unsigned char *inbuf = inbuf_arg; - unsigned int burn_depth = 0; - -- check_decryption_preparation (ctx); -- -- if (ctx->prefetch_dec_fn) -- ctx->prefetch_dec_fn(); -- - if (0) - ; - #ifdef USE_AESNI - else if (ctx->use_aesni) - { -- _gcry_aes_aesni_cbc_dec (ctx, outbuf, inbuf, iv, 
nblocks); -- burn_depth = 0; -+ _gcry_aes_aesni_cbc_dec (ctx, iv, outbuf, inbuf, nblocks); -+ return; - } - #endif /*USE_AESNI*/ - #ifdef USE_SSSE3 - else if (ctx->use_ssse3) - { -- _gcry_aes_ssse3_cbc_dec (ctx, outbuf, inbuf, iv, nblocks); -- burn_depth = 0; -+ _gcry_aes_ssse3_cbc_dec (ctx, iv, outbuf, inbuf, nblocks); -+ return; - } - #endif /*USE_SSSE3*/ - #ifdef USE_ARM_CE - else if (ctx->use_arm_ce) - { -- _gcry_aes_armv8_ce_cbc_dec (ctx, outbuf, inbuf, iv, nblocks); -- burn_depth = 0; -+ _gcry_aes_armv8_ce_cbc_dec (ctx, iv, outbuf, inbuf, nblocks); -+ return; - } - #endif /*USE_ARM_CE*/ -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+ else if (ctx->use_ppc9le_crypto) -+ { -+ _gcry_aes_ppc9le_cbc_dec (ctx, iv, outbuf, inbuf, nblocks); -+ return; -+ } -+#endif /*USE_PPC_CRYPTO_WITH_PPC9LE*/ -+#ifdef USE_PPC_CRYPTO -+ else if (ctx->use_ppc_crypto) -+ { -+ _gcry_aes_ppc8_cbc_dec (ctx, iv, outbuf, inbuf, nblocks); -+ return; -+ } -+#endif /*USE_PPC_CRYPTO*/ - else - { - unsigned char savebuf[BLOCKSIZE] ATTR_ALIGNED_16; - rijndael_cryptfn_t decrypt_fn = ctx->decrypt_fn; - -+ check_decryption_preparation (ctx); -+ -+ if (ctx->prefetch_dec_fn) -+ ctx->prefetch_dec_fn(); -+ - for ( ;nblocks; nblocks-- ) - { - /* INBUF is needed later and it may be identical to OUTBUF, so store -@@ -1285,7 +1522,7 @@ _gcry_aes_cbc_dec (void *context, unsign - - burn_depth = decrypt_fn (ctx, savebuf, inbuf); - -- buf_xor_n_copy_2(outbuf, savebuf, iv, inbuf, BLOCKSIZE); -+ cipher_block_xor_n_copy_2(outbuf, savebuf, iv, inbuf, BLOCKSIZE); - inbuf += BLOCKSIZE; - outbuf += BLOCKSIZE; - } -@@ -1309,62 +1546,61 @@ _gcry_aes_ocb_crypt (gcry_cipher_hd_t c, - const unsigned char *inbuf = inbuf_arg; - unsigned int burn_depth = 0; - -- if (encrypt) -- { -- if (ctx->prefetch_enc_fn) -- ctx->prefetch_enc_fn(); -- } -- else -- { -- check_decryption_preparation (ctx); -- -- if (ctx->prefetch_dec_fn) -- ctx->prefetch_dec_fn(); -- } -- - if (0) - ; - #ifdef USE_AESNI - else if (ctx->use_aesni) - { -- 
_gcry_aes_aesni_ocb_crypt (c, outbuf, inbuf, nblocks, encrypt); -- burn_depth = 0; -+ return _gcry_aes_aesni_ocb_crypt (c, outbuf, inbuf, nblocks, encrypt); - } - #endif /*USE_AESNI*/ - #ifdef USE_SSSE3 - else if (ctx->use_ssse3) - { -- _gcry_aes_ssse3_ocb_crypt (c, outbuf, inbuf, nblocks, encrypt); -- burn_depth = 0; -+ return _gcry_aes_ssse3_ocb_crypt (c, outbuf, inbuf, nblocks, encrypt); - } - #endif /*USE_SSSE3*/ - #ifdef USE_ARM_CE - else if (ctx->use_arm_ce) - { -- _gcry_aes_armv8_ce_ocb_crypt (c, outbuf, inbuf, nblocks, encrypt); -- burn_depth = 0; -+ return _gcry_aes_armv8_ce_ocb_crypt (c, outbuf, inbuf, nblocks, encrypt); - } - #endif /*USE_ARM_CE*/ -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+ else if (ctx->use_ppc9le_crypto) -+ { -+ return _gcry_aes_ppc9le_ocb_crypt (c, outbuf, inbuf, nblocks, encrypt); -+ } -+#endif /*USE_PPC_CRYPTO_WITH_PPC9LE*/ -+#ifdef USE_PPC_CRYPTO -+ else if (ctx->use_ppc_crypto) -+ { -+ return _gcry_aes_ppc8_ocb_crypt (c, outbuf, inbuf, nblocks, encrypt); -+ } -+#endif /*USE_PPC_CRYPTO*/ - else if (encrypt) - { - union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } l_tmp; - rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; - -+ if (ctx->prefetch_enc_fn) -+ ctx->prefetch_enc_fn(); -+ - for ( ;nblocks; nblocks-- ) - { - u64 i = ++c->u_mode.ocb.data_nblocks; - const unsigned char *l = ocb_get_l(c, i); - - /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -- buf_xor_1 (c->u_iv.iv, l, BLOCKSIZE); -- buf_cpy (l_tmp.x1, inbuf, BLOCKSIZE); -+ cipher_block_xor_1 (c->u_iv.iv, l, BLOCKSIZE); -+ cipher_block_cpy (l_tmp.x1, inbuf, BLOCKSIZE); - /* Checksum_i = Checksum_{i-1} xor P_i */ -- buf_xor_1 (c->u_ctr.ctr, l_tmp.x1, BLOCKSIZE); -+ cipher_block_xor_1 (c->u_ctr.ctr, l_tmp.x1, BLOCKSIZE); - /* C_i = Offset_i xor ENCIPHER(K, P_i xor Offset_i) */ -- buf_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE); -+ cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE); - burn_depth = encrypt_fn (ctx, l_tmp.x1, l_tmp.x1); -- buf_xor_1 (l_tmp.x1, c->u_iv.iv, 
BLOCKSIZE); -- buf_cpy (outbuf, l_tmp.x1, BLOCKSIZE); -+ cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE); -+ cipher_block_cpy (outbuf, l_tmp.x1, BLOCKSIZE); - - inbuf += BLOCKSIZE; - outbuf += BLOCKSIZE; -@@ -1375,21 +1611,26 @@ _gcry_aes_ocb_crypt (gcry_cipher_hd_t c, - union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } l_tmp; - rijndael_cryptfn_t decrypt_fn = ctx->decrypt_fn; - -+ check_decryption_preparation (ctx); -+ -+ if (ctx->prefetch_dec_fn) -+ ctx->prefetch_dec_fn(); -+ - for ( ;nblocks; nblocks-- ) - { - u64 i = ++c->u_mode.ocb.data_nblocks; - const unsigned char *l = ocb_get_l(c, i); - - /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -- buf_xor_1 (c->u_iv.iv, l, BLOCKSIZE); -- buf_cpy (l_tmp.x1, inbuf, BLOCKSIZE); -+ cipher_block_xor_1 (c->u_iv.iv, l, BLOCKSIZE); -+ cipher_block_cpy (l_tmp.x1, inbuf, BLOCKSIZE); - /* C_i = Offset_i xor ENCIPHER(K, P_i xor Offset_i) */ -- buf_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE); -+ cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE); - burn_depth = decrypt_fn (ctx, l_tmp.x1, l_tmp.x1); -- buf_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE); -+ cipher_block_xor_1 (l_tmp.x1, c->u_iv.iv, BLOCKSIZE); - /* Checksum_i = Checksum_{i-1} xor P_i */ -- buf_xor_1 (c->u_ctr.ctr, l_tmp.x1, BLOCKSIZE); -- buf_cpy (outbuf, l_tmp.x1, BLOCKSIZE); -+ cipher_block_xor_1 (c->u_ctr.ctr, l_tmp.x1, BLOCKSIZE); -+ cipher_block_cpy (outbuf, l_tmp.x1, BLOCKSIZE); - - inbuf += BLOCKSIZE; - outbuf += BLOCKSIZE; -@@ -1411,48 +1652,58 @@ _gcry_aes_ocb_auth (gcry_cipher_hd_t c, - const unsigned char *abuf = abuf_arg; - unsigned int burn_depth = 0; - -- if (ctx->prefetch_enc_fn) -- ctx->prefetch_enc_fn(); -- - if (0) - ; - #ifdef USE_AESNI - else if (ctx->use_aesni) - { -- _gcry_aes_aesni_ocb_auth (c, abuf, nblocks); -- burn_depth = 0; -+ return _gcry_aes_aesni_ocb_auth (c, abuf, nblocks); - } - #endif /*USE_AESNI*/ - #ifdef USE_SSSE3 - else if (ctx->use_ssse3) - { -- _gcry_aes_ssse3_ocb_auth (c, abuf, nblocks); -- burn_depth = 0; -+ return 
_gcry_aes_ssse3_ocb_auth (c, abuf, nblocks); - } - #endif /*USE_SSSE3*/ - #ifdef USE_ARM_CE - else if (ctx->use_arm_ce) - { -- _gcry_aes_armv8_ce_ocb_auth (c, abuf, nblocks); -- burn_depth = 0; -+ return _gcry_aes_armv8_ce_ocb_auth (c, abuf, nblocks); - } - #endif /*USE_ARM_CE*/ -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+ else if (ctx->use_ppc9le_crypto) -+ { -+ return _gcry_aes_ppc9le_ocb_auth (c, abuf, nblocks); -+ } -+#endif /*USE_PPC_CRYPTO_WITH_PPC9LE*/ -+#ifdef USE_PPC_CRYPTO -+ else if (ctx->use_ppc_crypto) -+ { -+ return _gcry_aes_ppc8_ocb_auth (c, abuf, nblocks); -+ } -+#endif /*USE_PPC_CRYPTO*/ - else - { - union { unsigned char x1[16] ATTR_ALIGNED_16; u32 x32[4]; } l_tmp; - rijndael_cryptfn_t encrypt_fn = ctx->encrypt_fn; - -+ if (ctx->prefetch_enc_fn) -+ ctx->prefetch_enc_fn(); -+ - for ( ;nblocks; nblocks-- ) - { - u64 i = ++c->u_mode.ocb.aad_nblocks; - const unsigned char *l = ocb_get_l(c, i); - - /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -- buf_xor_1 (c->u_mode.ocb.aad_offset, l, BLOCKSIZE); -+ cipher_block_xor_1 (c->u_mode.ocb.aad_offset, l, BLOCKSIZE); - /* Sum_i = Sum_{i-1} xor ENCIPHER(K, A_i xor Offset_i) */ -- buf_xor (l_tmp.x1, c->u_mode.ocb.aad_offset, abuf, BLOCKSIZE); -+ cipher_block_xor (l_tmp.x1, c->u_mode.ocb.aad_offset, abuf, -+ BLOCKSIZE); - burn_depth = encrypt_fn (ctx, l_tmp.x1, l_tmp.x1); -- buf_xor_1 (c->u_mode.ocb.aad_sum, l_tmp.x1, BLOCKSIZE); -+ cipher_block_xor_1 (c->u_mode.ocb.aad_sum, l_tmp.x1, BLOCKSIZE); - - abuf += BLOCKSIZE; - } -@@ -1467,6 +1718,106 @@ _gcry_aes_ocb_auth (gcry_cipher_hd_t c, - } - - -+/* Bulk encryption/decryption of complete blocks in XTS mode. 
*/ -+void -+_gcry_aes_xts_crypt (void *context, unsigned char *tweak, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks, int encrypt) -+{ -+ RIJNDAEL_context *ctx = context; -+ unsigned char *outbuf = outbuf_arg; -+ const unsigned char *inbuf = inbuf_arg; -+ unsigned int burn_depth = 0; -+ rijndael_cryptfn_t crypt_fn; -+ u64 tweak_lo, tweak_hi, tweak_next_lo, tweak_next_hi, tmp_lo, tmp_hi, carry; -+ -+ if (0) -+ ; -+#ifdef USE_AESNI -+ else if (ctx->use_aesni) -+ { -+ _gcry_aes_aesni_xts_crypt (ctx, tweak, outbuf, inbuf, nblocks, encrypt); -+ return; -+ } -+#endif /*USE_AESNI*/ -+#ifdef USE_ARM_CE -+ else if (ctx->use_arm_ce) -+ { -+ _gcry_aes_armv8_ce_xts_crypt (ctx, tweak, outbuf, inbuf, nblocks, encrypt); -+ return; -+ } -+#endif /*USE_ARM_CE*/ -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+ else if (ctx->use_ppc9le_crypto) -+ { -+ _gcry_aes_ppc9le_xts_crypt (ctx, tweak, outbuf, inbuf, nblocks, encrypt); -+ return; -+ } -+#endif /*USE_PPC_CRYPTO_WITH_PPC9LE*/ -+#ifdef USE_PPC_CRYPTO -+ else if (ctx->use_ppc_crypto) -+ { -+ _gcry_aes_ppc8_xts_crypt (ctx, tweak, outbuf, inbuf, nblocks, encrypt); -+ return; -+ } -+#endif /*USE_PPC_CRYPTO*/ -+ else -+ { -+ if (encrypt) -+ { -+ if (ctx->prefetch_enc_fn) -+ ctx->prefetch_enc_fn(); -+ -+ crypt_fn = ctx->encrypt_fn; -+ } -+ else -+ { -+ check_decryption_preparation (ctx); -+ -+ if (ctx->prefetch_dec_fn) -+ ctx->prefetch_dec_fn(); -+ -+ crypt_fn = ctx->decrypt_fn; -+ } -+ -+ tweak_next_lo = buf_get_le64 (tweak + 0); -+ tweak_next_hi = buf_get_le64 (tweak + 8); -+ -+ while (nblocks) -+ { -+ tweak_lo = tweak_next_lo; -+ tweak_hi = tweak_next_hi; -+ -+ /* Xor-Encrypt/Decrypt-Xor block. */ -+ tmp_lo = buf_get_le64 (inbuf + 0) ^ tweak_lo; -+ tmp_hi = buf_get_le64 (inbuf + 8) ^ tweak_hi; -+ -+ buf_put_le64 (outbuf + 0, tmp_lo); -+ buf_put_le64 (outbuf + 8, tmp_hi); -+ -+ /* Generate next tweak. 
*/ -+ carry = -(tweak_next_hi >> 63) & 0x87; -+ tweak_next_hi = (tweak_next_hi << 1) + (tweak_next_lo >> 63); -+ tweak_next_lo = (tweak_next_lo << 1) ^ carry; -+ -+ burn_depth = crypt_fn (ctx, outbuf, outbuf); -+ -+ buf_put_le64 (outbuf + 0, buf_get_le64 (outbuf + 0) ^ tweak_lo); -+ buf_put_le64 (outbuf + 8, buf_get_le64 (outbuf + 8) ^ tweak_hi); -+ -+ outbuf += GCRY_XTS_BLOCK_LEN; -+ inbuf += GCRY_XTS_BLOCK_LEN; -+ nblocks--; -+ } -+ -+ buf_put_le64 (tweak + 0, tweak_next_lo); -+ buf_put_le64 (tweak + 8, tweak_next_hi); -+ } -+ -+ if (burn_depth) -+ _gcry_burn_stack (burn_depth + 5 * sizeof(void *)); -+} -+ - - /* Run the self-tests for AES 128. Returns NULL on success. */ - static const char* -@@ -1522,7 +1873,7 @@ selftest_basic_128 (void) - if (!ctx) - return "failed to allocate memory"; - -- rijndael_setkey (ctx, key_128, sizeof (key_128)); -+ rijndael_setkey (ctx, key_128, sizeof (key_128), NULL); - rijndael_encrypt (ctx, scratch, plaintext_128); - if (memcmp (scratch, ciphertext_128, sizeof (ciphertext_128))) - { -@@ -1565,7 +1916,7 @@ selftest_basic_192 (void) - ctx = _gcry_cipher_selftest_alloc_ctx (sizeof *ctx, &ctxmem); - if (!ctx) - return "failed to allocate memory"; -- rijndael_setkey (ctx, key_192, sizeof(key_192)); -+ rijndael_setkey (ctx, key_192, sizeof(key_192), NULL); - rijndael_encrypt (ctx, scratch, plaintext_192); - if (memcmp (scratch, ciphertext_192, sizeof (ciphertext_192))) - { -@@ -1610,7 +1961,7 @@ selftest_basic_256 (void) - ctx = _gcry_cipher_selftest_alloc_ctx (sizeof *ctx, &ctxmem); - if (!ctx) - return "failed to allocate memory"; -- rijndael_setkey (ctx, key_256, sizeof(key_256)); -+ rijndael_setkey (ctx, key_256, sizeof(key_256), NULL); - rijndael_encrypt (ctx, scratch, plaintext_256); - if (memcmp (scratch, ciphertext_256, sizeof (ciphertext_256))) - { -diff -up libgcrypt-1.8.5/cipher/rijndael-internal.h.aes-perf libgcrypt-1.8.5/cipher/rijndael-internal.h ---- libgcrypt-1.8.5/cipher/rijndael-internal.h.aes-perf 2017-11-23 
19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/rijndael-internal.h 2020-04-22 18:29:41.676862114 +0200 -@@ -73,7 +73,7 @@ - # define USE_PADLOCK 1 - # endif - # endif --#endif /*ENABLE_PADLOCK_SUPPORT*/ -+#endif /* ENABLE_PADLOCK_SUPPORT */ - - /* USE_AESNI inidicates whether to compile with Intel AES-NI code. We - need the vector-size attribute which seems to be available since -@@ -102,6 +102,23 @@ - # endif - #endif /* ENABLE_ARM_CRYPTO_SUPPORT */ - -+/* USE_PPC_CRYPTO indicates whether to enable PowerPC vector crypto -+ * accelerated code. USE_PPC_CRYPTO_WITH_PPC9LE indicates whether to -+ * enable POWER9 optimized variant. */ -+#undef USE_PPC_CRYPTO -+#undef USE_PPC_CRYPTO_WITH_PPC9LE -+#ifdef ENABLE_PPC_CRYPTO_SUPPORT -+# if defined(HAVE_COMPATIBLE_CC_PPC_ALTIVEC) && \ -+ defined(HAVE_GCC_INLINE_ASM_PPC_ALTIVEC) -+# if __GNUC__ >= 4 -+# define USE_PPC_CRYPTO 1 -+# if !defined(WORDS_BIGENDIAN) && defined(HAVE_GCC_INLINE_ASM_PPC_ARCH_3_00) -+# define USE_PPC_CRYPTO_WITH_PPC9LE 1 -+# endif -+# endif -+# endif -+#endif /* ENABLE_PPC_CRYPTO_SUPPORT */ -+ - struct RIJNDAEL_context_s; - - typedef unsigned int (*rijndael_cryptfn_t)(const struct RIJNDAEL_context_s *ctx, -@@ -150,6 +167,12 @@ typedef struct RIJNDAEL_context_s - #ifdef USE_ARM_CE - unsigned int use_arm_ce:1; /* ARMv8 CE shall be used. */ - #endif /*USE_ARM_CE*/ -+#ifdef USE_PPC_CRYPTO -+ unsigned int use_ppc_crypto:1; /* PowerPC crypto shall be used. */ -+#endif /*USE_PPC_CRYPTO*/ -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+ unsigned int use_ppc9le_crypto:1; /* POWER9 LE crypto shall be used. 
*/ -+#endif - rijndael_cryptfn_t encrypt_fn; - rijndael_cryptfn_t decrypt_fn; - rijndael_prefetchfn_t prefetch_enc_fn; -diff -up libgcrypt-1.8.5/cipher/rijndael-ppc9le.c.aes-perf libgcrypt-1.8.5/cipher/rijndael-ppc9le.c ---- libgcrypt-1.8.5/cipher/rijndael-ppc9le.c.aes-perf 2020-04-22 18:29:41.677862096 +0200 -+++ libgcrypt-1.8.5/cipher/rijndael-ppc9le.c 2020-04-22 18:29:41.677862096 +0200 -@@ -0,0 +1,102 @@ -+/* Rijndael (AES) for GnuPG - PowerPC Vector Crypto AES implementation -+ * Copyright (C) 2019 Shawn Landden -+ * Copyright (C) 2019-2020 Jussi Kivilinna -+ * -+ * This file is part of Libgcrypt. -+ * -+ * Libgcrypt is free software; you can redistribute it and/or modify -+ * it under the terms of the GNU Lesser General Public License as -+ * published by the Free Software Foundation; either version 2.1 of -+ * the License, or (at your option) any later version. -+ * -+ * Libgcrypt is distributed in the hope that it will be useful, -+ * but WITHOUT ANY WARRANTY; without even the implied warranty of -+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ * GNU Lesser General Public License for more details. -+ * -+ * You should have received a copy of the GNU Lesser General Public -+ * License along with this program; if not, see . -+ * -+ * Alternatively, this code may be used in OpenSSL from The OpenSSL Project, -+ * and Cryptogams by Andy Polyakov, and if made part of a release of either -+ * or both projects, is thereafter dual-licensed under the license said project -+ * is released under. 
-+ */ -+ -+#include <config.h> -+ -+#include "rijndael-internal.h" -+#include "cipher-internal.h" -+#include "bufhelp.h" -+ -+#ifdef USE_PPC_CRYPTO_WITH_PPC9LE -+ -+#include "rijndael-ppc-common.h" -+ -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_load_be_const(void) -+{ -+ static const block vec_dummy = { 0 }; -+ return vec_dummy; -+} -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_be_swap(block vec, block be_bswap_const) -+{ -+ (void)be_bswap_const; -+ return vec; -+} -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_load_be_noswap(unsigned long offset, const void *ptr) -+{ -+ block vec; -+#if __GNUC__ >= 4 -+ if (__builtin_constant_p (offset) && offset == 0) -+ __asm__ volatile ("lxvb16x %x0,0,%1\n\t" -+ : "=wa" (vec) -+ : "r" ((uintptr_t)ptr) -+ : "memory"); -+ else -+#endif -+ __asm__ volatile ("lxvb16x %x0,%1,%2\n\t" -+ : "=wa" (vec) -+ : "r" (offset), "r" ((uintptr_t)ptr) -+ : "memory", "r0"); -+ return vec; -+} -+ -+static ASM_FUNC_ATTR_INLINE void -+asm_store_be_noswap(block vec, unsigned long offset, void *ptr) -+{ -+#if __GNUC__ >= 4 -+ if (__builtin_constant_p (offset) && offset == 0) -+ __asm__ volatile ("stxvb16x %x0,0,%1\n\t" -+ : -+ : "wa" (vec), "r" ((uintptr_t)ptr) -+ : "memory"); -+ else -+#endif -+ __asm__ volatile ("stxvb16x %x0,%1,%2\n\t" -+ : -+ : "wa" (vec), "r" (offset), "r" ((uintptr_t)ptr) -+ : "memory", "r0"); -+} -+ -+ -+#define GCRY_AES_PPC9LE 1 -+#define ENCRYPT_BLOCK_FUNC _gcry_aes_ppc9le_encrypt -+#define DECRYPT_BLOCK_FUNC _gcry_aes_ppc9le_decrypt -+#define CFB_ENC_FUNC _gcry_aes_ppc9le_cfb_enc -+#define CFB_DEC_FUNC _gcry_aes_ppc9le_cfb_dec -+#define CBC_ENC_FUNC _gcry_aes_ppc9le_cbc_enc -+#define CBC_DEC_FUNC _gcry_aes_ppc9le_cbc_dec -+#define CTR_ENC_FUNC _gcry_aes_ppc9le_ctr_enc -+#define OCB_CRYPT_FUNC _gcry_aes_ppc9le_ocb_crypt -+#define OCB_AUTH_FUNC _gcry_aes_ppc9le_ocb_auth -+#define XTS_CRYPT_FUNC _gcry_aes_ppc9le_xts_crypt -+ -+#include <rijndael-ppc-functions.h> -+ -+#endif /* USE_PPC_CRYPTO */ -diff -up libgcrypt-1.8.5/cipher/rijndael-ppc.c.aes-perf 
libgcrypt-1.8.5/cipher/rijndael-ppc.c ---- libgcrypt-1.8.5/cipher/rijndael-ppc.c.aes-perf 2020-04-22 18:29:41.677862096 +0200 -+++ libgcrypt-1.8.5/cipher/rijndael-ppc.c 2020-04-22 18:29:41.677862096 +0200 -@@ -0,0 +1,259 @@ -+/* Rijndael (AES) for GnuPG - PowerPC Vector Crypto AES implementation -+ * Copyright (C) 2019 Shawn Landden -+ * Copyright (C) 2019-2020 Jussi Kivilinna -+ * -+ * This file is part of Libgcrypt. -+ * -+ * Libgcrypt is free software; you can redistribute it and/or modify -+ * it under the terms of the GNU Lesser General Public License as -+ * published by the Free Software Foundation; either version 2.1 of -+ * the License, or (at your option) any later version. -+ * -+ * Libgcrypt is distributed in the hope that it will be useful, -+ * but WITHOUT ANY WARRANTY; without even the implied warranty of -+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ * GNU Lesser General Public License for more details. -+ * -+ * You should have received a copy of the GNU Lesser General Public -+ * License along with this program; if not, see . -+ * -+ * Alternatively, this code may be used in OpenSSL from The OpenSSL Project, -+ * and Cryptogams by Andy Polyakov, and if made part of a release of either -+ * or both projects, is thereafter dual-licensed under the license said project -+ * is released under. 
-+ */ -+ -+#include -+ -+#include "rijndael-internal.h" -+#include "cipher-internal.h" -+#include "bufhelp.h" -+ -+#ifdef USE_PPC_CRYPTO -+ -+#include "rijndael-ppc-common.h" -+ -+ -+#ifdef WORDS_BIGENDIAN -+static const block vec_bswap32_const = -+ { 3, 2, 1, 0, 7, 6, 5, 4, 11, 10, 9, 8, 15, 14, 13, 12 }; -+#else -+static const block vec_bswap32_const_neg = -+ { ~3, ~2, ~1, ~0, ~7, ~6, ~5, ~4, ~11, ~10, ~9, ~8, ~15, ~14, ~13, ~12 }; -+#endif -+ -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_load_be_const(void) -+{ -+#ifndef WORDS_BIGENDIAN -+ return ALIGNED_LOAD (&vec_bswap32_const_neg, 0); -+#else -+ static const block vec_dummy = { 0 }; -+ return vec_dummy; -+#endif -+} -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_be_swap(block vec, block be_bswap_const) -+{ -+ (void)be_bswap_const; -+#ifndef WORDS_BIGENDIAN -+ return asm_vperm1 (vec, be_bswap_const); -+#else -+ return vec; -+#endif -+} -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_load_be_noswap(unsigned long offset, const void *ptr) -+{ -+ block vec; -+#if __GNUC__ >= 4 -+ if (__builtin_constant_p (offset) && offset == 0) -+ __asm__ volatile ("lxvw4x %x0,0,%1\n\t" -+ : "=wa" (vec) -+ : "r" ((uintptr_t)ptr) -+ : "memory"); -+ else -+#endif -+ __asm__ volatile ("lxvw4x %x0,%1,%2\n\t" -+ : "=wa" (vec) -+ : "r" (offset), "r" ((uintptr_t)ptr) -+ : "memory", "r0"); -+ /* NOTE: vec needs to be be-swapped using 'asm_be_swap' by caller */ -+ return vec; -+} -+ -+static ASM_FUNC_ATTR_INLINE void -+asm_store_be_noswap(block vec, unsigned long offset, void *ptr) -+{ -+ /* NOTE: vec be-swapped using 'asm_be_swap' by caller */ -+#if __GNUC__ >= 4 -+ if (__builtin_constant_p (offset) && offset == 0) -+ __asm__ volatile ("stxvw4x %x0,0,%1\n\t" -+ : -+ : "wa" (vec), "r" ((uintptr_t)ptr) -+ : "memory"); -+ else -+#endif -+ __asm__ volatile ("stxvw4x %x0,%1,%2\n\t" -+ : -+ : "wa" (vec), "r" (offset), "r" ((uintptr_t)ptr) -+ : "memory", "r0"); -+} -+ -+ -+static ASM_FUNC_ATTR_INLINE u32 -+_gcry_aes_sbox4_ppc8(u32 fourbytes) -+{ -+ 
union -+ { -+ PROPERLY_ALIGNED_TYPE dummy; -+ block data_vec; -+ u32 data32[4]; -+ } u; -+ -+ u.data32[0] = fourbytes; -+ u.data_vec = vec_sbox_be(u.data_vec); -+ return u.data32[0]; -+} -+ -+void -+_gcry_aes_ppc8_setkey (RIJNDAEL_context *ctx, const byte *key) -+{ -+ const block bige_const = asm_load_be_const(); -+ union -+ { -+ PROPERLY_ALIGNED_TYPE dummy; -+ byte data[MAXKC][4]; -+ u32 data32[MAXKC]; -+ } tkk[2]; -+ unsigned int rounds = ctx->rounds; -+ int KC = rounds - 6; -+ unsigned int keylen = KC * 4; -+ u128_t *ekey = (u128_t *)(void *)ctx->keyschenc; -+ unsigned int i, r, t; -+ byte rcon = 1; -+ int j; -+#define k tkk[0].data -+#define k_u32 tkk[0].data32 -+#define tk tkk[1].data -+#define tk_u32 tkk[1].data32 -+#define W (ctx->keyschenc) -+#define W_u32 (ctx->keyschenc32) -+ -+ for (i = 0; i < keylen; i++) -+ { -+ k[i >> 2][i & 3] = key[i]; -+ } -+ -+ for (j = KC-1; j >= 0; j--) -+ { -+ tk_u32[j] = k_u32[j]; -+ } -+ r = 0; -+ t = 0; -+ /* Copy values into round key array. */ -+ for (j = 0; (j < KC) && (r < rounds + 1); ) -+ { -+ for (; (j < KC) && (t < 4); j++, t++) -+ { -+ W_u32[r][t] = le_bswap32(tk_u32[j]); -+ } -+ if (t == 4) -+ { -+ r++; -+ t = 0; -+ } -+ } -+ while (r < rounds + 1) -+ { -+ tk_u32[0] ^= -+ le_bswap32( -+ _gcry_aes_sbox4_ppc8(rol(le_bswap32(tk_u32[KC - 1]), 24)) ^ rcon); -+ -+ if (KC != 8) -+ { -+ for (j = 1; j < KC; j++) -+ { -+ tk_u32[j] ^= tk_u32[j-1]; -+ } -+ } -+ else -+ { -+ for (j = 1; j < KC/2; j++) -+ { -+ tk_u32[j] ^= tk_u32[j-1]; -+ } -+ -+ tk_u32[KC/2] ^= -+ le_bswap32(_gcry_aes_sbox4_ppc8(le_bswap32(tk_u32[KC/2 - 1]))); -+ -+ for (j = KC/2 + 1; j < KC; j++) -+ { -+ tk_u32[j] ^= tk_u32[j-1]; -+ } -+ } -+ -+ /* Copy values into round key array. 
*/ -+ for (j = 0; (j < KC) && (r < rounds + 1); ) -+ { -+ for (; (j < KC) && (t < 4); j++, t++) -+ { -+ W_u32[r][t] = le_bswap32(tk_u32[j]); -+ } -+ if (t == 4) -+ { -+ r++; -+ t = 0; -+ } -+ } -+ -+ rcon = (rcon << 1) ^ (-(rcon >> 7) & 0x1b); -+ } -+ -+ /* Store in big-endian order. */ -+ for (r = 0; r <= rounds; r++) -+ { -+#ifndef WORDS_BIGENDIAN -+ VEC_STORE_BE(ekey, r, ALIGNED_LOAD (ekey, r), bige_const); -+#else -+ block rvec = ALIGNED_LOAD (ekey, r); -+ ALIGNED_STORE (ekey, r, -+ vec_perm(rvec, rvec, vec_bswap32_const)); -+ (void)bige_const; -+#endif -+ } -+ -+#undef W -+#undef tk -+#undef k -+#undef W_u32 -+#undef tk_u32 -+#undef k_u32 -+ wipememory(&tkk, sizeof(tkk)); -+} -+ -+void -+_gcry_aes_ppc8_prepare_decryption (RIJNDAEL_context *ctx) -+{ -+ internal_aes_ppc_prepare_decryption (ctx); -+} -+ -+ -+#define GCRY_AES_PPC8 1 -+#define ENCRYPT_BLOCK_FUNC _gcry_aes_ppc8_encrypt -+#define DECRYPT_BLOCK_FUNC _gcry_aes_ppc8_decrypt -+#define CFB_ENC_FUNC _gcry_aes_ppc8_cfb_enc -+#define CFB_DEC_FUNC _gcry_aes_ppc8_cfb_dec -+#define CBC_ENC_FUNC _gcry_aes_ppc8_cbc_enc -+#define CBC_DEC_FUNC _gcry_aes_ppc8_cbc_dec -+#define CTR_ENC_FUNC _gcry_aes_ppc8_ctr_enc -+#define OCB_CRYPT_FUNC _gcry_aes_ppc8_ocb_crypt -+#define OCB_AUTH_FUNC _gcry_aes_ppc8_ocb_auth -+#define XTS_CRYPT_FUNC _gcry_aes_ppc8_xts_crypt -+ -+#include -+ -+#endif /* USE_PPC_CRYPTO */ -diff -up libgcrypt-1.8.5/cipher/rijndael-ppc-common.h.aes-perf libgcrypt-1.8.5/cipher/rijndael-ppc-common.h ---- libgcrypt-1.8.5/cipher/rijndael-ppc-common.h.aes-perf 2020-04-22 18:29:41.678862076 +0200 -+++ libgcrypt-1.8.5/cipher/rijndael-ppc-common.h 2020-04-22 18:29:41.678862076 +0200 -@@ -0,0 +1,342 @@ -+/* Rijndael (AES) for GnuPG - PowerPC Vector Crypto AES implementation -+ * Copyright (C) 2019 Shawn Landden -+ * Copyright (C) 2019-2020 Jussi Kivilinna -+ * -+ * This file is part of Libgcrypt. 
-+ * -+ * Libgcrypt is free software; you can redistribute it and/or modify -+ * it under the terms of the GNU Lesser General Public License as -+ * published by the Free Software Foundation; either version 2.1 of -+ * the License, or (at your option) any later version. -+ * -+ * Libgcrypt is distributed in the hope that it will be useful, -+ * but WITHOUT ANY WARRANTY; without even the implied warranty of -+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ * GNU Lesser General Public License for more details. -+ * -+ * You should have received a copy of the GNU Lesser General Public -+ * License along with this program; if not, see . -+ * -+ * Alternatively, this code may be used in OpenSSL from The OpenSSL Project, -+ * and Cryptogams by Andy Polyakov, and if made part of a release of either -+ * or both projects, is thereafter dual-licensed under the license said project -+ * is released under. -+ */ -+ -+#ifndef G10_RIJNDAEL_PPC_COMMON_H -+#define G10_RIJNDAEL_PPC_COMMON_H -+ -+#include -+ -+ -+typedef vector unsigned char block; -+ -+typedef union -+{ -+ u32 data32[4]; -+} __attribute__((packed, aligned(1), may_alias)) u128_t; -+ -+ -+#define ALWAYS_INLINE inline __attribute__((always_inline)) -+#define NO_INLINE __attribute__((noinline)) -+#define NO_INSTRUMENT_FUNCTION __attribute__((no_instrument_function)) -+ -+#define ASM_FUNC_ATTR NO_INSTRUMENT_FUNCTION -+#define ASM_FUNC_ATTR_INLINE ASM_FUNC_ATTR ALWAYS_INLINE -+#define ASM_FUNC_ATTR_NOINLINE ASM_FUNC_ATTR NO_INLINE -+ -+ -+#define ALIGNED_LOAD(in_ptr, offs) \ -+ (asm_aligned_ld ((offs) * 16, (const void *)(in_ptr))) -+ -+#define ALIGNED_STORE(out_ptr, offs, vec) \ -+ (asm_aligned_st ((vec), (offs) * 16, (void *)(out_ptr))) -+ -+#define VEC_BE_SWAP(vec, bige_const) (asm_be_swap ((vec), (bige_const))) -+ -+#define VEC_LOAD_BE(in_ptr, offs, bige_const) \ -+ (asm_be_swap (asm_load_be_noswap ((offs) * 16, (const void *)(in_ptr)), \ -+ bige_const)) -+ -+#define VEC_LOAD_BE_NOSWAP(in_ptr, 
offs) \ -+ (asm_load_be_noswap ((offs) * 16, (const unsigned char *)(in_ptr))) -+ -+#define VEC_STORE_BE(out_ptr, offs, vec, bige_const) \ -+ (asm_store_be_noswap (asm_be_swap ((vec), (bige_const)), (offs) * 16, \ -+ (void *)(out_ptr))) -+ -+#define VEC_STORE_BE_NOSWAP(out_ptr, offs, vec) \ -+ (asm_store_be_noswap ((vec), (offs) * 16, (void *)(out_ptr))) -+ -+ -+#define ROUND_KEY_VARIABLES \ -+ block rkey0, rkeylast -+ -+#define PRELOAD_ROUND_KEYS(nrounds) \ -+ do { \ -+ rkey0 = ALIGNED_LOAD (rk, 0); \ -+ rkeylast = ALIGNED_LOAD (rk, nrounds); \ -+ } while (0) -+ -+#define AES_ENCRYPT(blk, nrounds) \ -+ do { \ -+ blk ^= rkey0; \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 1)); \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 2)); \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 3)); \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 4)); \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 5)); \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 6)); \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 7)); \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 8)); \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 9)); \ -+ if (nrounds >= 12) \ -+ { \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 10)); \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 11)); \ -+ if (rounds > 12) \ -+ { \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 12)); \ -+ blk = asm_cipher_be (blk, ALIGNED_LOAD (rk, 13)); \ -+ } \ -+ } \ -+ blk = asm_cipherlast_be (blk, rkeylast); \ -+ } while (0) -+ -+#define AES_DECRYPT(blk, nrounds) \ -+ do { \ -+ blk ^= rkey0; \ -+ blk = asm_ncipher_be (blk, ALIGNED_LOAD (rk, 1)); \ -+ blk = asm_ncipher_be (blk, ALIGNED_LOAD (rk, 2)); \ -+ blk = asm_ncipher_be (blk, ALIGNED_LOAD (rk, 3)); \ -+ blk = asm_ncipher_be (blk, ALIGNED_LOAD (rk, 4)); \ -+ blk = asm_ncipher_be (blk, ALIGNED_LOAD (rk, 5)); \ -+ blk = asm_ncipher_be (blk, ALIGNED_LOAD (rk, 6)); \ -+ blk = asm_ncipher_be (blk, ALIGNED_LOAD (rk, 7)); \ -+ blk = asm_ncipher_be (blk, ALIGNED_LOAD (rk, 8)); \ -+ blk = 
asm_ncipher_be (blk, ALIGNED_LOAD (rk, 9)); \ -+ if (nrounds >= 12) \ -+ { \ -+ blk = asm_ncipher_be (blk, ALIGNED_LOAD (rk, 10)); \ -+ blk = asm_ncipher_be (blk, ALIGNED_LOAD (rk, 11)); \ -+ if (rounds > 12) \ -+ { \ -+ blk = asm_ncipher_be (blk, ALIGNED_LOAD (rk, 12)); \ -+ blk = asm_ncipher_be (blk, ALIGNED_LOAD (rk, 13)); \ -+ } \ -+ } \ -+ blk = asm_ncipherlast_be (blk, rkeylast); \ -+ } while (0) -+ -+ -+#define ROUND_KEY_VARIABLES_ALL \ -+ block rkey0, rkey1, rkey2, rkey3, rkey4, rkey5, rkey6, rkey7, rkey8, \ -+ rkey9, rkey10, rkey11, rkey12, rkey13, rkeylast -+ -+#define PRELOAD_ROUND_KEYS_ALL(nrounds) \ -+ do { \ -+ rkey0 = ALIGNED_LOAD (rk, 0); \ -+ rkey1 = ALIGNED_LOAD (rk, 1); \ -+ rkey2 = ALIGNED_LOAD (rk, 2); \ -+ rkey3 = ALIGNED_LOAD (rk, 3); \ -+ rkey4 = ALIGNED_LOAD (rk, 4); \ -+ rkey5 = ALIGNED_LOAD (rk, 5); \ -+ rkey6 = ALIGNED_LOAD (rk, 6); \ -+ rkey7 = ALIGNED_LOAD (rk, 7); \ -+ rkey8 = ALIGNED_LOAD (rk, 8); \ -+ rkey9 = ALIGNED_LOAD (rk, 9); \ -+ if (nrounds >= 12) \ -+ { \ -+ rkey10 = ALIGNED_LOAD (rk, 10); \ -+ rkey11 = ALIGNED_LOAD (rk, 11); \ -+ if (rounds > 12) \ -+ { \ -+ rkey12 = ALIGNED_LOAD (rk, 12); \ -+ rkey13 = ALIGNED_LOAD (rk, 13); \ -+ } \ -+ } \ -+ rkeylast = ALIGNED_LOAD (rk, nrounds); \ -+ } while (0) -+ -+#define AES_ENCRYPT_ALL(blk, nrounds) \ -+ do { \ -+ blk ^= rkey0; \ -+ blk = asm_cipher_be (blk, rkey1); \ -+ blk = asm_cipher_be (blk, rkey2); \ -+ blk = asm_cipher_be (blk, rkey3); \ -+ blk = asm_cipher_be (blk, rkey4); \ -+ blk = asm_cipher_be (blk, rkey5); \ -+ blk = asm_cipher_be (blk, rkey6); \ -+ blk = asm_cipher_be (blk, rkey7); \ -+ blk = asm_cipher_be (blk, rkey8); \ -+ blk = asm_cipher_be (blk, rkey9); \ -+ if (nrounds >= 12) \ -+ { \ -+ blk = asm_cipher_be (blk, rkey10); \ -+ blk = asm_cipher_be (blk, rkey11); \ -+ if (rounds > 12) \ -+ { \ -+ blk = asm_cipher_be (blk, rkey12); \ -+ blk = asm_cipher_be (blk, rkey13); \ -+ } \ -+ } \ -+ blk = asm_cipherlast_be (blk, rkeylast); \ -+ } while (0) -+ -+ -+static 
ASM_FUNC_ATTR_INLINE block -+asm_aligned_ld(unsigned long offset, const void *ptr) -+{ -+ block vec; -+#if __GNUC__ >= 4 -+ if (__builtin_constant_p (offset) && offset == 0) -+ __asm__ volatile ("lvx %0,0,%1\n\t" -+ : "=v" (vec) -+ : "r" ((uintptr_t)ptr) -+ : "memory"); -+ else -+#endif -+ __asm__ volatile ("lvx %0,%1,%2\n\t" -+ : "=v" (vec) -+ : "r" (offset), "r" ((uintptr_t)ptr) -+ : "memory", "r0"); -+ return vec; -+} -+ -+static ASM_FUNC_ATTR_INLINE void -+asm_aligned_st(block vec, unsigned long offset, void *ptr) -+{ -+#if __GNUC__ >= 4 -+ if (__builtin_constant_p (offset) && offset == 0) -+ __asm__ volatile ("stvx %0,0,%1\n\t" -+ : -+ : "v" (vec), "r" ((uintptr_t)ptr) -+ : "memory"); -+ else -+#endif -+ __asm__ volatile ("stvx %0,%1,%2\n\t" -+ : -+ : "v" (vec), "r" (offset), "r" ((uintptr_t)ptr) -+ : "memory", "r0"); -+} -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_vperm1(block vec, block mask) -+{ -+ block o; -+ __asm__ volatile ("vperm %0,%1,%1,%2\n\t" -+ : "=v" (o) -+ : "v" (vec), "v" (mask)); -+ return o; -+} -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_add_uint128(block a, block b) -+{ -+ block res; -+ __asm__ volatile ("vadduqm %0,%1,%2\n\t" -+ : "=v" (res) -+ : "v" (a), "v" (b)); -+ return res; -+} -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_add_uint64(block a, block b) -+{ -+ block res; -+ __asm__ volatile ("vaddudm %0,%1,%2\n\t" -+ : "=v" (res) -+ : "v" (a), "v" (b)); -+ return res; -+} -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_sra_int64(block a, block b) -+{ -+ block res; -+ __asm__ volatile ("vsrad %0,%1,%2\n\t" -+ : "=v" (res) -+ : "v" (a), "v" (b)); -+ return res; -+} -+ -+static block -+asm_swap_uint64_halfs(block a) -+{ -+ block res; -+ __asm__ volatile ("xxswapd %x0, %x1" -+ : "=wa" (res) -+ : "wa" (a)); -+ return res; -+} -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_xor(block a, block b) -+{ -+ block res; -+ __asm__ volatile ("vxor %0,%1,%2\n\t" -+ : "=v" (res) -+ : "v" (a), "v" (b)); -+ return res; -+} -+ -+static ASM_FUNC_ATTR_INLINE 
block -+asm_cipher_be(block b, block rk) -+{ -+ block o; -+ __asm__ volatile ("vcipher %0, %1, %2\n\t" -+ : "=v" (o) -+ : "v" (b), "v" (rk)); -+ return o; -+} -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_cipherlast_be(block b, block rk) -+{ -+ block o; -+ __asm__ volatile ("vcipherlast %0, %1, %2\n\t" -+ : "=v" (o) -+ : "v" (b), "v" (rk)); -+ return o; -+} -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_ncipher_be(block b, block rk) -+{ -+ block o; -+ __asm__ volatile ("vncipher %0, %1, %2\n\t" -+ : "=v" (o) -+ : "v" (b), "v" (rk)); -+ return o; -+} -+ -+static ASM_FUNC_ATTR_INLINE block -+asm_ncipherlast_be(block b, block rk) -+{ -+ block o; -+ __asm__ volatile ("vncipherlast %0, %1, %2\n\t" -+ : "=v" (o) -+ : "v" (b), "v" (rk)); -+ return o; -+} -+ -+ -+/* Make a decryption key from an encryption key. */ -+static ASM_FUNC_ATTR_INLINE void -+internal_aes_ppc_prepare_decryption (RIJNDAEL_context *ctx) -+{ -+ u128_t *ekey = (u128_t *)(void *)ctx->keyschenc; -+ u128_t *dkey = (u128_t *)(void *)ctx->keyschdec; -+ int rounds = ctx->rounds; -+ int rr; -+ int r; -+ -+ r = 0; -+ rr = rounds; -+ for (r = 0, rr = rounds; r <= rounds; r++, rr--) -+ { -+ ALIGNED_STORE (dkey, r, ALIGNED_LOAD (ekey, rr)); -+ } -+} -+ -+#endif /* G10_RIJNDAEL_PPC_COMMON_H */ -diff -up libgcrypt-1.8.5/cipher/rijndael-ppc-functions.h.aes-perf libgcrypt-1.8.5/cipher/rijndael-ppc-functions.h ---- libgcrypt-1.8.5/cipher/rijndael-ppc-functions.h.aes-perf 2020-04-22 18:29:41.679862057 +0200 -+++ libgcrypt-1.8.5/cipher/rijndael-ppc-functions.h 2020-04-22 18:29:41.679862057 +0200 -@@ -0,0 +1,2020 @@ -+/* Rijndael (AES) for GnuPG - PowerPC Vector Crypto AES implementation -+ * Copyright (C) 2019 Shawn Landden -+ * Copyright (C) 2019-2020 Jussi Kivilinna -+ * -+ * This file is part of Libgcrypt. 
-+ * -+ * Libgcrypt is free software; you can redistribute it and/or modify -+ * it under the terms of the GNU Lesser General Public License as -+ * published by the Free Software Foundation; either version 2.1 of -+ * the License, or (at your option) any later version. -+ * -+ * Libgcrypt is distributed in the hope that it will be useful, -+ * but WITHOUT ANY WARRANTY; without even the implied warranty of -+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ * GNU Lesser General Public License for more details. -+ * -+ * You should have received a copy of the GNU Lesser General Public -+ * License along with this program; if not, see . -+ * -+ * Alternatively, this code may be used in OpenSSL from The OpenSSL Project, -+ * and Cryptogams by Andy Polyakov, and if made part of a release of either -+ * or both projects, is thereafter dual-licensed under the license said project -+ * is released under. -+ */ -+ -+unsigned int ENCRYPT_BLOCK_FUNC (const RIJNDAEL_context *ctx, -+ unsigned char *out, -+ const unsigned char *in) -+{ -+ const block bige_const = asm_load_be_const(); -+ const u128_t *rk = (u128_t *)&ctx->keyschenc; -+ int rounds = ctx->rounds; -+ ROUND_KEY_VARIABLES; -+ block b; -+ -+ b = VEC_LOAD_BE (in, 0, bige_const); -+ -+ PRELOAD_ROUND_KEYS (rounds); -+ -+ AES_ENCRYPT (b, rounds); -+ VEC_STORE_BE (out, 0, b, bige_const); -+ -+ return 0; /* does not use stack */ -+} -+ -+ -+unsigned int DECRYPT_BLOCK_FUNC (const RIJNDAEL_context *ctx, -+ unsigned char *out, -+ const unsigned char *in) -+{ -+ const block bige_const = asm_load_be_const(); -+ const u128_t *rk = (u128_t *)&ctx->keyschdec; -+ int rounds = ctx->rounds; -+ ROUND_KEY_VARIABLES; -+ block b; -+ -+ b = VEC_LOAD_BE (in, 0, bige_const); -+ -+ PRELOAD_ROUND_KEYS (rounds); -+ -+ AES_DECRYPT (b, rounds); -+ VEC_STORE_BE (out, 0, b, bige_const); -+ -+ return 0; /* does not use stack */ -+} -+ -+ -+void CFB_ENC_FUNC (void *context, unsigned char *iv_arg, -+ void *outbuf_arg, const void 
*inbuf_arg, -+ size_t nblocks) -+{ -+ const block bige_const = asm_load_be_const(); -+ RIJNDAEL_context *ctx = context; -+ const u128_t *rk = (u128_t *)&ctx->keyschenc; -+ const u128_t *in = (const u128_t *)inbuf_arg; -+ u128_t *out = (u128_t *)outbuf_arg; -+ int rounds = ctx->rounds; -+ ROUND_KEY_VARIABLES_ALL; -+ block rkeylast_orig; -+ block iv; -+ -+ iv = VEC_LOAD_BE (iv_arg, 0, bige_const); -+ -+ PRELOAD_ROUND_KEYS_ALL (rounds); -+ rkeylast_orig = rkeylast; -+ -+ for (; nblocks >= 2; nblocks -= 2) -+ { -+ block in2, iv1; -+ -+ rkeylast = rkeylast_orig ^ VEC_LOAD_BE (in, 0, bige_const); -+ in2 = VEC_LOAD_BE (in + 1, 0, bige_const); -+ in += 2; -+ -+ AES_ENCRYPT_ALL (iv, rounds); -+ -+ iv1 = iv; -+ rkeylast = rkeylast_orig ^ in2; -+ -+ AES_ENCRYPT_ALL (iv, rounds); -+ -+ VEC_STORE_BE (out++, 0, iv1, bige_const); -+ VEC_STORE_BE (out++, 0, iv, bige_const); -+ } -+ -+ for (; nblocks; nblocks--) -+ { -+ rkeylast = rkeylast_orig ^ VEC_LOAD_BE (in++, 0, bige_const); -+ -+ AES_ENCRYPT_ALL (iv, rounds); -+ -+ VEC_STORE_BE (out++, 0, iv, bige_const); -+ } -+ -+ VEC_STORE_BE (iv_arg, 0, iv, bige_const); -+} -+ -+void CFB_DEC_FUNC (void *context, unsigned char *iv_arg, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks) -+{ -+ const block bige_const = asm_load_be_const(); -+ RIJNDAEL_context *ctx = context; -+ const u128_t *rk = (u128_t *)&ctx->keyschenc; -+ const u128_t *in = (const u128_t *)inbuf_arg; -+ u128_t *out = (u128_t *)outbuf_arg; -+ int rounds = ctx->rounds; -+ ROUND_KEY_VARIABLES; -+ block rkeylast_orig; -+ block iv, b, bin; -+ block in0, in1, in2, in3, in4, in5, in6, in7; -+ block b0, b1, b2, b3, b4, b5, b6, b7; -+ block rkey; -+ -+ iv = VEC_LOAD_BE (iv_arg, 0, bige_const); -+ -+ PRELOAD_ROUND_KEYS (rounds); -+ rkeylast_orig = rkeylast; -+ -+ for (; nblocks >= 8; nblocks -= 8) -+ { -+ in0 = iv; -+ in1 = VEC_LOAD_BE_NOSWAP (in, 0); -+ in2 = VEC_LOAD_BE_NOSWAP (in, 1); -+ in3 = VEC_LOAD_BE_NOSWAP (in, 2); -+ in4 = VEC_LOAD_BE_NOSWAP (in, 3); -+ in1 
= VEC_BE_SWAP (in1, bige_const); -+ in2 = VEC_BE_SWAP (in2, bige_const); -+ in5 = VEC_LOAD_BE_NOSWAP (in, 4); -+ in6 = VEC_LOAD_BE_NOSWAP (in, 5); -+ in3 = VEC_BE_SWAP (in3, bige_const); -+ in4 = VEC_BE_SWAP (in4, bige_const); -+ in7 = VEC_LOAD_BE_NOSWAP (in, 6); -+ iv = VEC_LOAD_BE_NOSWAP (in, 7); -+ in += 8; -+ in5 = VEC_BE_SWAP (in5, bige_const); -+ in6 = VEC_BE_SWAP (in6, bige_const); -+ b0 = asm_xor (rkey0, in0); -+ b1 = asm_xor (rkey0, in1); -+ in7 = VEC_BE_SWAP (in7, bige_const); -+ iv = VEC_BE_SWAP (iv, bige_const); -+ b2 = asm_xor (rkey0, in2); -+ b3 = asm_xor (rkey0, in3); -+ b4 = asm_xor (rkey0, in4); -+ b5 = asm_xor (rkey0, in5); -+ b6 = asm_xor (rkey0, in6); -+ b7 = asm_xor (rkey0, in7); -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_cipher_be (b0, rkey); \ -+ b1 = asm_cipher_be (b1, rkey); \ -+ b2 = asm_cipher_be (b2, rkey); \ -+ b3 = asm_cipher_be (b3, rkey); \ -+ b4 = asm_cipher_be (b4, rkey); \ -+ b5 = asm_cipher_be (b5, rkey); \ -+ b6 = asm_cipher_be (b6, rkey); \ -+ b7 = asm_cipher_be (b7, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ DO_ROUND(8); -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ in1 = asm_xor (rkeylast, in1); -+ in2 = asm_xor (rkeylast, in2); -+ in3 = asm_xor (rkeylast, in3); -+ in4 = asm_xor (rkeylast, in4); -+ b0 = asm_cipherlast_be (b0, in1); -+ b1 = asm_cipherlast_be (b1, in2); -+ in5 = asm_xor (rkeylast, in5); -+ in6 = asm_xor (rkeylast, in6); -+ b2 = asm_cipherlast_be (b2, in3); -+ b3 = asm_cipherlast_be (b3, in4); -+ in7 = asm_xor (rkeylast, in7); -+ in0 = asm_xor (rkeylast, iv); -+ b0 = VEC_BE_SWAP (b0, bige_const); -+ b1 = VEC_BE_SWAP (b1, bige_const); -+ b4 = asm_cipherlast_be (b4, in5); -+ b5 = asm_cipherlast_be (b5, in6); -+ b2 = VEC_BE_SWAP (b2, bige_const); -+ b3 = VEC_BE_SWAP (b3, 
bige_const); -+ b6 = asm_cipherlast_be (b6, in7); -+ b7 = asm_cipherlast_be (b7, in0); -+ b4 = VEC_BE_SWAP (b4, bige_const); -+ b5 = VEC_BE_SWAP (b5, bige_const); -+ b6 = VEC_BE_SWAP (b6, bige_const); -+ b7 = VEC_BE_SWAP (b7, bige_const); -+ VEC_STORE_BE_NOSWAP (out, 0, b0); -+ VEC_STORE_BE_NOSWAP (out, 1, b1); -+ VEC_STORE_BE_NOSWAP (out, 2, b2); -+ VEC_STORE_BE_NOSWAP (out, 3, b3); -+ VEC_STORE_BE_NOSWAP (out, 4, b4); -+ VEC_STORE_BE_NOSWAP (out, 5, b5); -+ VEC_STORE_BE_NOSWAP (out, 6, b6); -+ VEC_STORE_BE_NOSWAP (out, 7, b7); -+ out += 8; -+ } -+ -+ if (nblocks >= 4) -+ { -+ in0 = iv; -+ in1 = VEC_LOAD_BE (in, 0, bige_const); -+ in2 = VEC_LOAD_BE (in, 1, bige_const); -+ in3 = VEC_LOAD_BE (in, 2, bige_const); -+ iv = VEC_LOAD_BE (in, 3, bige_const); -+ -+ b0 = asm_xor (rkey0, in0); -+ b1 = asm_xor (rkey0, in1); -+ b2 = asm_xor (rkey0, in2); -+ b3 = asm_xor (rkey0, in3); -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_cipher_be (b0, rkey); \ -+ b1 = asm_cipher_be (b1, rkey); \ -+ b2 = asm_cipher_be (b2, rkey); \ -+ b3 = asm_cipher_be (b3, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ DO_ROUND(8); -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ in1 = asm_xor (rkeylast, in1); -+ in2 = asm_xor (rkeylast, in2); -+ in3 = asm_xor (rkeylast, in3); -+ in0 = asm_xor (rkeylast, iv); -+ b0 = asm_cipherlast_be (b0, in1); -+ b1 = asm_cipherlast_be (b1, in2); -+ b2 = asm_cipherlast_be (b2, in3); -+ b3 = asm_cipherlast_be (b3, in0); -+ VEC_STORE_BE (out, 0, b0, bige_const); -+ VEC_STORE_BE (out, 1, b1, bige_const); -+ VEC_STORE_BE (out, 2, b2, bige_const); -+ VEC_STORE_BE (out, 3, b3, bige_const); -+ -+ in += 4; -+ out += 4; -+ nblocks -= 4; -+ } -+ -+ for (; nblocks; nblocks--) -+ { -+ bin = VEC_LOAD_BE (in, 0, bige_const); -+ rkeylast = rkeylast_orig ^ 
bin; -+ b = iv; -+ iv = bin; -+ -+ AES_ENCRYPT (b, rounds); -+ -+ VEC_STORE_BE (out, 0, b, bige_const); -+ -+ out++; -+ in++; -+ } -+ -+ VEC_STORE_BE (iv_arg, 0, iv, bige_const); -+} -+ -+ -+void CBC_ENC_FUNC (void *context, unsigned char *iv_arg, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks, int cbc_mac) -+{ -+ const block bige_const = asm_load_be_const(); -+ RIJNDAEL_context *ctx = context; -+ const u128_t *rk = (u128_t *)&ctx->keyschenc; -+ const u128_t *in = (const u128_t *)inbuf_arg; -+ byte *out = (byte *)outbuf_arg; -+ int rounds = ctx->rounds; -+ ROUND_KEY_VARIABLES_ALL; -+ block lastiv, b; -+ unsigned int outadd = -(!cbc_mac) & 16; -+ -+ lastiv = VEC_LOAD_BE (iv_arg, 0, bige_const); -+ -+ PRELOAD_ROUND_KEYS_ALL (rounds); -+ -+ for (; nblocks >= 2; nblocks -= 2) -+ { -+ block in2, lastiv1; -+ -+ b = lastiv ^ VEC_LOAD_BE (in, 0, bige_const); -+ in2 = VEC_LOAD_BE (in + 1, 0, bige_const); -+ in += 2; -+ -+ AES_ENCRYPT_ALL (b, rounds); -+ -+ lastiv1 = b; -+ b = lastiv1 ^ in2; -+ -+ AES_ENCRYPT_ALL (b, rounds); -+ -+ lastiv = b; -+ VEC_STORE_BE ((u128_t *)out, 0, lastiv1, bige_const); -+ out += outadd; -+ VEC_STORE_BE ((u128_t *)out, 0, lastiv, bige_const); -+ out += outadd; -+ } -+ -+ for (; nblocks; nblocks--) -+ { -+ b = lastiv ^ VEC_LOAD_BE (in++, 0, bige_const); -+ -+ AES_ENCRYPT_ALL (b, rounds); -+ -+ lastiv = b; -+ VEC_STORE_BE ((u128_t *)out, 0, b, bige_const); -+ out += outadd; -+ } -+ -+ VEC_STORE_BE (iv_arg, 0, lastiv, bige_const); -+} -+ -+void CBC_DEC_FUNC (void *context, unsigned char *iv_arg, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks) -+{ -+ const block bige_const = asm_load_be_const(); -+ RIJNDAEL_context *ctx = context; -+ const u128_t *rk = (u128_t *)&ctx->keyschdec; -+ const u128_t *in = (const u128_t *)inbuf_arg; -+ u128_t *out = (u128_t *)outbuf_arg; -+ int rounds = ctx->rounds; -+ ROUND_KEY_VARIABLES; -+ block rkeylast_orig; -+ block in0, in1, in2, in3, in4, in5, in6, in7; -+ block b0, b1, b2, b3, b4, 
b5, b6, b7; -+ block rkey; -+ block iv, b; -+ -+ if (!ctx->decryption_prepared) -+ { -+ internal_aes_ppc_prepare_decryption (ctx); -+ ctx->decryption_prepared = 1; -+ } -+ -+ iv = VEC_LOAD_BE (iv_arg, 0, bige_const); -+ -+ PRELOAD_ROUND_KEYS (rounds); -+ rkeylast_orig = rkeylast; -+ -+ for (; nblocks >= 8; nblocks -= 8) -+ { -+ in0 = VEC_LOAD_BE_NOSWAP (in, 0); -+ in1 = VEC_LOAD_BE_NOSWAP (in, 1); -+ in2 = VEC_LOAD_BE_NOSWAP (in, 2); -+ in3 = VEC_LOAD_BE_NOSWAP (in, 3); -+ in0 = VEC_BE_SWAP (in0, bige_const); -+ in1 = VEC_BE_SWAP (in1, bige_const); -+ in4 = VEC_LOAD_BE_NOSWAP (in, 4); -+ in5 = VEC_LOAD_BE_NOSWAP (in, 5); -+ in2 = VEC_BE_SWAP (in2, bige_const); -+ in3 = VEC_BE_SWAP (in3, bige_const); -+ in6 = VEC_LOAD_BE_NOSWAP (in, 6); -+ in7 = VEC_LOAD_BE_NOSWAP (in, 7); -+ in += 8; -+ b0 = asm_xor (rkey0, in0); -+ b1 = asm_xor (rkey0, in1); -+ in4 = VEC_BE_SWAP (in4, bige_const); -+ in5 = VEC_BE_SWAP (in5, bige_const); -+ b2 = asm_xor (rkey0, in2); -+ b3 = asm_xor (rkey0, in3); -+ in6 = VEC_BE_SWAP (in6, bige_const); -+ in7 = VEC_BE_SWAP (in7, bige_const); -+ b4 = asm_xor (rkey0, in4); -+ b5 = asm_xor (rkey0, in5); -+ b6 = asm_xor (rkey0, in6); -+ b7 = asm_xor (rkey0, in7); -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_ncipher_be (b0, rkey); \ -+ b1 = asm_ncipher_be (b1, rkey); \ -+ b2 = asm_ncipher_be (b2, rkey); \ -+ b3 = asm_ncipher_be (b3, rkey); \ -+ b4 = asm_ncipher_be (b4, rkey); \ -+ b5 = asm_ncipher_be (b5, rkey); \ -+ b6 = asm_ncipher_be (b6, rkey); \ -+ b7 = asm_ncipher_be (b7, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ DO_ROUND(8); -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ iv = asm_xor (rkeylast, iv); -+ in0 = asm_xor (rkeylast, in0); -+ in1 = asm_xor (rkeylast, in1); -+ in2 = asm_xor (rkeylast, in2); -+ b0 = 
asm_ncipherlast_be (b0, iv); -+ iv = in7; -+ b1 = asm_ncipherlast_be (b1, in0); -+ in3 = asm_xor (rkeylast, in3); -+ in4 = asm_xor (rkeylast, in4); -+ b2 = asm_ncipherlast_be (b2, in1); -+ b3 = asm_ncipherlast_be (b3, in2); -+ in5 = asm_xor (rkeylast, in5); -+ in6 = asm_xor (rkeylast, in6); -+ b0 = VEC_BE_SWAP (b0, bige_const); -+ b1 = VEC_BE_SWAP (b1, bige_const); -+ b4 = asm_ncipherlast_be (b4, in3); -+ b5 = asm_ncipherlast_be (b5, in4); -+ b2 = VEC_BE_SWAP (b2, bige_const); -+ b3 = VEC_BE_SWAP (b3, bige_const); -+ b6 = asm_ncipherlast_be (b6, in5); -+ b7 = asm_ncipherlast_be (b7, in6); -+ b4 = VEC_BE_SWAP (b4, bige_const); -+ b5 = VEC_BE_SWAP (b5, bige_const); -+ b6 = VEC_BE_SWAP (b6, bige_const); -+ b7 = VEC_BE_SWAP (b7, bige_const); -+ VEC_STORE_BE_NOSWAP (out, 0, b0); -+ VEC_STORE_BE_NOSWAP (out, 1, b1); -+ VEC_STORE_BE_NOSWAP (out, 2, b2); -+ VEC_STORE_BE_NOSWAP (out, 3, b3); -+ VEC_STORE_BE_NOSWAP (out, 4, b4); -+ VEC_STORE_BE_NOSWAP (out, 5, b5); -+ VEC_STORE_BE_NOSWAP (out, 6, b6); -+ VEC_STORE_BE_NOSWAP (out, 7, b7); -+ out += 8; -+ } -+ -+ if (nblocks >= 4) -+ { -+ in0 = VEC_LOAD_BE (in, 0, bige_const); -+ in1 = VEC_LOAD_BE (in, 1, bige_const); -+ in2 = VEC_LOAD_BE (in, 2, bige_const); -+ in3 = VEC_LOAD_BE (in, 3, bige_const); -+ -+ b0 = asm_xor (rkey0, in0); -+ b1 = asm_xor (rkey0, in1); -+ b2 = asm_xor (rkey0, in2); -+ b3 = asm_xor (rkey0, in3); -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_ncipher_be (b0, rkey); \ -+ b1 = asm_ncipher_be (b1, rkey); \ -+ b2 = asm_ncipher_be (b2, rkey); \ -+ b3 = asm_ncipher_be (b3, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ DO_ROUND(8); -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ iv = asm_xor (rkeylast, iv); -+ in0 = asm_xor (rkeylast, in0); -+ in1 = asm_xor (rkeylast, in1); -+ in2 = 
asm_xor (rkeylast, in2); -+ -+ b0 = asm_ncipherlast_be (b0, iv); -+ iv = in3; -+ b1 = asm_ncipherlast_be (b1, in0); -+ b2 = asm_ncipherlast_be (b2, in1); -+ b3 = asm_ncipherlast_be (b3, in2); -+ -+ VEC_STORE_BE (out, 0, b0, bige_const); -+ VEC_STORE_BE (out, 1, b1, bige_const); -+ VEC_STORE_BE (out, 2, b2, bige_const); -+ VEC_STORE_BE (out, 3, b3, bige_const); -+ -+ in += 4; -+ out += 4; -+ nblocks -= 4; -+ } -+ -+ for (; nblocks; nblocks--) -+ { -+ rkeylast = rkeylast_orig ^ iv; -+ -+ iv = VEC_LOAD_BE (in, 0, bige_const); -+ b = iv; -+ AES_DECRYPT (b, rounds); -+ -+ VEC_STORE_BE (out, 0, b, bige_const); -+ -+ in++; -+ out++; -+ } -+ -+ VEC_STORE_BE (iv_arg, 0, iv, bige_const); -+} -+ -+ -+void CTR_ENC_FUNC (void *context, unsigned char *ctr_arg, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks) -+{ -+ static const unsigned char vec_one_const[16] = -+ { 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1 }; -+ const block bige_const = asm_load_be_const(); -+ RIJNDAEL_context *ctx = context; -+ const u128_t *rk = (u128_t *)&ctx->keyschenc; -+ const u128_t *in = (const u128_t *)inbuf_arg; -+ u128_t *out = (u128_t *)outbuf_arg; -+ int rounds = ctx->rounds; -+ ROUND_KEY_VARIABLES; -+ block rkeylast_orig; -+ block ctr, b, one; -+ -+ ctr = VEC_LOAD_BE (ctr_arg, 0, bige_const); -+ one = VEC_LOAD_BE (&vec_one_const, 0, bige_const); -+ -+ PRELOAD_ROUND_KEYS (rounds); -+ rkeylast_orig = rkeylast; -+ -+ if (nblocks >= 4) -+ { -+ block in0, in1, in2, in3, in4, in5, in6, in7; -+ block b0, b1, b2, b3, b4, b5, b6, b7; -+ block two, three, four; -+ block rkey; -+ -+ two = asm_add_uint128 (one, one); -+ three = asm_add_uint128 (two, one); -+ four = asm_add_uint128 (two, two); -+ -+ for (; nblocks >= 8; nblocks -= 8) -+ { -+ b1 = asm_add_uint128 (ctr, one); -+ b2 = asm_add_uint128 (ctr, two); -+ b3 = asm_add_uint128 (ctr, three); -+ b4 = asm_add_uint128 (ctr, four); -+ b5 = asm_add_uint128 (b1, four); -+ b6 = asm_add_uint128 (b2, four); -+ b7 = asm_add_uint128 (b3, four); -+ 
b0 = asm_xor (rkey0, ctr); -+ rkey = ALIGNED_LOAD (rk, 1); -+ ctr = asm_add_uint128 (b4, four); -+ b1 = asm_xor (rkey0, b1); -+ b2 = asm_xor (rkey0, b2); -+ b3 = asm_xor (rkey0, b3); -+ b0 = asm_cipher_be (b0, rkey); -+ b1 = asm_cipher_be (b1, rkey); -+ b2 = asm_cipher_be (b2, rkey); -+ b3 = asm_cipher_be (b3, rkey); -+ b4 = asm_xor (rkey0, b4); -+ b5 = asm_xor (rkey0, b5); -+ b6 = asm_xor (rkey0, b6); -+ b7 = asm_xor (rkey0, b7); -+ b4 = asm_cipher_be (b4, rkey); -+ b5 = asm_cipher_be (b5, rkey); -+ b6 = asm_cipher_be (b6, rkey); -+ b7 = asm_cipher_be (b7, rkey); -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_cipher_be (b0, rkey); \ -+ b1 = asm_cipher_be (b1, rkey); \ -+ b2 = asm_cipher_be (b2, rkey); \ -+ b3 = asm_cipher_be (b3, rkey); \ -+ b4 = asm_cipher_be (b4, rkey); \ -+ b5 = asm_cipher_be (b5, rkey); \ -+ b6 = asm_cipher_be (b6, rkey); \ -+ b7 = asm_cipher_be (b7, rkey); -+ -+ in0 = VEC_LOAD_BE_NOSWAP (in, 0); -+ DO_ROUND(2); -+ in1 = VEC_LOAD_BE_NOSWAP (in, 1); -+ DO_ROUND(3); -+ in2 = VEC_LOAD_BE_NOSWAP (in, 2); -+ DO_ROUND(4); -+ in3 = VEC_LOAD_BE_NOSWAP (in, 3); -+ DO_ROUND(5); -+ in4 = VEC_LOAD_BE_NOSWAP (in, 4); -+ DO_ROUND(6); -+ in5 = VEC_LOAD_BE_NOSWAP (in, 5); -+ DO_ROUND(7); -+ in6 = VEC_LOAD_BE_NOSWAP (in, 6); -+ DO_ROUND(8); -+ in7 = VEC_LOAD_BE_NOSWAP (in, 7); -+ in += 8; -+ DO_ROUND(9); -+ -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ in0 = VEC_BE_SWAP (in0, bige_const); -+ in1 = VEC_BE_SWAP (in1, bige_const); -+ in2 = VEC_BE_SWAP (in2, bige_const); -+ in3 = VEC_BE_SWAP (in3, bige_const); -+ in4 = VEC_BE_SWAP (in4, bige_const); -+ in5 = VEC_BE_SWAP (in5, bige_const); -+ in6 = VEC_BE_SWAP (in6, bige_const); -+ in7 = VEC_BE_SWAP (in7, bige_const); -+ -+ in0 = asm_xor (rkeylast, in0); -+ in1 = asm_xor (rkeylast, in1); -+ in2 = asm_xor (rkeylast, in2); -+ in3 = asm_xor (rkeylast, in3); -+ b0 = asm_cipherlast_be 
(b0, in0); -+ b1 = asm_cipherlast_be (b1, in1); -+ in4 = asm_xor (rkeylast, in4); -+ in5 = asm_xor (rkeylast, in5); -+ b2 = asm_cipherlast_be (b2, in2); -+ b3 = asm_cipherlast_be (b3, in3); -+ in6 = asm_xor (rkeylast, in6); -+ in7 = asm_xor (rkeylast, in7); -+ b4 = asm_cipherlast_be (b4, in4); -+ b5 = asm_cipherlast_be (b5, in5); -+ b6 = asm_cipherlast_be (b6, in6); -+ b7 = asm_cipherlast_be (b7, in7); -+ -+ b0 = VEC_BE_SWAP (b0, bige_const); -+ b1 = VEC_BE_SWAP (b1, bige_const); -+ b2 = VEC_BE_SWAP (b2, bige_const); -+ b3 = VEC_BE_SWAP (b3, bige_const); -+ b4 = VEC_BE_SWAP (b4, bige_const); -+ b5 = VEC_BE_SWAP (b5, bige_const); -+ b6 = VEC_BE_SWAP (b6, bige_const); -+ b7 = VEC_BE_SWAP (b7, bige_const); -+ VEC_STORE_BE_NOSWAP (out, 0, b0); -+ VEC_STORE_BE_NOSWAP (out, 1, b1); -+ VEC_STORE_BE_NOSWAP (out, 2, b2); -+ VEC_STORE_BE_NOSWAP (out, 3, b3); -+ VEC_STORE_BE_NOSWAP (out, 4, b4); -+ VEC_STORE_BE_NOSWAP (out, 5, b5); -+ VEC_STORE_BE_NOSWAP (out, 6, b6); -+ VEC_STORE_BE_NOSWAP (out, 7, b7); -+ out += 8; -+ } -+ -+ if (nblocks >= 4) -+ { -+ b1 = asm_add_uint128 (ctr, one); -+ b2 = asm_add_uint128 (ctr, two); -+ b3 = asm_add_uint128 (ctr, three); -+ b0 = asm_xor (rkey0, ctr); -+ ctr = asm_add_uint128 (ctr, four); -+ b1 = asm_xor (rkey0, b1); -+ b2 = asm_xor (rkey0, b2); -+ b3 = asm_xor (rkey0, b3); -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_cipher_be (b0, rkey); \ -+ b1 = asm_cipher_be (b1, rkey); \ -+ b2 = asm_cipher_be (b2, rkey); \ -+ b3 = asm_cipher_be (b3, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ DO_ROUND(8); -+ -+ in0 = VEC_LOAD_BE (in, 0, bige_const); -+ in1 = VEC_LOAD_BE (in, 1, bige_const); -+ in2 = VEC_LOAD_BE (in, 2, bige_const); -+ in3 = VEC_LOAD_BE (in, 3, bige_const); -+ -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ 
-+ in0 = asm_xor (rkeylast, in0); -+ in1 = asm_xor (rkeylast, in1); -+ in2 = asm_xor (rkeylast, in2); -+ in3 = asm_xor (rkeylast, in3); -+ -+ b0 = asm_cipherlast_be (b0, in0); -+ b1 = asm_cipherlast_be (b1, in1); -+ b2 = asm_cipherlast_be (b2, in2); -+ b3 = asm_cipherlast_be (b3, in3); -+ -+ VEC_STORE_BE (out, 0, b0, bige_const); -+ VEC_STORE_BE (out, 1, b1, bige_const); -+ VEC_STORE_BE (out, 2, b2, bige_const); -+ VEC_STORE_BE (out, 3, b3, bige_const); -+ -+ in += 4; -+ out += 4; -+ nblocks -= 4; -+ } -+ } -+ -+ for (; nblocks; nblocks--) -+ { -+ b = ctr; -+ ctr = asm_add_uint128 (ctr, one); -+ rkeylast = rkeylast_orig ^ VEC_LOAD_BE (in, 0, bige_const); -+ -+ AES_ENCRYPT (b, rounds); -+ -+ VEC_STORE_BE (out, 0, b, bige_const); -+ -+ out++; -+ in++; -+ } -+ -+ VEC_STORE_BE (ctr_arg, 0, ctr, bige_const); -+} -+ -+ -+size_t OCB_CRYPT_FUNC (gcry_cipher_hd_t c, void *outbuf_arg, -+ const void *inbuf_arg, size_t nblocks, -+ int encrypt) -+{ -+ const block bige_const = asm_load_be_const(); -+ RIJNDAEL_context *ctx = (void *)&c->context.c; -+ const u128_t *in = (const u128_t *)inbuf_arg; -+ u128_t *out = (u128_t *)outbuf_arg; -+ int rounds = ctx->rounds; -+ u64 data_nblocks = c->u_mode.ocb.data_nblocks; -+ block l0, l1, l2, l; -+ block b0, b1, b2, b3, b4, b5, b6, b7, b; -+ block iv0, iv1, iv2, iv3, iv4, iv5, iv6, iv7; -+ block rkey, rkeylf; -+ block ctr, iv; -+ ROUND_KEY_VARIABLES; -+ -+ iv = VEC_LOAD_BE (c->u_iv.iv, 0, bige_const); -+ ctr = VEC_LOAD_BE (c->u_ctr.ctr, 0, bige_const); -+ -+ l0 = VEC_LOAD_BE (c->u_mode.ocb.L[0], 0, bige_const); -+ l1 = VEC_LOAD_BE (c->u_mode.ocb.L[1], 0, bige_const); -+ l2 = VEC_LOAD_BE (c->u_mode.ocb.L[2], 0, bige_const); -+ -+ if (encrypt) -+ { -+ const u128_t *rk = (u128_t *)&ctx->keyschenc; -+ -+ PRELOAD_ROUND_KEYS (rounds); -+ -+ for (; nblocks >= 8 && data_nblocks % 8; nblocks--) -+ { -+ l = VEC_LOAD_BE (ocb_get_l (c, ++data_nblocks), 0, bige_const); -+ b = VEC_LOAD_BE (in, 0, bige_const); -+ -+ /* Offset_i = Offset_{i-1} xor 
L_{ntz(i)} */ -+ iv ^= l; -+ /* Checksum_i = Checksum_{i-1} xor P_i */ -+ ctr ^= b; -+ /* C_i = Offset_i xor ENCIPHER(K, P_i xor Offset_i) */ -+ b ^= iv; -+ AES_ENCRYPT (b, rounds); -+ b ^= iv; -+ -+ VEC_STORE_BE (out, 0, b, bige_const); -+ -+ in += 1; -+ out += 1; -+ } -+ -+ for (; nblocks >= 8; nblocks -= 8) -+ { -+ b0 = VEC_LOAD_BE_NOSWAP (in, 0); -+ b1 = VEC_LOAD_BE_NOSWAP (in, 1); -+ b2 = VEC_LOAD_BE_NOSWAP (in, 2); -+ b3 = VEC_LOAD_BE_NOSWAP (in, 3); -+ b4 = VEC_LOAD_BE_NOSWAP (in, 4); -+ b5 = VEC_LOAD_BE_NOSWAP (in, 5); -+ b6 = VEC_LOAD_BE_NOSWAP (in, 6); -+ b7 = VEC_LOAD_BE_NOSWAP (in, 7); -+ in += 8; -+ l = VEC_LOAD_BE_NOSWAP (ocb_get_l (c, data_nblocks += 8), 0); -+ b0 = VEC_BE_SWAP(b0, bige_const); -+ b1 = VEC_BE_SWAP(b1, bige_const); -+ b2 = VEC_BE_SWAP(b2, bige_const); -+ b3 = VEC_BE_SWAP(b3, bige_const); -+ b4 = VEC_BE_SWAP(b4, bige_const); -+ b5 = VEC_BE_SWAP(b5, bige_const); -+ b6 = VEC_BE_SWAP(b6, bige_const); -+ b7 = VEC_BE_SWAP(b7, bige_const); -+ l = VEC_BE_SWAP(l, bige_const); -+ -+ ctr ^= b0 ^ b1 ^ b2 ^ b3 ^ b4 ^ b5 ^ b6 ^ b7; -+ -+ iv ^= rkey0; -+ -+ iv0 = iv ^ l0; -+ iv1 = iv ^ l0 ^ l1; -+ iv2 = iv ^ l1; -+ iv3 = iv ^ l1 ^ l2; -+ iv4 = iv ^ l1 ^ l2 ^ l0; -+ iv5 = iv ^ l2 ^ l0; -+ iv6 = iv ^ l2; -+ iv7 = iv ^ l2 ^ l; -+ -+ b0 ^= iv0; -+ b1 ^= iv1; -+ b2 ^= iv2; -+ b3 ^= iv3; -+ b4 ^= iv4; -+ b5 ^= iv5; -+ b6 ^= iv6; -+ b7 ^= iv7; -+ iv = iv7 ^ rkey0; -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_cipher_be (b0, rkey); \ -+ b1 = asm_cipher_be (b1, rkey); \ -+ b2 = asm_cipher_be (b2, rkey); \ -+ b3 = asm_cipher_be (b3, rkey); \ -+ b4 = asm_cipher_be (b4, rkey); \ -+ b5 = asm_cipher_be (b5, rkey); \ -+ b6 = asm_cipher_be (b6, rkey); \ -+ b7 = asm_cipher_be (b7, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ -+ rkeylf = asm_xor (rkeylast, rkey0); -+ -+ DO_ROUND(8); -+ -+ iv0 = asm_xor (rkeylf, iv0); -+ iv1 = asm_xor (rkeylf, iv1); -+ iv2 = 
asm_xor (rkeylf, iv2); -+ iv3 = asm_xor (rkeylf, iv3); -+ iv4 = asm_xor (rkeylf, iv4); -+ iv5 = asm_xor (rkeylf, iv5); -+ iv6 = asm_xor (rkeylf, iv6); -+ iv7 = asm_xor (rkeylf, iv7); -+ -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ b0 = asm_cipherlast_be (b0, iv0); -+ b1 = asm_cipherlast_be (b1, iv1); -+ b2 = asm_cipherlast_be (b2, iv2); -+ b3 = asm_cipherlast_be (b3, iv3); -+ b4 = asm_cipherlast_be (b4, iv4); -+ b5 = asm_cipherlast_be (b5, iv5); -+ b6 = asm_cipherlast_be (b6, iv6); -+ b7 = asm_cipherlast_be (b7, iv7); -+ -+ b0 = VEC_BE_SWAP (b0, bige_const); -+ b1 = VEC_BE_SWAP (b1, bige_const); -+ b2 = VEC_BE_SWAP (b2, bige_const); -+ b3 = VEC_BE_SWAP (b3, bige_const); -+ b4 = VEC_BE_SWAP (b4, bige_const); -+ b5 = VEC_BE_SWAP (b5, bige_const); -+ b6 = VEC_BE_SWAP (b6, bige_const); -+ b7 = VEC_BE_SWAP (b7, bige_const); -+ VEC_STORE_BE_NOSWAP (out, 0, b0); -+ VEC_STORE_BE_NOSWAP (out, 1, b1); -+ VEC_STORE_BE_NOSWAP (out, 2, b2); -+ VEC_STORE_BE_NOSWAP (out, 3, b3); -+ VEC_STORE_BE_NOSWAP (out, 4, b4); -+ VEC_STORE_BE_NOSWAP (out, 5, b5); -+ VEC_STORE_BE_NOSWAP (out, 6, b6); -+ VEC_STORE_BE_NOSWAP (out, 7, b7); -+ out += 8; -+ } -+ -+ if (nblocks >= 4 && (data_nblocks % 4) == 0) -+ { -+ b0 = VEC_LOAD_BE (in, 0, bige_const); -+ b1 = VEC_LOAD_BE (in, 1, bige_const); -+ b2 = VEC_LOAD_BE (in, 2, bige_const); -+ b3 = VEC_LOAD_BE (in, 3, bige_const); -+ -+ l = VEC_LOAD_BE (ocb_get_l (c, data_nblocks += 4), 0, bige_const); -+ -+ ctr ^= b0 ^ b1 ^ b2 ^ b3; -+ -+ iv ^= rkey0; -+ -+ iv0 = iv ^ l0; -+ iv1 = iv ^ l0 ^ l1; -+ iv2 = iv ^ l1; -+ iv3 = iv ^ l1 ^ l; -+ -+ b0 ^= iv0; -+ b1 ^= iv1; -+ b2 ^= iv2; -+ b3 ^= iv3; -+ iv = iv3 ^ rkey0; -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_cipher_be (b0, rkey); \ -+ b1 = asm_cipher_be (b1, rkey); \ -+ b2 = asm_cipher_be (b2, rkey); \ -+ b3 = asm_cipher_be (b3, rkey); -+ -+ 
DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ DO_ROUND(8); -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ rkey = rkeylast ^ rkey0; -+ b0 = asm_cipherlast_be (b0, rkey ^ iv0); -+ b1 = asm_cipherlast_be (b1, rkey ^ iv1); -+ b2 = asm_cipherlast_be (b2, rkey ^ iv2); -+ b3 = asm_cipherlast_be (b3, rkey ^ iv3); -+ -+ VEC_STORE_BE (out, 0, b0, bige_const); -+ VEC_STORE_BE (out, 1, b1, bige_const); -+ VEC_STORE_BE (out, 2, b2, bige_const); -+ VEC_STORE_BE (out, 3, b3, bige_const); -+ -+ in += 4; -+ out += 4; -+ nblocks -= 4; -+ } -+ -+ for (; nblocks; nblocks--) -+ { -+ l = VEC_LOAD_BE (ocb_get_l (c, ++data_nblocks), 0, bige_const); -+ b = VEC_LOAD_BE (in, 0, bige_const); -+ -+ /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -+ iv ^= l; -+ /* Checksum_i = Checksum_{i-1} xor P_i */ -+ ctr ^= b; -+ /* C_i = Offset_i xor ENCIPHER(K, P_i xor Offset_i) */ -+ b ^= iv; -+ AES_ENCRYPT (b, rounds); -+ b ^= iv; -+ -+ VEC_STORE_BE (out, 0, b, bige_const); -+ -+ in += 1; -+ out += 1; -+ } -+ } -+ else -+ { -+ const u128_t *rk = (u128_t *)&ctx->keyschdec; -+ -+ if (!ctx->decryption_prepared) -+ { -+ internal_aes_ppc_prepare_decryption (ctx); -+ ctx->decryption_prepared = 1; -+ } -+ -+ PRELOAD_ROUND_KEYS (rounds); -+ -+ for (; nblocks >= 8 && data_nblocks % 8; nblocks--) -+ { -+ l = VEC_LOAD_BE (ocb_get_l (c, ++data_nblocks), 0, bige_const); -+ b = VEC_LOAD_BE (in, 0, bige_const); -+ -+ /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -+ iv ^= l; -+ /* P_i = Offset_i xor DECIPHER(K, C_i xor Offset_i) */ -+ b ^= iv; -+ AES_DECRYPT (b, rounds); -+ b ^= iv; -+ /* Checksum_i = Checksum_{i-1} xor P_i */ -+ ctr ^= b; -+ -+ VEC_STORE_BE (out, 0, b, bige_const); -+ -+ in += 1; -+ out += 1; -+ } -+ -+ for (; nblocks >= 8; nblocks -= 8) -+ { -+ b0 = VEC_LOAD_BE_NOSWAP (in, 0); -+ b1 = VEC_LOAD_BE_NOSWAP (in, 1); -+ 
b2 = VEC_LOAD_BE_NOSWAP (in, 2); -+ b3 = VEC_LOAD_BE_NOSWAP (in, 3); -+ b4 = VEC_LOAD_BE_NOSWAP (in, 4); -+ b5 = VEC_LOAD_BE_NOSWAP (in, 5); -+ b6 = VEC_LOAD_BE_NOSWAP (in, 6); -+ b7 = VEC_LOAD_BE_NOSWAP (in, 7); -+ in += 8; -+ l = VEC_LOAD_BE_NOSWAP (ocb_get_l (c, data_nblocks += 8), 0); -+ b0 = VEC_BE_SWAP(b0, bige_const); -+ b1 = VEC_BE_SWAP(b1, bige_const); -+ b2 = VEC_BE_SWAP(b2, bige_const); -+ b3 = VEC_BE_SWAP(b3, bige_const); -+ b4 = VEC_BE_SWAP(b4, bige_const); -+ b5 = VEC_BE_SWAP(b5, bige_const); -+ b6 = VEC_BE_SWAP(b6, bige_const); -+ b7 = VEC_BE_SWAP(b7, bige_const); -+ l = VEC_BE_SWAP(l, bige_const); -+ -+ iv ^= rkey0; -+ -+ iv0 = iv ^ l0; -+ iv1 = iv ^ l0 ^ l1; -+ iv2 = iv ^ l1; -+ iv3 = iv ^ l1 ^ l2; -+ iv4 = iv ^ l1 ^ l2 ^ l0; -+ iv5 = iv ^ l2 ^ l0; -+ iv6 = iv ^ l2; -+ iv7 = iv ^ l2 ^ l; -+ -+ b0 ^= iv0; -+ b1 ^= iv1; -+ b2 ^= iv2; -+ b3 ^= iv3; -+ b4 ^= iv4; -+ b5 ^= iv5; -+ b6 ^= iv6; -+ b7 ^= iv7; -+ iv = iv7 ^ rkey0; -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_ncipher_be (b0, rkey); \ -+ b1 = asm_ncipher_be (b1, rkey); \ -+ b2 = asm_ncipher_be (b2, rkey); \ -+ b3 = asm_ncipher_be (b3, rkey); \ -+ b4 = asm_ncipher_be (b4, rkey); \ -+ b5 = asm_ncipher_be (b5, rkey); \ -+ b6 = asm_ncipher_be (b6, rkey); \ -+ b7 = asm_ncipher_be (b7, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ -+ rkeylf = asm_xor (rkeylast, rkey0); -+ -+ DO_ROUND(8); -+ -+ iv0 = asm_xor (rkeylf, iv0); -+ iv1 = asm_xor (rkeylf, iv1); -+ iv2 = asm_xor (rkeylf, iv2); -+ iv3 = asm_xor (rkeylf, iv3); -+ iv4 = asm_xor (rkeylf, iv4); -+ iv5 = asm_xor (rkeylf, iv5); -+ iv6 = asm_xor (rkeylf, iv6); -+ iv7 = asm_xor (rkeylf, iv7); -+ -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ b0 = asm_ncipherlast_be (b0, iv0); -+ b1 = asm_ncipherlast_be (b1, iv1); -+ b2 = 
asm_ncipherlast_be (b2, iv2); -+ b3 = asm_ncipherlast_be (b3, iv3); -+ b4 = asm_ncipherlast_be (b4, iv4); -+ b5 = asm_ncipherlast_be (b5, iv5); -+ b6 = asm_ncipherlast_be (b6, iv6); -+ b7 = asm_ncipherlast_be (b7, iv7); -+ -+ ctr ^= b0 ^ b1 ^ b2 ^ b3 ^ b4 ^ b5 ^ b6 ^ b7; -+ -+ b0 = VEC_BE_SWAP (b0, bige_const); -+ b1 = VEC_BE_SWAP (b1, bige_const); -+ b2 = VEC_BE_SWAP (b2, bige_const); -+ b3 = VEC_BE_SWAP (b3, bige_const); -+ b4 = VEC_BE_SWAP (b4, bige_const); -+ b5 = VEC_BE_SWAP (b5, bige_const); -+ b6 = VEC_BE_SWAP (b6, bige_const); -+ b7 = VEC_BE_SWAP (b7, bige_const); -+ VEC_STORE_BE_NOSWAP (out, 0, b0); -+ VEC_STORE_BE_NOSWAP (out, 1, b1); -+ VEC_STORE_BE_NOSWAP (out, 2, b2); -+ VEC_STORE_BE_NOSWAP (out, 3, b3); -+ VEC_STORE_BE_NOSWAP (out, 4, b4); -+ VEC_STORE_BE_NOSWAP (out, 5, b5); -+ VEC_STORE_BE_NOSWAP (out, 6, b6); -+ VEC_STORE_BE_NOSWAP (out, 7, b7); -+ out += 8; -+ } -+ -+ if (nblocks >= 4 && (data_nblocks % 4) == 0) -+ { -+ b0 = VEC_LOAD_BE (in, 0, bige_const); -+ b1 = VEC_LOAD_BE (in, 1, bige_const); -+ b2 = VEC_LOAD_BE (in, 2, bige_const); -+ b3 = VEC_LOAD_BE (in, 3, bige_const); -+ -+ l = VEC_LOAD_BE (ocb_get_l (c, data_nblocks += 4), 0, bige_const); -+ -+ iv ^= rkey0; -+ -+ iv0 = iv ^ l0; -+ iv1 = iv ^ l0 ^ l1; -+ iv2 = iv ^ l1; -+ iv3 = iv ^ l1 ^ l; -+ -+ b0 ^= iv0; -+ b1 ^= iv1; -+ b2 ^= iv2; -+ b3 ^= iv3; -+ iv = iv3 ^ rkey0; -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_ncipher_be (b0, rkey); \ -+ b1 = asm_ncipher_be (b1, rkey); \ -+ b2 = asm_ncipher_be (b2, rkey); \ -+ b3 = asm_ncipher_be (b3, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ DO_ROUND(8); -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ rkey = rkeylast ^ rkey0; -+ b0 = asm_ncipherlast_be (b0, rkey ^ iv0); -+ b1 = asm_ncipherlast_be (b1, rkey ^ iv1); -+ b2 
= asm_ncipherlast_be (b2, rkey ^ iv2); -+ b3 = asm_ncipherlast_be (b3, rkey ^ iv3); -+ -+ VEC_STORE_BE (out, 0, b0, bige_const); -+ VEC_STORE_BE (out, 1, b1, bige_const); -+ VEC_STORE_BE (out, 2, b2, bige_const); -+ VEC_STORE_BE (out, 3, b3, bige_const); -+ -+ ctr ^= b0 ^ b1 ^ b2 ^ b3; -+ -+ in += 4; -+ out += 4; -+ nblocks -= 4; -+ } -+ -+ for (; nblocks; nblocks--) -+ { -+ l = VEC_LOAD_BE (ocb_get_l (c, ++data_nblocks), 0, bige_const); -+ b = VEC_LOAD_BE (in, 0, bige_const); -+ -+ /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -+ iv ^= l; -+ /* P_i = Offset_i xor DECIPHER(K, C_i xor Offset_i) */ -+ b ^= iv; -+ AES_DECRYPT (b, rounds); -+ b ^= iv; -+ /* Checksum_i = Checksum_{i-1} xor P_i */ -+ ctr ^= b; -+ -+ VEC_STORE_BE (out, 0, b, bige_const); -+ -+ in += 1; -+ out += 1; -+ } -+ } -+ -+ VEC_STORE_BE (c->u_iv.iv, 0, iv, bige_const); -+ VEC_STORE_BE (c->u_ctr.ctr, 0, ctr, bige_const); -+ c->u_mode.ocb.data_nblocks = data_nblocks; -+ -+ return 0; -+} -+ -+size_t OCB_AUTH_FUNC (gcry_cipher_hd_t c, void *abuf_arg, size_t nblocks) -+{ -+ const block bige_const = asm_load_be_const(); -+ RIJNDAEL_context *ctx = (void *)&c->context.c; -+ const u128_t *rk = (u128_t *)&ctx->keyschenc; -+ const u128_t *abuf = (const u128_t *)abuf_arg; -+ int rounds = ctx->rounds; -+ u64 data_nblocks = c->u_mode.ocb.aad_nblocks; -+ block l0, l1, l2, l; -+ block b0, b1, b2, b3, b4, b5, b6, b7, b; -+ block iv0, iv1, iv2, iv3, iv4, iv5, iv6, iv7; -+ block rkey, frkey; -+ block ctr, iv; -+ ROUND_KEY_VARIABLES; -+ -+ iv = VEC_LOAD_BE (c->u_mode.ocb.aad_offset, 0, bige_const); -+ ctr = VEC_LOAD_BE (c->u_mode.ocb.aad_sum, 0, bige_const); -+ -+ l0 = VEC_LOAD_BE (c->u_mode.ocb.L[0], 0, bige_const); -+ l1 = VEC_LOAD_BE (c->u_mode.ocb.L[1], 0, bige_const); -+ l2 = VEC_LOAD_BE (c->u_mode.ocb.L[2], 0, bige_const); -+ -+ PRELOAD_ROUND_KEYS (rounds); -+ -+ for (; nblocks >= 8 && data_nblocks % 8; nblocks--) -+ { -+ l = VEC_LOAD_BE (ocb_get_l (c, ++data_nblocks), 0, bige_const); -+ b = VEC_LOAD_BE (abuf, 0, 
bige_const); -+ -+ /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -+ iv ^= l; -+ /* Sum_i = Sum_{i-1} xor ENCIPHER(K, A_i xor Offset_i) */ -+ b ^= iv; -+ AES_ENCRYPT (b, rounds); -+ ctr ^= b; -+ -+ abuf += 1; -+ } -+ -+ for (; nblocks >= 8; nblocks -= 8) -+ { -+ b0 = VEC_LOAD_BE (abuf, 0, bige_const); -+ b1 = VEC_LOAD_BE (abuf, 1, bige_const); -+ b2 = VEC_LOAD_BE (abuf, 2, bige_const); -+ b3 = VEC_LOAD_BE (abuf, 3, bige_const); -+ b4 = VEC_LOAD_BE (abuf, 4, bige_const); -+ b5 = VEC_LOAD_BE (abuf, 5, bige_const); -+ b6 = VEC_LOAD_BE (abuf, 6, bige_const); -+ b7 = VEC_LOAD_BE (abuf, 7, bige_const); -+ -+ l = VEC_LOAD_BE (ocb_get_l (c, data_nblocks += 8), 0, bige_const); -+ -+ frkey = rkey0; -+ iv ^= frkey; -+ -+ iv0 = iv ^ l0; -+ iv1 = iv ^ l0 ^ l1; -+ iv2 = iv ^ l1; -+ iv3 = iv ^ l1 ^ l2; -+ iv4 = iv ^ l1 ^ l2 ^ l0; -+ iv5 = iv ^ l2 ^ l0; -+ iv6 = iv ^ l2; -+ iv7 = iv ^ l2 ^ l; -+ -+ b0 ^= iv0; -+ b1 ^= iv1; -+ b2 ^= iv2; -+ b3 ^= iv3; -+ b4 ^= iv4; -+ b5 ^= iv5; -+ b6 ^= iv6; -+ b7 ^= iv7; -+ iv = iv7 ^ frkey; -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_cipher_be (b0, rkey); \ -+ b1 = asm_cipher_be (b1, rkey); \ -+ b2 = asm_cipher_be (b2, rkey); \ -+ b3 = asm_cipher_be (b3, rkey); \ -+ b4 = asm_cipher_be (b4, rkey); \ -+ b5 = asm_cipher_be (b5, rkey); \ -+ b6 = asm_cipher_be (b6, rkey); \ -+ b7 = asm_cipher_be (b7, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ DO_ROUND(8); -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ rkey = rkeylast; -+ b0 = asm_cipherlast_be (b0, rkey); -+ b1 = asm_cipherlast_be (b1, rkey); -+ b2 = asm_cipherlast_be (b2, rkey); -+ b3 = asm_cipherlast_be (b3, rkey); -+ b4 = asm_cipherlast_be (b4, rkey); -+ b5 = asm_cipherlast_be (b5, rkey); -+ b6 = asm_cipherlast_be (b6, rkey); -+ b7 = asm_cipherlast_be (b7, rkey); -+ 
-+ ctr ^= b0 ^ b1 ^ b2 ^ b3 ^ b4 ^ b5 ^ b6 ^ b7; -+ -+ abuf += 8; -+ } -+ -+ if (nblocks >= 4 && (data_nblocks % 4) == 0) -+ { -+ b0 = VEC_LOAD_BE (abuf, 0, bige_const); -+ b1 = VEC_LOAD_BE (abuf, 1, bige_const); -+ b2 = VEC_LOAD_BE (abuf, 2, bige_const); -+ b3 = VEC_LOAD_BE (abuf, 3, bige_const); -+ -+ l = VEC_LOAD_BE (ocb_get_l (c, data_nblocks += 4), 0, bige_const); -+ -+ frkey = rkey0; -+ iv ^= frkey; -+ -+ iv0 = iv ^ l0; -+ iv1 = iv ^ l0 ^ l1; -+ iv2 = iv ^ l1; -+ iv3 = iv ^ l1 ^ l; -+ -+ b0 ^= iv0; -+ b1 ^= iv1; -+ b2 ^= iv2; -+ b3 ^= iv3; -+ iv = iv3 ^ frkey; -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_cipher_be (b0, rkey); \ -+ b1 = asm_cipher_be (b1, rkey); \ -+ b2 = asm_cipher_be (b2, rkey); \ -+ b3 = asm_cipher_be (b3, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ DO_ROUND(8); -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ rkey = rkeylast; -+ b0 = asm_cipherlast_be (b0, rkey); -+ b1 = asm_cipherlast_be (b1, rkey); -+ b2 = asm_cipherlast_be (b2, rkey); -+ b3 = asm_cipherlast_be (b3, rkey); -+ -+ ctr ^= b0 ^ b1 ^ b2 ^ b3; -+ -+ abuf += 4; -+ nblocks -= 4; -+ } -+ -+ for (; nblocks; nblocks--) -+ { -+ l = VEC_LOAD_BE (ocb_get_l (c, ++data_nblocks), 0, bige_const); -+ b = VEC_LOAD_BE (abuf, 0, bige_const); -+ -+ /* Offset_i = Offset_{i-1} xor L_{ntz(i)} */ -+ iv ^= l; -+ /* Sum_i = Sum_{i-1} xor ENCIPHER(K, A_i xor Offset_i) */ -+ b ^= iv; -+ AES_ENCRYPT (b, rounds); -+ ctr ^= b; -+ -+ abuf += 1; -+ } -+ -+ VEC_STORE_BE (c->u_mode.ocb.aad_offset, 0, iv, bige_const); -+ VEC_STORE_BE (c->u_mode.ocb.aad_sum, 0, ctr, bige_const); -+ c->u_mode.ocb.aad_nblocks = data_nblocks; -+ -+ return 0; -+} -+ -+ -+void XTS_CRYPT_FUNC (void *context, unsigned char *tweak_arg, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks, int 
encrypt) -+{ -+#ifdef WORDS_BIGENDIAN -+ static const block vec_bswap128_const = -+ { 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 }; -+#else -+ static const block vec_bswap128_const = -+ { ~15, ~14, ~13, ~12, ~11, ~10, ~9, ~8, ~7, ~6, ~5, ~4, ~3, ~2, ~1, ~0 }; -+#endif -+ static const unsigned char vec_tweak_const[16] = -+ { 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0x87 }; -+ static const vector unsigned long long vec_shift63_const = -+ { 63, 63 }; -+ const block bige_const = asm_load_be_const(); -+ RIJNDAEL_context *ctx = context; -+ const u128_t *in = (const u128_t *)inbuf_arg; -+ u128_t *out = (u128_t *)outbuf_arg; -+ int rounds = ctx->rounds; -+ block tweak; -+ block b0, b1, b2, b3, b4, b5, b6, b7, b, rkey, rkeylf; -+ block tweak0, tweak1, tweak2, tweak3, tweak4, tweak5, tweak6, tweak7; -+ block tweak_const, bswap128_const, shift63_const; -+ ROUND_KEY_VARIABLES; -+ -+ tweak_const = VEC_LOAD_BE (&vec_tweak_const, 0, bige_const); -+ bswap128_const = ALIGNED_LOAD (&vec_bswap128_const, 0); -+ shift63_const = ALIGNED_LOAD (&vec_shift63_const, 0); -+ -+ tweak = VEC_LOAD_BE (tweak_arg, 0, bige_const); -+ tweak = asm_vperm1 (tweak, bswap128_const); -+ -+#define GEN_TWEAK(tout, tin) /* Generate next tweak. 
*/ \ -+ do { \ -+ block tmp1, tmp2; \ -+ tmp1 = asm_swap_uint64_halfs(tin); \ -+ tmp2 = asm_add_uint64(tin, tin); \ -+ tmp1 = asm_sra_int64(tmp1, shift63_const) & tweak_const; \ -+ tout = asm_xor(tmp1, tmp2); \ -+ } while (0) -+ -+ if (encrypt) -+ { -+ const u128_t *rk = (u128_t *)&ctx->keyschenc; -+ -+ PRELOAD_ROUND_KEYS (rounds); -+ -+ for (; nblocks >= 8; nblocks -= 8) -+ { -+ b0 = VEC_LOAD_BE_NOSWAP (in, 0); -+ b1 = VEC_LOAD_BE_NOSWAP (in, 1); -+ b2 = VEC_LOAD_BE_NOSWAP (in, 2); -+ b3 = VEC_LOAD_BE_NOSWAP (in, 3); -+ tweak0 = tweak; -+ GEN_TWEAK (tweak1, tweak0); -+ tweak0 = asm_vperm1 (tweak0, bswap128_const); -+ b4 = VEC_LOAD_BE_NOSWAP (in, 4); -+ b5 = VEC_LOAD_BE_NOSWAP (in, 5); -+ GEN_TWEAK (tweak2, tweak1); -+ tweak1 = asm_vperm1 (tweak1, bswap128_const); -+ b6 = VEC_LOAD_BE_NOSWAP (in, 6); -+ b7 = VEC_LOAD_BE_NOSWAP (in, 7); -+ in += 8; -+ -+ b0 = VEC_BE_SWAP(b0, bige_const); -+ b1 = VEC_BE_SWAP(b1, bige_const); -+ GEN_TWEAK (tweak3, tweak2); -+ tweak2 = asm_vperm1 (tweak2, bswap128_const); -+ GEN_TWEAK (tweak4, tweak3); -+ tweak3 = asm_vperm1 (tweak3, bswap128_const); -+ b2 = VEC_BE_SWAP(b2, bige_const); -+ b3 = VEC_BE_SWAP(b3, bige_const); -+ GEN_TWEAK (tweak5, tweak4); -+ tweak4 = asm_vperm1 (tweak4, bswap128_const); -+ GEN_TWEAK (tweak6, tweak5); -+ tweak5 = asm_vperm1 (tweak5, bswap128_const); -+ b4 = VEC_BE_SWAP(b4, bige_const); -+ b5 = VEC_BE_SWAP(b5, bige_const); -+ GEN_TWEAK (tweak7, tweak6); -+ tweak6 = asm_vperm1 (tweak6, bswap128_const); -+ GEN_TWEAK (tweak, tweak7); -+ tweak7 = asm_vperm1 (tweak7, bswap128_const); -+ b6 = VEC_BE_SWAP(b6, bige_const); -+ b7 = VEC_BE_SWAP(b7, bige_const); -+ -+ tweak0 = asm_xor (tweak0, rkey0); -+ tweak1 = asm_xor (tweak1, rkey0); -+ tweak2 = asm_xor (tweak2, rkey0); -+ tweak3 = asm_xor (tweak3, rkey0); -+ tweak4 = asm_xor (tweak4, rkey0); -+ tweak5 = asm_xor (tweak5, rkey0); -+ tweak6 = asm_xor (tweak6, rkey0); -+ tweak7 = asm_xor (tweak7, rkey0); -+ -+ b0 = asm_xor (b0, tweak0); -+ b1 = asm_xor (b1, tweak1); 
-+ b2 = asm_xor (b2, tweak2); -+ b3 = asm_xor (b3, tweak3); -+ b4 = asm_xor (b4, tweak4); -+ b5 = asm_xor (b5, tweak5); -+ b6 = asm_xor (b6, tweak6); -+ b7 = asm_xor (b7, tweak7); -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_cipher_be (b0, rkey); \ -+ b1 = asm_cipher_be (b1, rkey); \ -+ b2 = asm_cipher_be (b2, rkey); \ -+ b3 = asm_cipher_be (b3, rkey); \ -+ b4 = asm_cipher_be (b4, rkey); \ -+ b5 = asm_cipher_be (b5, rkey); \ -+ b6 = asm_cipher_be (b6, rkey); \ -+ b7 = asm_cipher_be (b7, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ -+ rkeylf = asm_xor (rkeylast, rkey0); -+ -+ DO_ROUND(8); -+ -+ tweak0 = asm_xor (tweak0, rkeylf); -+ tweak1 = asm_xor (tweak1, rkeylf); -+ tweak2 = asm_xor (tweak2, rkeylf); -+ tweak3 = asm_xor (tweak3, rkeylf); -+ tweak4 = asm_xor (tweak4, rkeylf); -+ tweak5 = asm_xor (tweak5, rkeylf); -+ tweak6 = asm_xor (tweak6, rkeylf); -+ tweak7 = asm_xor (tweak7, rkeylf); -+ -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ b0 = asm_cipherlast_be (b0, tweak0); -+ b1 = asm_cipherlast_be (b1, tweak1); -+ b2 = asm_cipherlast_be (b2, tweak2); -+ b3 = asm_cipherlast_be (b3, tweak3); -+ b0 = VEC_BE_SWAP (b0, bige_const); -+ b1 = VEC_BE_SWAP (b1, bige_const); -+ b4 = asm_cipherlast_be (b4, tweak4); -+ b5 = asm_cipherlast_be (b5, tweak5); -+ b2 = VEC_BE_SWAP (b2, bige_const); -+ b3 = VEC_BE_SWAP (b3, bige_const); -+ b6 = asm_cipherlast_be (b6, tweak6); -+ b7 = asm_cipherlast_be (b7, tweak7); -+ VEC_STORE_BE_NOSWAP (out, 0, b0); -+ VEC_STORE_BE_NOSWAP (out, 1, b1); -+ b4 = VEC_BE_SWAP (b4, bige_const); -+ b5 = VEC_BE_SWAP (b5, bige_const); -+ VEC_STORE_BE_NOSWAP (out, 2, b2); -+ VEC_STORE_BE_NOSWAP (out, 3, b3); -+ b6 = VEC_BE_SWAP (b6, bige_const); -+ b7 = VEC_BE_SWAP (b7, bige_const); -+ VEC_STORE_BE_NOSWAP (out, 4, b4); -+ 
VEC_STORE_BE_NOSWAP (out, 5, b5); -+ VEC_STORE_BE_NOSWAP (out, 6, b6); -+ VEC_STORE_BE_NOSWAP (out, 7, b7); -+ out += 8; -+ } -+ -+ if (nblocks >= 4) -+ { -+ tweak0 = tweak; -+ GEN_TWEAK (tweak1, tweak0); -+ GEN_TWEAK (tweak2, tweak1); -+ GEN_TWEAK (tweak3, tweak2); -+ GEN_TWEAK (tweak, tweak3); -+ -+ b0 = VEC_LOAD_BE (in, 0, bige_const); -+ b1 = VEC_LOAD_BE (in, 1, bige_const); -+ b2 = VEC_LOAD_BE (in, 2, bige_const); -+ b3 = VEC_LOAD_BE (in, 3, bige_const); -+ -+ tweak0 = asm_vperm1 (tweak0, bswap128_const); -+ tweak1 = asm_vperm1 (tweak1, bswap128_const); -+ tweak2 = asm_vperm1 (tweak2, bswap128_const); -+ tweak3 = asm_vperm1 (tweak3, bswap128_const); -+ -+ b0 ^= tweak0 ^ rkey0; -+ b1 ^= tweak1 ^ rkey0; -+ b2 ^= tweak2 ^ rkey0; -+ b3 ^= tweak3 ^ rkey0; -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_cipher_be (b0, rkey); \ -+ b1 = asm_cipher_be (b1, rkey); \ -+ b2 = asm_cipher_be (b2, rkey); \ -+ b3 = asm_cipher_be (b3, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ DO_ROUND(8); -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ rkey = rkeylast; -+ b0 = asm_cipherlast_be (b0, rkey ^ tweak0); -+ b1 = asm_cipherlast_be (b1, rkey ^ tweak1); -+ b2 = asm_cipherlast_be (b2, rkey ^ tweak2); -+ b3 = asm_cipherlast_be (b3, rkey ^ tweak3); -+ -+ VEC_STORE_BE (out, 0, b0, bige_const); -+ VEC_STORE_BE (out, 1, b1, bige_const); -+ VEC_STORE_BE (out, 2, b2, bige_const); -+ VEC_STORE_BE (out, 3, b3, bige_const); -+ -+ in += 4; -+ out += 4; -+ nblocks -= 4; -+ } -+ -+ for (; nblocks; nblocks--) -+ { -+ tweak0 = asm_vperm1 (tweak, bswap128_const); -+ -+ /* Xor-Encrypt/Decrypt-Xor block. */ -+ b = VEC_LOAD_BE (in, 0, bige_const) ^ tweak0; -+ -+ /* Generate next tweak. 
*/ -+ GEN_TWEAK (tweak, tweak); -+ -+ AES_ENCRYPT (b, rounds); -+ -+ b ^= tweak0; -+ VEC_STORE_BE (out, 0, b, bige_const); -+ -+ in++; -+ out++; -+ } -+ } -+ else -+ { -+ const u128_t *rk = (u128_t *)&ctx->keyschdec; -+ -+ if (!ctx->decryption_prepared) -+ { -+ internal_aes_ppc_prepare_decryption (ctx); -+ ctx->decryption_prepared = 1; -+ } -+ -+ PRELOAD_ROUND_KEYS (rounds); -+ -+ for (; nblocks >= 8; nblocks -= 8) -+ { -+ b0 = VEC_LOAD_BE_NOSWAP (in, 0); -+ b1 = VEC_LOAD_BE_NOSWAP (in, 1); -+ b2 = VEC_LOAD_BE_NOSWAP (in, 2); -+ b3 = VEC_LOAD_BE_NOSWAP (in, 3); -+ tweak0 = tweak; -+ GEN_TWEAK (tweak1, tweak0); -+ tweak0 = asm_vperm1 (tweak0, bswap128_const); -+ b4 = VEC_LOAD_BE_NOSWAP (in, 4); -+ b5 = VEC_LOAD_BE_NOSWAP (in, 5); -+ GEN_TWEAK (tweak2, tweak1); -+ tweak1 = asm_vperm1 (tweak1, bswap128_const); -+ b6 = VEC_LOAD_BE_NOSWAP (in, 6); -+ b7 = VEC_LOAD_BE_NOSWAP (in, 7); -+ in += 8; -+ -+ b0 = VEC_BE_SWAP(b0, bige_const); -+ b1 = VEC_BE_SWAP(b1, bige_const); -+ GEN_TWEAK (tweak3, tweak2); -+ tweak2 = asm_vperm1 (tweak2, bswap128_const); -+ GEN_TWEAK (tweak4, tweak3); -+ tweak3 = asm_vperm1 (tweak3, bswap128_const); -+ b2 = VEC_BE_SWAP(b2, bige_const); -+ b3 = VEC_BE_SWAP(b3, bige_const); -+ GEN_TWEAK (tweak5, tweak4); -+ tweak4 = asm_vperm1 (tweak4, bswap128_const); -+ GEN_TWEAK (tweak6, tweak5); -+ tweak5 = asm_vperm1 (tweak5, bswap128_const); -+ b4 = VEC_BE_SWAP(b4, bige_const); -+ b5 = VEC_BE_SWAP(b5, bige_const); -+ GEN_TWEAK (tweak7, tweak6); -+ tweak6 = asm_vperm1 (tweak6, bswap128_const); -+ GEN_TWEAK (tweak, tweak7); -+ tweak7 = asm_vperm1 (tweak7, bswap128_const); -+ b6 = VEC_BE_SWAP(b6, bige_const); -+ b7 = VEC_BE_SWAP(b7, bige_const); -+ -+ tweak0 = asm_xor (tweak0, rkey0); -+ tweak1 = asm_xor (tweak1, rkey0); -+ tweak2 = asm_xor (tweak2, rkey0); -+ tweak3 = asm_xor (tweak3, rkey0); -+ tweak4 = asm_xor (tweak4, rkey0); -+ tweak5 = asm_xor (tweak5, rkey0); -+ tweak6 = asm_xor (tweak6, rkey0); -+ tweak7 = asm_xor (tweak7, rkey0); -+ -+ b0 = asm_xor 
(b0, tweak0); -+ b1 = asm_xor (b1, tweak1); -+ b2 = asm_xor (b2, tweak2); -+ b3 = asm_xor (b3, tweak3); -+ b4 = asm_xor (b4, tweak4); -+ b5 = asm_xor (b5, tweak5); -+ b6 = asm_xor (b6, tweak6); -+ b7 = asm_xor (b7, tweak7); -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_ncipher_be (b0, rkey); \ -+ b1 = asm_ncipher_be (b1, rkey); \ -+ b2 = asm_ncipher_be (b2, rkey); \ -+ b3 = asm_ncipher_be (b3, rkey); \ -+ b4 = asm_ncipher_be (b4, rkey); \ -+ b5 = asm_ncipher_be (b5, rkey); \ -+ b6 = asm_ncipher_be (b6, rkey); \ -+ b7 = asm_ncipher_be (b7, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ -+ rkeylf = asm_xor (rkeylast, rkey0); -+ -+ DO_ROUND(8); -+ -+ tweak0 = asm_xor (tweak0, rkeylf); -+ tweak1 = asm_xor (tweak1, rkeylf); -+ tweak2 = asm_xor (tweak2, rkeylf); -+ tweak3 = asm_xor (tweak3, rkeylf); -+ tweak4 = asm_xor (tweak4, rkeylf); -+ tweak5 = asm_xor (tweak5, rkeylf); -+ tweak6 = asm_xor (tweak6, rkeylf); -+ tweak7 = asm_xor (tweak7, rkeylf); -+ -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ b0 = asm_ncipherlast_be (b0, tweak0); -+ b1 = asm_ncipherlast_be (b1, tweak1); -+ b2 = asm_ncipherlast_be (b2, tweak2); -+ b3 = asm_ncipherlast_be (b3, tweak3); -+ b0 = VEC_BE_SWAP (b0, bige_const); -+ b1 = VEC_BE_SWAP (b1, bige_const); -+ b4 = asm_ncipherlast_be (b4, tweak4); -+ b5 = asm_ncipherlast_be (b5, tweak5); -+ b2 = VEC_BE_SWAP (b2, bige_const); -+ b3 = VEC_BE_SWAP (b3, bige_const); -+ b6 = asm_ncipherlast_be (b6, tweak6); -+ b7 = asm_ncipherlast_be (b7, tweak7); -+ VEC_STORE_BE_NOSWAP (out, 0, b0); -+ VEC_STORE_BE_NOSWAP (out, 1, b1); -+ b4 = VEC_BE_SWAP (b4, bige_const); -+ b5 = VEC_BE_SWAP (b5, bige_const); -+ VEC_STORE_BE_NOSWAP (out, 2, b2); -+ VEC_STORE_BE_NOSWAP (out, 3, b3); -+ b6 = VEC_BE_SWAP (b6, bige_const); -+ b7 = VEC_BE_SWAP 
(b7, bige_const); -+ VEC_STORE_BE_NOSWAP (out, 4, b4); -+ VEC_STORE_BE_NOSWAP (out, 5, b5); -+ VEC_STORE_BE_NOSWAP (out, 6, b6); -+ VEC_STORE_BE_NOSWAP (out, 7, b7); -+ out += 8; -+ } -+ -+ if (nblocks >= 4) -+ { -+ tweak0 = tweak; -+ GEN_TWEAK (tweak1, tweak0); -+ GEN_TWEAK (tweak2, tweak1); -+ GEN_TWEAK (tweak3, tweak2); -+ GEN_TWEAK (tweak, tweak3); -+ -+ b0 = VEC_LOAD_BE (in, 0, bige_const); -+ b1 = VEC_LOAD_BE (in, 1, bige_const); -+ b2 = VEC_LOAD_BE (in, 2, bige_const); -+ b3 = VEC_LOAD_BE (in, 3, bige_const); -+ -+ tweak0 = asm_vperm1 (tweak0, bswap128_const); -+ tweak1 = asm_vperm1 (tweak1, bswap128_const); -+ tweak2 = asm_vperm1 (tweak2, bswap128_const); -+ tweak3 = asm_vperm1 (tweak3, bswap128_const); -+ -+ b0 ^= tweak0 ^ rkey0; -+ b1 ^= tweak1 ^ rkey0; -+ b2 ^= tweak2 ^ rkey0; -+ b3 ^= tweak3 ^ rkey0; -+ -+#define DO_ROUND(r) \ -+ rkey = ALIGNED_LOAD (rk, r); \ -+ b0 = asm_ncipher_be (b0, rkey); \ -+ b1 = asm_ncipher_be (b1, rkey); \ -+ b2 = asm_ncipher_be (b2, rkey); \ -+ b3 = asm_ncipher_be (b3, rkey); -+ -+ DO_ROUND(1); -+ DO_ROUND(2); -+ DO_ROUND(3); -+ DO_ROUND(4); -+ DO_ROUND(5); -+ DO_ROUND(6); -+ DO_ROUND(7); -+ DO_ROUND(8); -+ DO_ROUND(9); -+ if (rounds >= 12) -+ { -+ DO_ROUND(10); -+ DO_ROUND(11); -+ if (rounds > 12) -+ { -+ DO_ROUND(12); -+ DO_ROUND(13); -+ } -+ } -+ -+#undef DO_ROUND -+ -+ rkey = rkeylast; -+ b0 = asm_ncipherlast_be (b0, rkey ^ tweak0); -+ b1 = asm_ncipherlast_be (b1, rkey ^ tweak1); -+ b2 = asm_ncipherlast_be (b2, rkey ^ tweak2); -+ b3 = asm_ncipherlast_be (b3, rkey ^ tweak3); -+ -+ VEC_STORE_BE (out, 0, b0, bige_const); -+ VEC_STORE_BE (out, 1, b1, bige_const); -+ VEC_STORE_BE (out, 2, b2, bige_const); -+ VEC_STORE_BE (out, 3, b3, bige_const); -+ -+ in += 4; -+ out += 4; -+ nblocks -= 4; -+ } -+ -+ for (; nblocks; nblocks--) -+ { -+ tweak0 = asm_vperm1 (tweak, bswap128_const); -+ -+ /* Xor-Encrypt/Decrypt-Xor block. */ -+ b = VEC_LOAD_BE (in, 0, bige_const) ^ tweak0; -+ -+ /* Generate next tweak. 
*/ -+ GEN_TWEAK (tweak, tweak); -+ -+ AES_DECRYPT (b, rounds); -+ -+ b ^= tweak0; -+ VEC_STORE_BE (out, 0, b, bige_const); -+ -+ in++; -+ out++; -+ } -+ } -+ -+ tweak = asm_vperm1 (tweak, bswap128_const); -+ VEC_STORE_BE (tweak_arg, 0, tweak, bige_const); -+ -+#undef GEN_TWEAK -+} -diff -up libgcrypt-1.8.5/cipher/rijndael-ssse3-amd64.c.aes-perf libgcrypt-1.8.5/cipher/rijndael-ssse3-amd64.c ---- libgcrypt-1.8.5/cipher/rijndael-ssse3-amd64.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/rijndael-ssse3-amd64.c 2020-04-22 18:29:41.679862057 +0200 -@@ -208,11 +208,11 @@ _gcry_aes_ssse3_do_setkey (RIJNDAEL_cont - - - /* Make a decryption key from an encryption key. */ --void --_gcry_aes_ssse3_prepare_decryption (RIJNDAEL_context *ctx) -+static inline void -+do_ssse3_prepare_decryption (RIJNDAEL_context *ctx, -+ byte ssse3_state[SSSE3_STATE_SIZE]) - { - unsigned int keybits = (ctx->rounds - 10) * 32 + 128; -- byte ssse3_state[SSSE3_STATE_SIZE]; - - vpaes_ssse3_prepare(); - -@@ -237,6 +237,14 @@ _gcry_aes_ssse3_prepare_decryption (RIJN - vpaes_ssse3_cleanup(); - } - -+void -+_gcry_aes_ssse3_prepare_decryption (RIJNDAEL_context *ctx) -+{ -+ byte ssse3_state[SSSE3_STATE_SIZE]; -+ -+ do_ssse3_prepare_decryption(ctx, ssse3_state); -+} -+ - - /* Encrypt one block using the Intel SSSE3 instructions. Block is input - * and output through SSE register xmm0. 
*/ -@@ -295,9 +303,9 @@ _gcry_aes_ssse3_encrypt (const RIJNDAEL_ - - - void --_gcry_aes_ssse3_cfb_enc (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -- size_t nblocks) -+_gcry_aes_ssse3_cfb_enc (RIJNDAEL_context *ctx, unsigned char *iv, -+ unsigned char *outbuf, const unsigned char *inbuf, -+ size_t nblocks) - { - unsigned int nrounds = ctx->rounds; - byte ssse3_state[SSSE3_STATE_SIZE]; -@@ -334,9 +342,9 @@ _gcry_aes_ssse3_cfb_enc (RIJNDAEL_contex - - - void --_gcry_aes_ssse3_cbc_enc (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -- size_t nblocks, int cbc_mac) -+_gcry_aes_ssse3_cbc_enc (RIJNDAEL_context *ctx, unsigned char *iv, -+ unsigned char *outbuf, const unsigned char *inbuf, -+ size_t nblocks, int cbc_mac) - { - unsigned int nrounds = ctx->rounds; - byte ssse3_state[SSSE3_STATE_SIZE]; -@@ -379,9 +387,9 @@ _gcry_aes_ssse3_cbc_enc (RIJNDAEL_contex - - - void --_gcry_aes_ssse3_ctr_enc (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *ctr, -- size_t nblocks) -+_gcry_aes_ssse3_ctr_enc (RIJNDAEL_context *ctx, unsigned char *ctr, -+ unsigned char *outbuf, const unsigned char *inbuf, -+ size_t nblocks) - { - static const unsigned char be_mask[16] __attribute__ ((aligned (16))) = - { 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0 }; -@@ -447,7 +455,7 @@ _gcry_aes_ssse3_ctr_enc (RIJNDAEL_contex - - unsigned int - _gcry_aes_ssse3_decrypt (const RIJNDAEL_context *ctx, unsigned char *dst, -- const unsigned char *src) -+ const unsigned char *src) - { - unsigned int nrounds = ctx->rounds; - byte ssse3_state[SSSE3_STATE_SIZE]; -@@ -468,9 +476,9 @@ _gcry_aes_ssse3_decrypt (const RIJNDAEL_ - - - void --_gcry_aes_ssse3_cfb_dec (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -- size_t nblocks) -+_gcry_aes_ssse3_cfb_dec (RIJNDAEL_context *ctx, unsigned char *iv, -+ unsigned char 
*outbuf, const unsigned char *inbuf, -+ size_t nblocks) - { - unsigned int nrounds = ctx->rounds; - byte ssse3_state[SSSE3_STATE_SIZE]; -@@ -508,13 +516,19 @@ _gcry_aes_ssse3_cfb_dec (RIJNDAEL_contex - - - void --_gcry_aes_ssse3_cbc_dec (RIJNDAEL_context *ctx, unsigned char *outbuf, -- const unsigned char *inbuf, unsigned char *iv, -- size_t nblocks) -+_gcry_aes_ssse3_cbc_dec (RIJNDAEL_context *ctx, unsigned char *iv, -+ unsigned char *outbuf, const unsigned char *inbuf, -+ size_t nblocks) - { - unsigned int nrounds = ctx->rounds; - byte ssse3_state[SSSE3_STATE_SIZE]; - -+ if ( !ctx->decryption_prepared ) -+ { -+ do_ssse3_prepare_decryption ( ctx, ssse3_state ); -+ ctx->decryption_prepared = 1; -+ } -+ - vpaes_ssse3_prepare_dec (); - - asm volatile ("movdqu %[iv], %%xmm7\n\t" /* use xmm7 as fast IV storage */ -@@ -626,6 +640,12 @@ ssse3_ocb_dec (gcry_cipher_hd_t c, void - unsigned int nrounds = ctx->rounds; - byte ssse3_state[SSSE3_STATE_SIZE]; - -+ if ( !ctx->decryption_prepared ) -+ { -+ do_ssse3_prepare_decryption ( ctx, ssse3_state ); -+ ctx->decryption_prepared = 1; -+ } -+ - vpaes_ssse3_prepare_dec (); - - /* Preload Offset and Checksum */ -@@ -679,7 +699,7 @@ ssse3_ocb_dec (gcry_cipher_hd_t c, void - } - - --void -+size_t - _gcry_aes_ssse3_ocb_crypt(gcry_cipher_hd_t c, void *outbuf_arg, - const void *inbuf_arg, size_t nblocks, int encrypt) - { -@@ -687,10 +707,12 @@ _gcry_aes_ssse3_ocb_crypt(gcry_cipher_hd - ssse3_ocb_enc(c, outbuf_arg, inbuf_arg, nblocks); - else - ssse3_ocb_dec(c, outbuf_arg, inbuf_arg, nblocks); -+ -+ return 0; - } - - --void -+size_t - _gcry_aes_ssse3_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, - size_t nblocks) - { -@@ -746,6 +768,8 @@ _gcry_aes_ssse3_ocb_auth (gcry_cipher_hd - : "memory" ); - - vpaes_ssse3_cleanup (); -+ -+ return 0; - } - - #endif /* USE_SSSE3 */ -diff -up libgcrypt-1.8.5/cipher/salsa20.c.aes-perf libgcrypt-1.8.5/cipher/salsa20.c ---- libgcrypt-1.8.5/cipher/salsa20.c.aes-perf 2017-11-23 19:16:58.000000000 
+0100 -+++ libgcrypt-1.8.5/cipher/salsa20.c 2020-04-22 18:29:41.679862057 +0200 -@@ -366,10 +366,12 @@ salsa20_do_setkey (SALSA20_context_t *ct - - - static gcry_err_code_t --salsa20_setkey (void *context, const byte *key, unsigned int keylen) -+salsa20_setkey (void *context, const byte *key, unsigned int keylen, -+ gcry_cipher_hd_t hd) - { - SALSA20_context_t *ctx = (SALSA20_context_t *)context; - gcry_err_code_t rc = salsa20_do_setkey (ctx, key, keylen); -+ (void)hd; - _gcry_burn_stack (4 + sizeof (void *) + 4 * sizeof (void *)); - return rc; - } -@@ -522,7 +524,7 @@ selftest (void) - /* 16-byte alignment required for amd64 implementation. */ - ctx = (SALSA20_context_t *)((uintptr_t)(ctxbuf + 15) & ~(uintptr_t)15); - -- salsa20_setkey (ctx, key_1, sizeof key_1); -+ salsa20_setkey (ctx, key_1, sizeof key_1, NULL); - salsa20_setiv (ctx, nonce_1, sizeof nonce_1); - scratch[8] = 0; - salsa20_encrypt_stream (ctx, scratch, plaintext_1, sizeof plaintext_1); -@@ -530,7 +532,7 @@ selftest (void) - return "Salsa20 encryption test 1 failed."; - if (scratch[8]) - return "Salsa20 wrote too much."; -- salsa20_setkey( ctx, key_1, sizeof(key_1)); -+ salsa20_setkey( ctx, key_1, sizeof(key_1), NULL); - salsa20_setiv (ctx, nonce_1, sizeof nonce_1); - salsa20_encrypt_stream (ctx, scratch, scratch, sizeof plaintext_1); - if (memcmp (scratch, plaintext_1, sizeof plaintext_1)) -@@ -538,12 +540,12 @@ selftest (void) - - for (i = 0; i < sizeof buf; i++) - buf[i] = i; -- salsa20_setkey (ctx, key_1, sizeof key_1); -+ salsa20_setkey (ctx, key_1, sizeof key_1, NULL); - salsa20_setiv (ctx, nonce_1, sizeof nonce_1); - /*encrypt*/ - salsa20_encrypt_stream (ctx, buf, buf, sizeof buf); - /*decrypt*/ -- salsa20_setkey (ctx, key_1, sizeof key_1); -+ salsa20_setkey (ctx, key_1, sizeof key_1, NULL); - salsa20_setiv (ctx, nonce_1, sizeof nonce_1); - salsa20_encrypt_stream (ctx, buf, buf, 1); - salsa20_encrypt_stream (ctx, buf+1, buf+1, (sizeof buf)-1-1); -diff -up 
libgcrypt-1.8.5/cipher/seed.c.aes-perf libgcrypt-1.8.5/cipher/seed.c ---- libgcrypt-1.8.5/cipher/seed.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/seed.c 2020-04-22 18:29:41.680862038 +0200 -@@ -309,11 +309,12 @@ do_setkey (SEED_context *ctx, const byte - } - - static gcry_err_code_t --seed_setkey (void *context, const byte *key, const unsigned keylen) -+seed_setkey (void *context, const byte *key, const unsigned keylen, -+ gcry_cipher_hd_t hd) - { - SEED_context *ctx = context; -- - int rc = do_setkey (ctx, key, keylen); -+ (void)hd; - _gcry_burn_stack (4*6 + sizeof(void*)*2 + sizeof(int)*2); - return rc; - } -@@ -446,7 +447,7 @@ selftest (void) - 0x22, 0x6B, 0xC3, 0x14, 0x2C, 0xD4, 0x0D, 0x4A, - }; - -- seed_setkey (&ctx, key, sizeof(key)); -+ seed_setkey (&ctx, key, sizeof(key), NULL); - seed_encrypt (&ctx, scratch, plaintext); - if (memcmp (scratch, ciphertext, sizeof (ciphertext))) - return "SEED test encryption failed."; -diff -up libgcrypt-1.8.5/cipher/serpent.c.aes-perf libgcrypt-1.8.5/cipher/serpent.c ---- libgcrypt-1.8.5/cipher/serpent.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/serpent.c 2020-04-22 18:29:41.680862038 +0200 -@@ -748,13 +748,16 @@ serpent_setkey_internal (serpent_context - /* Initialize CTX with the key KEY of KEY_LENGTH bytes. */ - static gcry_err_code_t - serpent_setkey (void *ctx, -- const byte *key, unsigned int key_length) -+ const byte *key, unsigned int key_length, -+ gcry_cipher_hd_t hd) - { - serpent_context_t *context = ctx; - static const char *serpent_test_ret; - static int serpent_init_done; - gcry_err_code_t ret = GPG_ERR_NO_ERROR; - -+ (void)hd; -+ - if (! serpent_init_done) - { - /* Execute a self-test the first time, Serpent is used. */ -@@ -999,7 +1002,7 @@ _gcry_serpent_ctr_enc(void *context, uns - /* Encrypt the counter. */ - serpent_encrypt_internal(ctx, ctr, tmpbuf); - /* XOR the input with the encrypted counter and store in output. 
*/ -- buf_xor(outbuf, tmpbuf, inbuf, sizeof(serpent_block_t)); -+ cipher_block_xor(outbuf, tmpbuf, inbuf, sizeof(serpent_block_t)); - outbuf += sizeof(serpent_block_t); - inbuf += sizeof(serpent_block_t); - /* Increment the counter. */ -@@ -1114,7 +1117,8 @@ _gcry_serpent_cbc_dec(void *context, uns - the intermediate result to SAVEBUF. */ - serpent_decrypt_internal (ctx, inbuf, savebuf); - -- buf_xor_n_copy_2(outbuf, savebuf, iv, inbuf, sizeof(serpent_block_t)); -+ cipher_block_xor_n_copy_2(outbuf, savebuf, iv, inbuf, -+ sizeof(serpent_block_t)); - inbuf += sizeof(serpent_block_t); - outbuf += sizeof(serpent_block_t); - } -@@ -1218,7 +1222,7 @@ _gcry_serpent_cfb_dec(void *context, uns - for ( ;nblocks; nblocks-- ) - { - serpent_encrypt_internal(ctx, iv, iv); -- buf_xor_n_copy(outbuf, iv, inbuf, sizeof(serpent_block_t)); -+ cipher_block_xor_n_copy(outbuf, iv, inbuf, sizeof(serpent_block_t)); - outbuf += sizeof(serpent_block_t); - inbuf += sizeof(serpent_block_t); - } -diff -up libgcrypt-1.8.5/cipher/twofish.c.aes-perf libgcrypt-1.8.5/cipher/twofish.c ---- libgcrypt-1.8.5/cipher/twofish.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/twofish.c 2020-04-22 18:29:41.680862038 +0200 -@@ -734,12 +734,15 @@ do_twofish_setkey (TWOFISH_context *ctx, - } - - static gcry_err_code_t --twofish_setkey (void *context, const byte *key, unsigned int keylen) -+twofish_setkey (void *context, const byte *key, unsigned int keylen, -+ gcry_cipher_hd_t hd) - { - TWOFISH_context *ctx = context; - unsigned int hwfeatures = _gcry_get_hw_features (); - int rc; - -+ (void)hd; -+ - rc = do_twofish_setkey (ctx, key, keylen); - - #ifdef USE_AVX2 -@@ -1245,7 +1248,7 @@ _gcry_twofish_ctr_enc(void *context, uns - burn_stack_depth = burn; - - /* XOR the input with the encrypted counter and store in output. 
*/ -- buf_xor(outbuf, tmpbuf, inbuf, TWOFISH_BLOCKSIZE); -+ cipher_block_xor(outbuf, tmpbuf, inbuf, TWOFISH_BLOCKSIZE); - outbuf += TWOFISH_BLOCKSIZE; - inbuf += TWOFISH_BLOCKSIZE; - /* Increment the counter. */ -@@ -1327,7 +1330,7 @@ _gcry_twofish_cbc_dec(void *context, uns - if (burn > burn_stack_depth) - burn_stack_depth = burn; - -- buf_xor_n_copy_2(outbuf, savebuf, iv, inbuf, TWOFISH_BLOCKSIZE); -+ cipher_block_xor_n_copy_2(outbuf, savebuf, iv, inbuf, TWOFISH_BLOCKSIZE); - inbuf += TWOFISH_BLOCKSIZE; - outbuf += TWOFISH_BLOCKSIZE; - } -@@ -1399,7 +1402,7 @@ _gcry_twofish_cfb_dec(void *context, uns - if (burn > burn_stack_depth) - burn_stack_depth = burn; - -- buf_xor_n_copy(outbuf, iv, inbuf, TWOFISH_BLOCKSIZE); -+ cipher_block_xor_n_copy(outbuf, iv, inbuf, TWOFISH_BLOCKSIZE); - outbuf += TWOFISH_BLOCKSIZE; - inbuf += TWOFISH_BLOCKSIZE; - } -@@ -1710,7 +1713,7 @@ selftest (void) - 0x05, 0x93, 0x1C, 0xB6, 0xD4, 0x08, 0xE7, 0xFA - }; - -- twofish_setkey (&ctx, key, sizeof(key)); -+ twofish_setkey (&ctx, key, sizeof(key), NULL); - twofish_encrypt (&ctx, scratch, plaintext); - if (memcmp (scratch, ciphertext, sizeof (ciphertext))) - return "Twofish-128 test encryption failed."; -@@ -1718,7 +1721,7 @@ selftest (void) - if (memcmp (scratch, plaintext, sizeof (plaintext))) - return "Twofish-128 test decryption failed."; - -- twofish_setkey (&ctx, key_256, sizeof(key_256)); -+ twofish_setkey (&ctx, key_256, sizeof(key_256), NULL); - twofish_encrypt (&ctx, scratch, plaintext_256); - if (memcmp (scratch, ciphertext_256, sizeof (ciphertext_256))) - return "Twofish-256 test encryption failed."; -@@ -1800,13 +1803,13 @@ main() - /* Encryption test. 
*/ - for (i = 0; i < 125; i++) - { -- twofish_setkey (&ctx, buffer[0], sizeof (buffer[0])); -+ twofish_setkey (&ctx, buffer[0], sizeof (buffer[0]), NULL); - for (j = 0; j < 1000; j++) - twofish_encrypt (&ctx, buffer[2], buffer[2]); -- twofish_setkey (&ctx, buffer[1], sizeof (buffer[1])); -+ twofish_setkey (&ctx, buffer[1], sizeof (buffer[1]), NULL); - for (j = 0; j < 1000; j++) - twofish_encrypt (&ctx, buffer[3], buffer[3]); -- twofish_setkey (&ctx, buffer[2], sizeof (buffer[2])*2); -+ twofish_setkey (&ctx, buffer[2], sizeof (buffer[2])*2, NULL); - for (j = 0; j < 1000; j++) { - twofish_encrypt (&ctx, buffer[0], buffer[0]); - twofish_encrypt (&ctx, buffer[1], buffer[1]); -@@ -1818,15 +1821,15 @@ main() - /* Decryption test. */ - for (i = 0; i < 125; i++) - { -- twofish_setkey (&ctx, buffer[2], sizeof (buffer[2])*2); -+ twofish_setkey (&ctx, buffer[2], sizeof (buffer[2])*2, NULL); - for (j = 0; j < 1000; j++) { - twofish_decrypt (&ctx, buffer[0], buffer[0]); - twofish_decrypt (&ctx, buffer[1], buffer[1]); - } -- twofish_setkey (&ctx, buffer[1], sizeof (buffer[1])); -+ twofish_setkey (&ctx, buffer[1], sizeof (buffer[1]), NULL); - for (j = 0; j < 1000; j++) - twofish_decrypt (&ctx, buffer[3], buffer[3]); -- twofish_setkey (&ctx, buffer[0], sizeof (buffer[0])); -+ twofish_setkey (&ctx, buffer[0], sizeof (buffer[0]), NULL); - for (j = 0; j < 1000; j++) - twofish_decrypt (&ctx, buffer[2], buffer[2]); - } -diff -up libgcrypt-1.8.5/configure.ac.aes-perf libgcrypt-1.8.5/configure.ac ---- libgcrypt-1.8.5/configure.ac.aes-perf 2020-04-22 18:29:41.655862516 +0200 -+++ libgcrypt-1.8.5/configure.ac 2020-04-22 18:29:41.681862019 +0200 -@@ -649,6 +649,14 @@ AC_ARG_ENABLE(arm-crypto-support, - armcryptosupport=$enableval,armcryptosupport=yes) - AC_MSG_RESULT($armcryptosupport) - -+# Implementation of the --disable-ppc-crypto-support switch. 
-+AC_MSG_CHECKING([whether PPC crypto support is requested]) -+AC_ARG_ENABLE(ppc-crypto-support, -+ AC_HELP_STRING([--disable-ppc-crypto-support], -+ [Disable support for the PPC crypto instructions introduced in POWER 8 (PowerISA 2.07)]), -+ ppccryptosupport=$enableval,ppccryptosupport=yes) -+AC_MSG_RESULT($ppccryptosupport) -+ - # Implementation of the --disable-O-flag-munging switch. - AC_MSG_CHECKING([whether a -O flag munging is requested]) - AC_ARG_ENABLE([O-flag-munging], -@@ -1196,6 +1204,9 @@ if test "$mpi_cpu_arch" != "arm" ; then - fi - fi - -+if test "$mpi_cpu_arch" != "ppc"; then -+ ppccryptosupport="n/a" -+fi - - ############################################# - #### #### -@@ -1722,6 +1733,113 @@ if test "$gcry_cv_gcc_inline_asm_aarch64 - fi - - -+# -+# Check whether PowerPC AltiVec/VSX intrinsics -+# -+AC_CACHE_CHECK([whether compiler supports PowerPC AltiVec/VSX intrinsics], -+ [gcry_cv_cc_ppc_altivec], -+ [if test "$mpi_cpu_arch" != "ppc" ; then -+ gcry_cv_cc_ppc_altivec="n/a" -+ else -+ gcry_cv_cc_ppc_altivec=no -+ AC_COMPILE_IFELSE([AC_LANG_SOURCE( -+ [[#include -+ typedef vector unsigned char block; -+ block fn(block in) -+ { -+ block t = vec_perm (in, in, vec_vsx_ld (0, (unsigned char*)0)); -+ return vec_cipher_be (t, in); -+ } -+ ]])], -+ [gcry_cv_cc_ppc_altivec=yes]) -+ fi]) -+if test "$gcry_cv_cc_ppc_altivec" = "yes" ; then -+ AC_DEFINE(HAVE_COMPATIBLE_CC_PPC_ALTIVEC,1, -+ [Defined if underlying compiler supports PowerPC AltiVec/VSX/crypto intrinsics]) -+fi -+ -+_gcc_cflags_save=$CFLAGS -+CFLAGS="$CFLAGS -maltivec -mvsx -mcrypto" -+ -+if test "$gcry_cv_cc_ppc_altivec" = "no" && -+ test "$mpi_cpu_arch" = "ppc" ; then -+ AC_CACHE_CHECK([whether compiler supports PowerPC AltiVec/VSX/crypto intrinsics with extra GCC flags], -+ [gcry_cv_cc_ppc_altivec_cflags], -+ [gcry_cv_cc_ppc_altivec_cflags=no -+ AC_COMPILE_IFELSE([AC_LANG_SOURCE( -+ [[#include -+ typedef vector unsigned char block; -+ block fn(block in) -+ { -+ block t = vec_perm (in, in, 
vec_vsx_ld (0, (unsigned char*)0)); -+ return vec_cipher_be (t, in); -+ }]])], -+ [gcry_cv_cc_ppc_altivec_cflags=yes])]) -+ if test "$gcry_cv_cc_ppc_altivec_cflags" = "yes" ; then -+ AC_DEFINE(HAVE_COMPATIBLE_CC_PPC_ALTIVEC,1, -+ [Defined if underlying compiler supports PowerPC AltiVec/VSX/crypto intrinsics]) -+ AC_DEFINE(HAVE_COMPATIBLE_CC_PPC_ALTIVEC_WITH_CFLAGS,1, -+ [Defined if underlying compiler supports PowerPC AltiVec/VSX/crypto intrinsics with extra GCC flags]) -+ fi -+fi -+ -+AM_CONDITIONAL(ENABLE_PPC_VCRYPTO_EXTRA_CFLAGS, -+ test "$gcry_cv_cc_ppc_altivec_cflags" = "yes") -+ -+# Restore flags. -+CFLAGS=$_gcc_cflags_save; -+ -+ -+# -+# Check whether GCC inline assembler supports PowerPC AltiVec/VSX/crypto instructions -+# -+AC_CACHE_CHECK([whether GCC inline assembler supports PowerPC AltiVec/VSX/crypto instructions], -+ [gcry_cv_gcc_inline_asm_ppc_altivec], -+ [if test "$mpi_cpu_arch" != "ppc" ; then -+ gcry_cv_gcc_inline_asm_ppc_altivec="n/a" -+ else -+ gcry_cv_gcc_inline_asm_ppc_altivec=no -+ AC_COMPILE_IFELSE([AC_LANG_SOURCE( -+ [[__asm__(".globl testfn;\n" -+ "testfn:\n" -+ "stvx %v31,%r12,%r0;\n" -+ "lvx %v20,%r12,%r0;\n" -+ "vcipher %v0, %v1, %v22;\n" -+ "lxvw4x %vs32, %r0, %r1;\n" -+ "vadduwm %v0, %v1, %v22;\n" -+ ); -+ ]])], -+ [gcry_cv_gcc_inline_asm_ppc_altivec=yes]) -+ fi]) -+if test "$gcry_cv_gcc_inline_asm_ppc_altivec" = "yes" ; then -+ AC_DEFINE(HAVE_GCC_INLINE_ASM_PPC_ALTIVEC,1, -+ [Defined if inline assembler supports PowerPC AltiVec/VSX/crypto instructions]) -+fi -+ -+ -+# -+# Check whether GCC inline assembler supports PowerISA 3.00 instructions -+# -+AC_CACHE_CHECK([whether GCC inline assembler supports PowerISA 3.00 instructions], -+ [gcry_cv_gcc_inline_asm_ppc_arch_3_00], -+ [if test "$mpi_cpu_arch" != "ppc" ; then -+ gcry_cv_gcc_inline_asm_ppc_arch_3_00="n/a" -+ else -+ gcry_cv_gcc_inline_asm_ppc_arch_3_00=no -+ AC_COMPILE_IFELSE([AC_LANG_SOURCE( -+ [[__asm__(".globl testfn;\n" -+ "testfn:\n" -+ "stxvb16x %r1,%v12,%v30;\n" -+ ); -+ 
]])], -+ [gcry_cv_gcc_inline_asm_ppc_arch_3_00=yes]) -+ fi]) -+if test "$gcry_cv_gcc_inline_asm_ppc_arch_3_00" = "yes" ; then -+ AC_DEFINE(HAVE_GCC_INLINE_ASM_PPC_ARCH_3_00,1, -+ [Defined if inline assembler supports PowerISA 3.00 instructions]) -+fi -+ -+ - ####################################### - #### Checks for library functions. #### - ####################################### -@@ -1999,6 +2117,10 @@ if test x"$armcryptosupport" = xyes ; th - AC_DEFINE(ENABLE_ARM_CRYPTO_SUPPORT,1, - [Enable support for ARMv8 Crypto Extension instructions.]) - fi -+if test x"$ppccryptosupport" = xyes ; then -+ AC_DEFINE(ENABLE_PPC_CRYPTO_SUPPORT,1, -+ [Enable support for POWER 8 (PowerISA 2.07) crypto extension.]) -+fi - if test x"$jentsupport" = xyes ; then - AC_DEFINE(ENABLE_JENT_SUPPORT, 1, - [Enable support for the jitter entropy collector.]) -@@ -2106,6 +2228,21 @@ if test "$found" = "1" ; then - GCRYPT_CIPHERS="$GCRYPT_CIPHERS rijndael-armv8-ce.lo" - GCRYPT_CIPHERS="$GCRYPT_CIPHERS rijndael-armv8-aarch64-ce.lo" - ;; -+ powerpc64le-*-*) -+ # Build with the crypto extension implementation -+ GCRYPT_CIPHERS="$GCRYPT_CIPHERS rijndael-ppc.lo" -+ GCRYPT_CIPHERS="$GCRYPT_CIPHERS rijndael-ppc9le.lo" -+ ;; -+ powerpc64-*-*) -+ # Big-Endian. -+ # Build with the crypto extension implementation -+ GCRYPT_CIPHERS="$GCRYPT_CIPHERS rijndael-ppc.lo" -+ ;; -+ powerpc-*-*) -+ # Big-Endian. 
-+ # Build with the crypto extension implementation -+ GCRYPT_CIPHERS="$GCRYPT_CIPHERS rijndael-ppc.lo" -+ ;; - esac - - case "$mpi_cpu_arch" in -@@ -2555,6 +2692,7 @@ case "$mpi_cpu_arch" in - ;; - ppc) - AC_DEFINE(HAVE_CPU_ARCH_PPC, 1, [Defined for PPC platforms]) -+ GCRYPT_HWF_MODULES="hwf-ppc.lo" - ;; - arm) - AC_DEFINE(HAVE_CPU_ARCH_ARM, 1, [Defined for ARM platforms]) -@@ -2653,6 +2791,7 @@ GCRY_MSG_SHOW([Try using Intel AVX: - GCRY_MSG_SHOW([Try using Intel AVX2: ],[$avx2support]) - GCRY_MSG_SHOW([Try using ARM NEON: ],[$neonsupport]) - GCRY_MSG_SHOW([Try using ARMv8 crypto: ],[$armcryptosupport]) -+GCRY_MSG_SHOW([Try using PPC crypto: ],[$ppccryptosupport]) - GCRY_MSG_SHOW([],[]) - - if test "x${gpg_config_script_warn}" != x; then -diff -up libgcrypt-1.8.5/src/cipher.h.aes-perf libgcrypt-1.8.5/src/cipher.h ---- libgcrypt-1.8.5/src/cipher.h.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/src/cipher.h 2020-04-22 18:29:41.681862019 +0200 -@@ -158,6 +158,9 @@ size_t _gcry_aes_ocb_crypt (gcry_cipher_ - const void *inbuf_arg, size_t nblocks, int encrypt); - size_t _gcry_aes_ocb_auth (gcry_cipher_hd_t c, const void *abuf_arg, - size_t nblocks); -+void _gcry_aes_xts_crypt (void *context, unsigned char *tweak, -+ void *outbuf_arg, const void *inbuf_arg, -+ size_t nblocks, int encrypt); - - /*-- blowfish.c --*/ - void _gcry_blowfish_cfb_dec (void *context, unsigned char *iv, -diff -up libgcrypt-1.8.5/src/cipher-proto.h.aes-perf libgcrypt-1.8.5/src/cipher-proto.h ---- libgcrypt-1.8.5/src/cipher-proto.h.aes-perf 2020-04-22 18:29:41.643862745 +0200 -+++ libgcrypt-1.8.5/src/cipher-proto.h 2020-04-22 18:29:41.681862019 +0200 -@@ -132,7 +132,8 @@ typedef struct gcry_pk_spec - /* Type for the cipher_setkey function. */ - typedef gcry_err_code_t (*gcry_cipher_setkey_t) (void *c, - const unsigned char *key, -- unsigned keylen); -+ unsigned keylen, -+ gcry_cipher_hd_t hd); - - /* Type for the cipher_encrypt function. 
*/ - typedef unsigned int (*gcry_cipher_encrypt_t) (void *c, -diff -up libgcrypt-1.8.5/src/g10lib.h.aes-perf libgcrypt-1.8.5/src/g10lib.h ---- libgcrypt-1.8.5/src/g10lib.h.aes-perf 2020-04-22 18:29:41.660862420 +0200 -+++ libgcrypt-1.8.5/src/g10lib.h 2020-04-22 18:50:46.990661309 +0200 -@@ -233,7 +233,9 @@ char **_gcry_strtokenize (const char *st - - #define HWF_INTEL_RDTSC (1 << 20) - -- -+#define HWF_PPC_VCRYPTO (1 << 22) -+#define HWF_PPC_ARCH_3_00 (1 << 23) -+#define HWF_PPC_ARCH_2_07 (1 << 24) - - gpg_err_code_t _gcry_disable_hw_feature (const char *name); - void _gcry_detect_hw_features (void); -diff -up libgcrypt-1.8.5/src/hwf-common.h.aes-perf libgcrypt-1.8.5/src/hwf-common.h ---- libgcrypt-1.8.5/src/hwf-common.h.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/src/hwf-common.h 2020-04-22 18:29:41.682862000 +0200 -@@ -22,6 +22,6 @@ - - unsigned int _gcry_hwf_detect_x86 (void); - unsigned int _gcry_hwf_detect_arm (void); -- -+unsigned int _gcry_hwf_detect_ppc (void); - - #endif /*HWF_COMMON_H*/ -diff -up libgcrypt-1.8.5/src/hwfeatures.c.aes-perf libgcrypt-1.8.5/src/hwfeatures.c ---- libgcrypt-1.8.5/src/hwfeatures.c.aes-perf 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/src/hwfeatures.c 2020-04-22 18:51:48.326487879 +0200 -@@ -42,6 +42,7 @@ static struct - const char *desc; - } hwflist[] = - { -+#if defined(HAVE_CPU_ARCH_X86) - { HWF_PADLOCK_RNG, "padlock-rng" }, - { HWF_PADLOCK_AES, "padlock-aes" }, - { HWF_PADLOCK_SHA, "padlock-sha" }, -@@ -58,11 +59,17 @@ static struct - { HWF_INTEL_AVX2, "intel-avx2" }, - { HWF_INTEL_FAST_VPGATHER, "intel-fast-vpgather" }, - { HWF_INTEL_RDTSC, "intel-rdtsc" }, -+#elif defined(HAVE_CPU_ARCH_ARM) - { HWF_ARM_NEON, "arm-neon" }, - { HWF_ARM_AES, "arm-aes" }, - { HWF_ARM_SHA1, "arm-sha1" }, - { HWF_ARM_SHA2, "arm-sha2" }, -- { HWF_ARM_PMULL, "arm-pmull" } -+ { HWF_ARM_PMULL, "arm-pmull" }, -+#elif defined(HAVE_CPU_ARCH_PPC) -+ { HWF_PPC_VCRYPTO, "ppc-vcrypto" }, -+ { HWF_PPC_ARCH_3_00, 
"ppc-arch_3_00" }, -+ { HWF_PPC_ARCH_2_07, "ppc-arch_2_07" }, -+#endif - }; - - /* A bit vector with the hardware features which shall not be used. -@@ -207,12 +214,14 @@ _gcry_detect_hw_features (void) - { - hw_features = _gcry_hwf_detect_x86 (); - } --#endif /* HAVE_CPU_ARCH_X86 */ --#if defined (HAVE_CPU_ARCH_ARM) -+#elif defined (HAVE_CPU_ARCH_ARM) - { - hw_features = _gcry_hwf_detect_arm (); - } --#endif /* HAVE_CPU_ARCH_ARM */ -- -+#elif defined (HAVE_CPU_ARCH_PPC) -+ { -+ hw_features = _gcry_hwf_detect_ppc (); -+ } -+#endif - hw_features &= ~disabled_hw_features; - } -diff -up libgcrypt-1.8.5/src/hwf-ppc.c.aes-perf libgcrypt-1.8.5/src/hwf-ppc.c ---- libgcrypt-1.8.5/src/hwf-ppc.c.aes-perf 2020-04-22 18:29:41.682862000 +0200 -+++ libgcrypt-1.8.5/src/hwf-ppc.c 2020-04-22 18:50:21.396150974 +0200 -@@ -0,0 +1,243 @@ -+/* hwf-ppc.c - Detect hardware features - PPC part -+ * Copyright (C) 2013,2019 Jussi Kivilinna -+ * Copyright (C) 2019 Shawn Landden -+ * -+ * This file is part of Libgcrypt. -+ * -+ * Libgcrypt is free software; you can redistribute it and/or modify -+ * it under the terms of the GNU Lesser General Public License as -+ * published by the Free Software Foundation; either version 2.1 of -+ * the License, or (at your option) any later version. -+ * -+ * Libgcrypt is distributed in the hope that it will be useful, -+ * but WITHOUT ANY WARRANTY; without even the implied warranty of -+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the -+ * GNU Lesser General Public License for more details. -+ * -+ * You should have received a copy of the GNU Lesser General Public -+ * License along with this program; if not, see . 
-+ */ -+ -+#include -+#include -+#include -+#include -+#include -+#include -+#include -+#if defined(HAVE_SYS_AUXV_H) && (defined(HAVE_GETAUXVAL) || \ -+ defined(HAVE_ELF_AUX_INFO)) -+#include -+#endif -+ -+#include "g10lib.h" -+#include "hwf-common.h" -+ -+#if !defined (__powerpc__) && !defined (__powerpc64__) -+# error Module build for wrong CPU. -+#endif -+ -+ -+#if defined(HAVE_SYS_AUXV_H) && defined(HAVE_ELF_AUX_INFO) && \ -+ !defined(HAVE_GETAUXVAL) && defined(AT_HWCAP) -+#define HAVE_GETAUXVAL -+static unsigned long getauxval(unsigned long type) -+{ -+ unsigned long auxval = 0; -+ int err; -+ -+ /* FreeBSD provides 'elf_aux_info' function that does the same as -+ * 'getauxval' on Linux. */ -+ -+ err = elf_aux_info (type, &auxval, sizeof(auxval)); -+ if (err) -+ { -+ errno = err; -+ auxval = 0; -+ } -+ -+ return auxval; -+} -+#endif -+ -+ -+#undef HAS_SYS_AT_HWCAP -+#if defined(__linux__) || \ -+ (defined(HAVE_SYS_AUXV_H) && defined(HAVE_GETAUXVAL)) -+#define HAS_SYS_AT_HWCAP 1 -+ -+struct feature_map_s -+ { -+ unsigned int hwcap_flag; -+ unsigned int hwcap2_flag; -+ unsigned int hwf_flag; -+ }; -+ -+#if defined(__powerpc__) || defined(__powerpc64__) -+ -+/* Note: These macros have same values on Linux and FreeBSD. 
*/ -+#ifndef AT_HWCAP -+# define AT_HWCAP 16 -+#endif -+#ifndef AT_HWCAP2 -+# define AT_HWCAP2 26 -+#endif -+ -+#ifndef PPC_FEATURE2_ARCH_2_07 -+# define PPC_FEATURE2_ARCH_2_07 0x80000000 -+#endif -+#ifndef PPC_FEATURE2_VEC_CRYPTO -+# define PPC_FEATURE2_VEC_CRYPTO 0x02000000 -+#endif -+#ifndef PPC_FEATURE2_ARCH_3_00 -+# define PPC_FEATURE2_ARCH_3_00 0x00800000 -+#endif -+ -+static const struct feature_map_s ppc_features[] = -+ { -+ { 0, PPC_FEATURE2_ARCH_2_07, HWF_PPC_ARCH_2_07 }, -+#ifdef ENABLE_PPC_CRYPTO_SUPPORT -+ { 0, PPC_FEATURE2_VEC_CRYPTO, HWF_PPC_VCRYPTO }, -+#endif -+ { 0, PPC_FEATURE2_ARCH_3_00, HWF_PPC_ARCH_3_00 }, -+ }; -+#endif -+ -+static int -+get_hwcap(unsigned int *hwcap, unsigned int *hwcap2) -+{ -+ struct { unsigned long a_type; unsigned long a_val; } auxv; -+ FILE *f; -+ int err = -1; -+ static int hwcap_initialized = 0; -+ static unsigned int stored_hwcap = 0; -+ static unsigned int stored_hwcap2 = 0; -+ -+ if (hwcap_initialized) -+ { -+ *hwcap = stored_hwcap; -+ *hwcap2 = stored_hwcap2; -+ return 0; -+ } -+ -+#if 0 // TODO: configure.ac detection for __builtin_cpu_supports -+ // TODO: move to 'detect_ppc_builtin_cpu_supports' -+#if defined(__GLIBC__) && defined(__GNUC__) && __GNUC__ >= 6 -+ /* __builtin_cpu_supports returns 0 if glibc support doesn't exist, so -+ * we can only trust positive results. 
*/ -+#ifdef ENABLE_PPC_CRYPTO_SUPPORT -+ if (__builtin_cpu_supports("vcrypto")) /* TODO: Configure.ac */ -+ { -+ stored_hwcap2 |= PPC_FEATURE2_VEC_CRYPTO; -+ hwcap_initialized = 1; -+ } -+#endif -+ -+ if (__builtin_cpu_supports("arch_3_00")) /* TODO: Configure.ac */ -+ { -+ stored_hwcap2 |= PPC_FEATURE2_ARCH_3_00; -+ hwcap_initialized = 1; -+ } -+#endif -+#endif -+ -+#if defined(HAVE_SYS_AUXV_H) && defined(HAVE_GETAUXVAL) -+ errno = 0; -+ auxv.a_val = getauxval (AT_HWCAP); -+ if (errno == 0) -+ { -+ stored_hwcap |= auxv.a_val; -+ hwcap_initialized = 1; -+ } -+ -+ if (AT_HWCAP2 >= 0) -+ { -+ errno = 0; -+ auxv.a_val = getauxval (AT_HWCAP2); -+ if (errno == 0) -+ { -+ stored_hwcap2 |= auxv.a_val; -+ hwcap_initialized = 1; -+ } -+ } -+ -+ if (hwcap_initialized && (stored_hwcap || stored_hwcap2)) -+ { -+ *hwcap = stored_hwcap; -+ *hwcap2 = stored_hwcap2; -+ return 0; -+ } -+#endif -+ -+ f = fopen("/proc/self/auxv", "r"); -+ if (!f) -+ { -+ *hwcap = stored_hwcap; -+ *hwcap2 = stored_hwcap2; -+ return -1; -+ } -+ -+ while (fread(&auxv, sizeof(auxv), 1, f) > 0) -+ { -+ if (auxv.a_type == AT_HWCAP) -+ { -+ stored_hwcap |= auxv.a_val; -+ hwcap_initialized = 1; -+ } -+ -+ if (auxv.a_type == AT_HWCAP2) -+ { -+ stored_hwcap2 |= auxv.a_val; -+ hwcap_initialized = 1; -+ } -+ } -+ -+ if (hwcap_initialized) -+ err = 0; -+ -+ fclose(f); -+ -+ *hwcap = stored_hwcap; -+ *hwcap2 = stored_hwcap2; -+ return err; -+} -+ -+static unsigned int -+detect_ppc_at_hwcap(void) -+{ -+ unsigned int hwcap; -+ unsigned int hwcap2; -+ unsigned int features = 0; -+ unsigned int i; -+ -+ if (get_hwcap(&hwcap, &hwcap2) < 0) -+ return features; -+ -+ for (i = 0; i < DIM(ppc_features); i++) -+ { -+ if (hwcap & ppc_features[i].hwcap_flag) -+ features |= ppc_features[i].hwf_flag; -+ -+ if (hwcap2 & ppc_features[i].hwcap2_flag) -+ features |= ppc_features[i].hwf_flag; -+ } -+ -+ return features; -+} -+ -+#endif -+ -+unsigned int -+_gcry_hwf_detect_ppc (void) -+{ -+ unsigned int ret = 0; -+ unsigned int 
broken_hwfs = 0; -+ -+#if defined (HAS_SYS_AT_HWCAP) -+ ret |= detect_ppc_at_hwcap (); -+#endif -+ -+ ret &= ~broken_hwfs; -+ -+ return ret; -+} -diff -up libgcrypt-1.8.5/src/Makefile.am.aes-perf libgcrypt-1.8.5/src/Makefile.am ---- libgcrypt-1.8.5/src/Makefile.am.aes-perf 2018-11-14 14:16:40.000000000 +0100 -+++ libgcrypt-1.8.5/src/Makefile.am 2020-04-22 18:29:41.683861981 +0200 -@@ -66,7 +66,7 @@ libgcrypt_la_SOURCES = \ - hmac256.c hmac256.h context.c context.h \ - ec-context.h - --EXTRA_libgcrypt_la_SOURCES = hwf-x86.c hwf-arm.c -+EXTRA_libgcrypt_la_SOURCES = hwf-x86.c hwf-arm.c hwf-ppc.c - gcrypt_hwf_modules = @GCRYPT_HWF_MODULES@ - - diff --git a/libgcrypt-1.8.5-fips-module.patch b/libgcrypt-1.8.5-fips-module.patch index 60b9ce2..8ffe465 100644 --- a/libgcrypt-1.8.5-fips-module.patch +++ b/libgcrypt-1.8.5-fips-module.patch @@ -23,7 +23,7 @@ diff -up libgcrypt-1.8.5/src/fips.c.fips-module libgcrypt-1.8.5/src/fips.c - actually used. The file itself may be empty. */ - if ( !access (FIPS_FORCE_FILE, F_OK) ) - { -- gcry_assert (!no_fips_mode_required); +- gcry_assert (!_gcry_no_fips_mode_required); - goto leave; - } - @@ -42,7 +42,7 @@ diff -up libgcrypt-1.8.5/src/fips.c.fips-module libgcrypt-1.8.5/src/fips.c - { - /* System is in fips mode. */ - fclose (fp); -- gcry_assert (!no_fips_mode_required); +- gcry_assert (!_gcry_no_fips_mode_required); - goto leave; - } - fclose (fp); @@ -65,7 +65,7 @@ diff -up libgcrypt-1.8.5/src/fips.c.fips-module libgcrypt-1.8.5/src/fips.c - } - /* Fips not not requested, set flag. */ - no_fips_mode_required = 1; + _gcry_no_fips_mode_required = 1; diff -up libgcrypt-1.8.5/src/g10lib.h.fips-module libgcrypt-1.8.5/src/g10lib.h --- libgcrypt-1.8.5/src/g10lib.h.fips-module 2020-04-20 19:07:45.918919759 +0200 @@ -77,9 +77,9 @@ diff -up libgcrypt-1.8.5/src/g10lib.h.fips-module libgcrypt-1.8.5/src/g10lib.h +/* The name of the file used to force libgcrypt into fips mode. 
*/ +#define FIPS_FORCE_FILE "/etc/gcrypt/fips_enabled" + - void _gcry_initialize_fips_mode (int force); + extern int _gcry_no_fips_mode_required; - int _gcry_fips_mode (void); + void _gcry_initialize_fips_mode (int force); diff -up libgcrypt-1.8.5/src/global.c.fips-module libgcrypt-1.8.5/src/global.c --- libgcrypt-1.8.5/src/global.c.fips-module 2020-04-20 19:07:45.919919741 +0200 +++ libgcrypt-1.8.5/src/global.c 2020-04-20 19:07:45.950919149 +0200 diff --git a/libgcrypt-1.8.5-getrandom.patch b/libgcrypt-1.8.5-getrandom.patch index ff2ef3b..1779dc1 100644 --- a/libgcrypt-1.8.5-getrandom.patch +++ b/libgcrypt-1.8.5-getrandom.patch @@ -154,13 +154,13 @@ diff -up libgcrypt-1.8.5/random/rndlinux.c.getrandom libgcrypt-1.8.5/random/rndl --- libgcrypt-1.8.5/random/rndlinux.c.getrandom 2020-04-20 15:01:50.159848963 +0200 +++ libgcrypt-1.8.5/random/rndlinux.c 2020-04-20 16:14:21.901610921 +0200 @@ -35,6 +35,7 @@ - #include - #if defined(__linux__) && defined(HAVE_SYSCALL) + #if defined(__linux__) || !defined(HAVE_GETENTROPY) + #ifdef HAVE_SYSCALL # include +# include - #endif - - #include "types.h" + # ifdef __NR_getrandom + # define getentropy(buf,buflen) syscall (__NR_getrandom, buf, buflen, 0) + # endif @@ -147,12 +148,12 @@ _gcry_rndlinux_gather_random (void (*add if (!add) { @@ -216,25 +216,17 @@ diff -up libgcrypt-1.8.5/random/rndlinux.c.getrandom libgcrypt-1.8.5/random/rndl { if (fd_random == -1) { -@@ -255,6 +272,7 @@ _gcry_rndlinux_gather_random (void (*add - * syscall and not a new device and thus we are not able to use - * select(2) to have a timeout. */ - #if defined(__linux__) && defined(HAVE_SYSCALL) && defined(__NR_getrandom) -+ if (fd == -2) - { - long ret; - size_t nbytes; @@ -270,9 +288,7 @@ _gcry_rndlinux_gather_random (void (*add _gcry_post_syscall (); } while (ret == -1 && errno == EINTR); - if (ret == -1 && errno == ENOSYS) -- ; /* The syscall is not supported - fallback to pulling from fd. 
*/ +- ; /* getentropy is not supported - fallback to pulling from fd. */ - else + if (1) - { /* The syscall is supported. Some sanity checks. */ + { /* getentropy is supported. Some sanity checks. */ if (ret == -1) - log_fatal ("unexpected error from getrandom: %s\n", + log_fatal ("unexpected error from getentropy: %s\n", diff -up libgcrypt-1.8.5/src/g10lib.h.getrandom libgcrypt-1.8.5/src/g10lib.h --- libgcrypt-1.8.5/src/g10lib.h.getrandom 2020-04-20 15:08:16.528538580 +0200 +++ libgcrypt-1.8.5/src/g10lib.h 2020-04-20 15:08:28.641309399 +0200 diff --git a/libgcrypt-1.8.5-intel-cet.patch b/libgcrypt-1.8.5-intel-cet.patch deleted file mode 100644 index f58084e..0000000 --- a/libgcrypt-1.8.5-intel-cet.patch +++ /dev/null @@ -1,348 +0,0 @@ -diff -up libgcrypt-1.8.5/cipher/camellia-aesni-avx2-amd64.S.intel-cet libgcrypt-1.8.5/cipher/camellia-aesni-avx2-amd64.S ---- libgcrypt-1.8.5/cipher/camellia-aesni-avx2-amd64.S.intel-cet 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/camellia-aesni-avx2-amd64.S 2020-01-23 15:36:44.148972045 +0100 -@@ -18,8 +18,9 @@ - * License along with this program; if not, see . - */ - --#ifdef __x86_64 - #include -+ -+#ifdef __x86_64 - #if (defined(HAVE_COMPATIBLE_GCC_AMD64_PLATFORM_AS) || \ - defined(HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS)) && \ - defined(ENABLE_AESNI_SUPPORT) && defined(ENABLE_AVX2_SUPPORT) -diff -up libgcrypt-1.8.5/cipher/camellia-aesni-avx-amd64.S.intel-cet libgcrypt-1.8.5/cipher/camellia-aesni-avx-amd64.S ---- libgcrypt-1.8.5/cipher/camellia-aesni-avx-amd64.S.intel-cet 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/camellia-aesni-avx-amd64.S 2020-01-23 15:36:44.145972088 +0100 -@@ -18,8 +18,9 @@ - * License along with this program; if not, see . 
- */ - --#ifdef __x86_64 - #include -+ -+#ifdef __x86_64 - #if (defined(HAVE_COMPATIBLE_GCC_AMD64_PLATFORM_AS) || \ - defined(HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS)) && \ - defined(ENABLE_AESNI_SUPPORT) && defined(ENABLE_AVX_SUPPORT) -diff -up libgcrypt-1.8.5/cipher/chacha20-avx2-amd64.S.intel-cet libgcrypt-1.8.5/cipher/chacha20-avx2-amd64.S ---- libgcrypt-1.8.5/cipher/chacha20-avx2-amd64.S.intel-cet 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/chacha20-avx2-amd64.S 2020-01-23 15:36:16.780250066 +0100 -@@ -48,6 +48,9 @@ - .globl _gcry_chacha20_amd64_avx2_blocks - ELF(.type _gcry_chacha20_amd64_avx2_blocks,@function;) - _gcry_chacha20_amd64_avx2_blocks: -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - .Lchacha_blocks_avx2_local: - vzeroupper - pushq %rbx -diff -up libgcrypt-1.8.5/cipher/chacha20-sse2-amd64.S.intel-cet libgcrypt-1.8.5/cipher/chacha20-sse2-amd64.S ---- libgcrypt-1.8.5/cipher/chacha20-sse2-amd64.S.intel-cet 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/chacha20-sse2-amd64.S 2020-01-23 15:36:16.783250095 +0100 -@@ -41,6 +41,9 @@ - .globl _gcry_chacha20_amd64_sse2_blocks - ELF(.type _gcry_chacha20_amd64_sse2_blocks,@function;) - _gcry_chacha20_amd64_sse2_blocks: -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - .Lchacha_blocks_sse2_local: - pushq %rbx - pushq %rbp -diff -up libgcrypt-1.8.5/cipher/poly1305-avx2-amd64.S.intel-cet libgcrypt-1.8.5/cipher/poly1305-avx2-amd64.S ---- libgcrypt-1.8.5/cipher/poly1305-avx2-amd64.S.intel-cet 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/poly1305-avx2-amd64.S 2020-01-23 15:36:16.784250105 +0100 -@@ -43,6 +43,9 @@ - .globl _gcry_poly1305_amd64_avx2_init_ext - ELF(.type _gcry_poly1305_amd64_avx2_init_ext,@function;) - _gcry_poly1305_amd64_avx2_init_ext: -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - .Lpoly1305_init_ext_avx2_local: - xor %edx, %edx - vzeroupper -@@ -406,6 +409,9 @@ ELF(.size _gcry_poly1305_amd64_avx2_init - .globl _gcry_poly1305_amd64_avx2_blocks - ELF(.type 
_gcry_poly1305_amd64_avx2_blocks,@function;) - _gcry_poly1305_amd64_avx2_blocks: -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - .Lpoly1305_blocks_avx2_local: - vzeroupper - pushq %rbp -@@ -732,6 +738,9 @@ ELF(.size _gcry_poly1305_amd64_avx2_bloc - .globl _gcry_poly1305_amd64_avx2_finish_ext - ELF(.type _gcry_poly1305_amd64_avx2_finish_ext,@function;) - _gcry_poly1305_amd64_avx2_finish_ext: -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - .Lpoly1305_finish_ext_avx2_local: - vzeroupper - pushq %rbp -diff -up libgcrypt-1.8.5/cipher/poly1305-sse2-amd64.S.intel-cet libgcrypt-1.8.5/cipher/poly1305-sse2-amd64.S ---- libgcrypt-1.8.5/cipher/poly1305-sse2-amd64.S.intel-cet 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/poly1305-sse2-amd64.S 2020-01-23 15:36:16.787250134 +0100 -@@ -42,6 +42,9 @@ - .globl _gcry_poly1305_amd64_sse2_init_ext - ELF(.type _gcry_poly1305_amd64_sse2_init_ext,@function;) - _gcry_poly1305_amd64_sse2_init_ext: -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - .Lpoly1305_init_ext_x86_local: - xor %edx, %edx - pushq %r12 -@@ -288,6 +291,9 @@ ELF(.size _gcry_poly1305_amd64_sse2_init - .globl _gcry_poly1305_amd64_sse2_finish_ext - ELF(.type _gcry_poly1305_amd64_sse2_finish_ext,@function;) - _gcry_poly1305_amd64_sse2_finish_ext: -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - .Lpoly1305_finish_ext_x86_local: - pushq %rbp - movq %rsp, %rbp -@@ -439,6 +445,9 @@ ELF(.size _gcry_poly1305_amd64_sse2_fini - .globl _gcry_poly1305_amd64_sse2_blocks - ELF(.type _gcry_poly1305_amd64_sse2_blocks,@function;) - _gcry_poly1305_amd64_sse2_blocks: -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - .Lpoly1305_blocks_x86_local: - pushq %rbp - movq %rsp, %rbp -diff -up libgcrypt-1.8.5/cipher/serpent-avx2-amd64.S.intel-cet libgcrypt-1.8.5/cipher/serpent-avx2-amd64.S ---- libgcrypt-1.8.5/cipher/serpent-avx2-amd64.S.intel-cet 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/cipher/serpent-avx2-amd64.S 2020-01-23 15:36:44.151972003 +0100 -@@ -18,8 +18,9 @@ - * License along 
with this program; if not, see . - */ - --#ifdef __x86_64 - #include -+ -+#ifdef __x86_64 - #if (defined(HAVE_COMPATIBLE_GCC_AMD64_PLATFORM_AS) || \ - defined(HAVE_COMPATIBLE_GCC_WIN64_PLATFORM_AS)) && defined(USE_SERPENT) && \ - defined(ENABLE_AVX2_SUPPORT) -diff -up libgcrypt-1.8.5/configure.ac.intel-cet libgcrypt-1.8.5/configure.ac ---- libgcrypt-1.8.5/configure.ac.intel-cet 2019-08-29 15:00:08.000000000 +0200 -+++ libgcrypt-1.8.5/configure.ac 2020-01-23 15:35:28.147774463 +0100 -@@ -95,6 +95,12 @@ AH_TOP([ - AH_BOTTOM([ - #define _GCRYPT_IN_LIBGCRYPT 1 - -+/* Add .note.gnu.property section for Intel CET in assembler sources -+ when CET is enabled. */ -+#if defined(__ASSEMBLER__) && defined(__CET__) -+# include -+#endif -+ - /* If the configure check for endianness has been disabled, get it from - OS macros. This is intended for making fat binary builds on OS X. */ - #ifdef DISABLED_ENDIAN_CHECK -diff -up libgcrypt-1.8.5/mpi/config.links.intel-cet libgcrypt-1.8.5/mpi/config.links ---- libgcrypt-1.8.5/mpi/config.links.intel-cet 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/mpi/config.links 2020-01-23 15:35:46.398952954 +0100 -@@ -382,6 +382,16 @@ if test x"$mpi_cpu_arch" = x ; then - mpi_cpu_arch="unknown" - fi - -+# Add .note.gnu.property section for Intel CET in assembler sources -+# when CET is enabled. 
*/ -+if test x"$mpi_cpu_arch" = xx86 ; then -+ cat <> ./mpi/asm-syntax.h -+ -+#if defined(__ASSEMBLER__) && defined(__CET__) -+# include -+#endif -+EOF -+fi - - # Make sysdep.h - echo '/* created by config.links - do not edit */' >./mpi/sysdep.h -diff -up libgcrypt-1.8.5/mpi/i386/mpih-add1.S.intel-cet libgcrypt-1.8.5/mpi/i386/mpih-add1.S ---- libgcrypt-1.8.5/mpi/i386/mpih-add1.S.intel-cet 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/mpi/i386/mpih-add1.S 2020-01-23 15:37:40.470175379 +0100 -@@ -52,6 +52,10 @@ C_SYMBOL_NAME(_gcry_mpih_add_n:) - movl 20(%esp),%edx /* s2_ptr */ - movl 24(%esp),%ecx /* size */ - -+#if defined __CET__ && (__CET__ & 1) != 0 -+ pushl %ebx -+#endif -+ - movl %ecx,%eax - shrl $3,%ecx /* compute count for unrolled loop */ - negl %eax -@@ -63,6 +67,9 @@ C_SYMBOL_NAME(_gcry_mpih_add_n:) - subl %eax,%esi /* ... by a constant when we ... */ - subl %eax,%edx /* ... enter the loop */ - shrl $2,%eax /* restore previous value */ -+#if defined __CET__ && (__CET__ & 1) != 0 -+ leal -4(,%eax,4),%ebx /* Count for 4-byte endbr32 */ -+#endif - #ifdef PIC - /* Calculate start address in loop for PIC. Due to limitations in some - assemblers, Loop-L0-3 cannot be put into the leal */ -@@ -75,29 +82,53 @@ L0: leal (%eax,%eax,8),%eax - /* Calculate start address in loop for non-PIC. 
*/ - leal (Loop - 3)(%eax,%eax,8),%eax - #endif -+#if defined __CET__ && (__CET__ & 1) != 0 -+ addl %ebx,%eax /* Adjust for endbr32 */ -+#endif - jmp *%eax /* jump into loop */ - ALIGN (3) - Loop: movl (%esi),%eax - adcl (%edx),%eax - movl %eax,(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 4(%esi),%eax - adcl 4(%edx),%eax - movl %eax,4(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 8(%esi),%eax - adcl 8(%edx),%eax - movl %eax,8(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 12(%esi),%eax - adcl 12(%edx),%eax - movl %eax,12(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 16(%esi),%eax - adcl 16(%edx),%eax - movl %eax,16(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 20(%esi),%eax - adcl 20(%edx),%eax - movl %eax,20(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 24(%esi),%eax - adcl 24(%edx),%eax - movl %eax,24(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 28(%esi),%eax - adcl 28(%edx),%eax - movl %eax,28(%edi) -@@ -110,6 +141,10 @@ Loop: movl (%esi),%eax - sbbl %eax,%eax - negl %eax - -+#if defined __CET__ && (__CET__ & 1) != 0 -+ popl %ebx -+#endif -+ - popl %esi - popl %edi - ret -diff -up libgcrypt-1.8.5/mpi/i386/mpih-sub1.S.intel-cet libgcrypt-1.8.5/mpi/i386/mpih-sub1.S ---- libgcrypt-1.8.5/mpi/i386/mpih-sub1.S.intel-cet 2017-11-23 19:16:58.000000000 +0100 -+++ libgcrypt-1.8.5/mpi/i386/mpih-sub1.S 2020-01-23 15:37:40.472175351 +0100 -@@ -53,6 +53,10 @@ C_SYMBOL_NAME(_gcry_mpih_sub_n:) - movl 20(%esp),%edx /* s2_ptr */ - movl 24(%esp),%ecx /* size */ - -+#if defined __CET__ && (__CET__ & 1) != 0 -+ pushl %ebx -+#endif -+ - movl %ecx,%eax - shrl $3,%ecx /* compute count for unrolled loop */ - negl %eax -@@ -64,6 +68,9 @@ C_SYMBOL_NAME(_gcry_mpih_sub_n:) - subl %eax,%esi /* ... by a constant when we ... */ - subl %eax,%edx /* ... 
enter the loop */ - shrl $2,%eax /* restore previous value */ -+#if defined __CET__ && (__CET__ & 1) != 0 -+ leal -4(,%eax,4),%ebx /* Count for 4-byte endbr32 */ -+#endif - #ifdef PIC - /* Calculate start address in loop for PIC. Due to limitations in some - assemblers, Loop-L0-3 cannot be put into the leal */ -@@ -76,29 +83,53 @@ L0: leal (%eax,%eax,8),%eax - /* Calculate start address in loop for non-PIC. */ - leal (Loop - 3)(%eax,%eax,8),%eax - #endif -+#if defined __CET__ && (__CET__ & 1) != 0 -+ addl %ebx,%eax /* Adjust for endbr32 */ -+#endif - jmp *%eax /* jump into loop */ - ALIGN (3) - Loop: movl (%esi),%eax - sbbl (%edx),%eax - movl %eax,(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 4(%esi),%eax - sbbl 4(%edx),%eax - movl %eax,4(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 8(%esi),%eax - sbbl 8(%edx),%eax - movl %eax,8(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 12(%esi),%eax - sbbl 12(%edx),%eax - movl %eax,12(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 16(%esi),%eax - sbbl 16(%edx),%eax - movl %eax,16(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 20(%esi),%eax - sbbl 20(%edx),%eax - movl %eax,20(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 24(%esi),%eax - sbbl 24(%edx),%eax - movl %eax,24(%edi) -+#ifdef _CET_ENDBR -+ _CET_ENDBR -+#endif - movl 28(%esi),%eax - sbbl 28(%edx),%eax - movl %eax,28(%edi) -@@ -111,6 +142,10 @@ Loop: movl (%esi),%eax - sbbl %eax,%eax - negl %eax - -+#if defined __CET__ && (__CET__ & 1) != 0 -+ popl %ebx -+#endif -+ - popl %esi - popl %edi - ret diff --git a/libgcrypt-1.8.5-use-fipscheck.patch b/libgcrypt-1.8.5-use-fipscheck.patch index 298ec4c..46145d8 100644 --- a/libgcrypt-1.8.5-use-fipscheck.patch +++ b/libgcrypt-1.8.5-use-fipscheck.patch @@ -75,15 +75,3 @@ diff -up libgcrypt-1.8.5/src/fips.c.use-fipscheck libgcrypt-1.8.5/src/fips.c p = strrchr (fname, '/'); if (p) p++; -diff -up libgcrypt-1.8.5/src/Makefile.am.use-fipscheck libgcrypt-1.8.5/src/Makefile.am 
---- libgcrypt-1.8.5/src/Makefile.am.use-fipscheck 2020-04-23 10:18:36.237764702 +0200 -+++ libgcrypt-1.8.5/src/Makefile.am 2020-04-23 10:19:03.186247455 +0200 -@@ -125,7 +125,7 @@ libgcrypt_la_LIBADD = $(gcrypt_res) \ - ../cipher/libcipher.la \ - ../random/librandom.la \ - ../mpi/libmpi.la \ -- ../compat/libcompat.la $(GPG_ERROR_LIBS) -+ ../compat/libcompat.la $(GPG_ERROR_LIBS) -ldl - - - dumpsexp_SOURCES = dumpsexp.c diff --git a/libgcrypt-1.9.0-kdf-missing-terminator.patch b/libgcrypt-1.9.0-kdf-missing-terminator.patch new file mode 100644 index 0000000..1d60f3f --- /dev/null +++ b/libgcrypt-1.9.0-kdf-missing-terminator.patch @@ -0,0 +1,31 @@ +From: Jussi Kivilinna +Date: Tue, 19 Jan 2021 18:04:30 +0000 (+0200) +Subject: kdf: add missing null-terminator for self-test test-vector array +X-Git-Url: http://git.gnupg.org/cgi-bin/gitweb.cgi?p=libgcrypt.git;a=commitdiff_plain;h=c6425a5537294dfe2beaafc9105f7af4ceac677f + +kdf: add missing null-terminator for self-test test-vector array + +* cipher/kdf.c (selftest_pbkdf2): Add null-terminator to TV array. +-- + +This was causing kdf self-test to fail on s390x builds. 
+ +GnuPG-bug-id: 5254 +Signed-off-by: Jussi Kivilinna +--- + +diff --git a/cipher/kdf.c b/cipher/kdf.c +index 3d707bd0..b916a3f8 100644 +--- a/cipher/kdf.c ++++ b/cipher/kdf.c +@@ -452,7 +452,8 @@ selftest_pbkdf2 (int extended, selftest_report_func_t report) + "\x34\x8c\x89\xdb\xcb\xd3\x2b\x2f\x32\xd8\x14\xb8\x11\x6e\x84\xcf" + "\x2b\x17\x34\x7e\xbc\x18\x00\x18\x1c\x4e\x2a\x1f\xb8\xdd\x53\xe1" + "\xc6\x35\x51\x8c\x7d\xac\x47\xe9" +- } ++ }, ++ { NULL } + }; + const char *what; + const char *errtxt; + diff --git a/libgcrypt.spec b/libgcrypt.spec index 6c82b2e..3cc62db 100644 --- a/libgcrypt.spec +++ b/libgcrypt.spec @@ -1,17 +1,17 @@ Name: libgcrypt -Version: 1.8.7 +Version: 1.9.0 Release: 1%{?dist} -URL: http://www.gnupg.org/ +URL: https://www.gnupg.org/ Source0: libgcrypt-%{version}-hobbled.tar.xz # The original libgcrypt sources now contain potentially patented ECC # cipher support. We have to remove it in the tarball we ship with # the hobble-libgcrypt script. # (We replace it with RH approved ECC in Source4-5) -# tar -xf libgcrypt-1.8.7.tar.bz2 -# pushd libgcrypt-1.8.7 && ../hobble-libgcrypt && popd -# tar -cvJf libgcrypt-1.8.7-hobbled.tar.xz libgcrypt-1.8.7 -#Source0: ftp://ftp.gnupg.org/gcrypt/libgcrypt/libgcrypt-{version}.tar.bz2 -#Source1: ftp://ftp.gnupg.org/gcrypt/libgcrypt/libgcrypt-{version}.tar.bz2.sig +# tar -xf libgcrypt-x.y.z.tar.bz2 +# pushd libgcrypt-x.y.z && ../hobble-libgcrypt && popd +# tar -cvJf libgcrypt-x.y.z-hobbled.tar.xz libgcrypt-x.y.z +#Source0: https://www.gnupg.org/ftp/gcrypt/libgcrypt/libgcrypt-{version}.tar.bz2 +#Source1: https://www.gnupg.org/ftp/gcrypt/libgcrypt/libgcrypt-{version}.tar.bz2.sig Source2: wk@g10code.com Source3: hobble-libgcrypt # Approved ECC support @@ -40,18 +40,14 @@ Patch18: libgcrypt-1.8.3-fips-ctor.patch Patch22: libgcrypt-1.7.3-fips-reqs.patch # Do not try to open /dev/urandom if getrandom() works Patch24: libgcrypt-1.8.5-getrandom.patch -# CMAC selftest for FIPS POST -Patch25: 
libgcrypt-1.8.3-cmac-selftest.patch # Continuous FIPS entropy test Patch26: libgcrypt-1.8.3-fips-enttest.patch # Disable non-approved FIPS hashes in the enforced FIPS mode Patch27: libgcrypt-1.8.3-md-fips-enforce.patch -# Intel CET support, in upstream master -Patch28: libgcrypt-1.8.5-intel-cet.patch # FIPS module is redefined a little bit (implicit by kernel FIPS mode) Patch30: libgcrypt-1.8.5-fips-module.patch -# Backported AES performance improvements -Patch31: libgcrypt-1.8.5-aes-perf.patch +# Missing terminator in the kdf vectors causing s390x builds failing +Patch31: libgcrypt-1.9.0-kdf-missing-terminator.patch %global gcrylibdir %{_libdir} %global gcrysoname libgcrypt.so.20 @@ -67,12 +63,13 @@ BuildRequires: gawk, libgpg-error-devel >= 1.11, pkgconfig # This is needed only when patching the .texi doc. BuildRequires: texinfo BuildRequires: autoconf, automake, libtool +BuildRequires: make %package devel Summary: Development files for the %{name} package License: LGPLv2+ and GPLv2+ Requires: libgpg-error-devel -Requires: %{name} = %{version}-%{release} +Requires: %{name}%{?_isa} = %{version}-%{release} Requires: pkgconfig %description @@ -97,12 +94,10 @@ applications using libgcrypt. 
%patch18 -p1 -b .fips-ctor %patch22 -p1 -b .fips-reqs %patch24 -p1 -b .getrandom -%patch25 -p1 -b .cmac-selftest %patch26 -p1 -b .fips-enttest %patch27 -p1 -b .fips-enforce -%patch28 -p1 -b .intel-cet %patch30 -p1 -b .fips-module -%patch31 -p1 -b .aes-perf +%patch31 -p1 -b .kdf-terminator cp %{SOURCE4} cipher/ cp %{SOURCE5} %{SOURCE6} tests/ @@ -194,7 +189,6 @@ install -m644 %{SOURCE7} $RPM_BUILD_ROOT/etc/gcrypt/random.conf %{gcrylibdir}/libgcrypt.so.*.* %{gcrylibdir}/%{gcrysoname} %{gcrylibdir}/.%{gcrysoname}.hmac -%{!?_licensedir:%global license %%doc} %license COPYING.LIB %doc AUTHORS NEWS THANKS @@ -210,10 +204,12 @@ install -m644 %{SOURCE7} $RPM_BUILD_ROOT/etc/gcrypt/random.conf %{_mandir}/man1/* %{_infodir}/gcrypt.info* -%{!?_licensedir:%global license %%doc} %license COPYING %changelog +* Wed Jan 20 2021 Jakub Jelen - 1.9.0-1 +- New upstream release (#1917878) + * Tue Nov 24 2020 Jakub Jelen - 1.8.7-1 - new upstream release (#1891123) diff --git a/sources b/sources index b46a259..69b2536 100644 --- a/sources +++ b/sources @@ -1 +1 @@ -SHA512 (libgcrypt-1.8.7-hobbled.tar.xz) = e9655f5387f08d18dcfcef3bce737aa7bb0242a5ebcb2be0dd2892fad3761496e3e51b283b61e8537b30b157a3ef5657a5bf4288c7d3aec94982b0c6da749876 +SHA512 (libgcrypt-1.9.0-hobbled.tar.xz) = d4ea9a1b732b05f605f0c99dd2b1e9747539bf2b6a8ff2fad7ab5350888f68b7f0b94bdd9253356ec9c8e6d3b87b5c76bc8dc4fbb3950acd8354b691f1f2ad3e diff --git a/t-mpi-point.c b/t-mpi-point.c index 2a8c36c..9a6190e 100644 --- a/t-mpi-point.c +++ b/t-mpi-point.c @@ -1188,11 +1188,11 @@ main (int argc, char **argv) if (!gcry_check_version (GCRYPT_VERSION)) die ("version mismatch\n"); - xgcry_control (GCRYCTL_DISABLE_SECMEM, 0); - xgcry_control (GCRYCTL_ENABLE_QUICK_RANDOM, 0); + xgcry_control ((GCRYCTL_DISABLE_SECMEM, 0)); + xgcry_control ((GCRYCTL_ENABLE_QUICK_RANDOM, 0)); if (debug) - xgcry_control (GCRYCTL_SET_DEBUG_FLAGS, 1u, 0); - xgcry_control (GCRYCTL_INITIALIZATION_FINISHED, 0); + xgcry_control ((GCRYCTL_SET_DEBUG_FLAGS, 1u, 0)); + 
xgcry_control ((GCRYCTL_INITIALIZATION_FINISHED, 0)); set_get_point (); context_alloc ();