|
|
7711c0 |
From 8c411fd4449466ba46b272d60b739015a6b47e19 Mon Sep 17 00:00:00 2001
|
|
|
7711c0 |
From: "Daniel P. Berrange" <berrange@redhat.com>
|
|
|
7711c0 |
Date: Wed, 24 Apr 2019 10:30:26 +0200
|
|
|
7711c0 |
Subject: [PATCH 07/12] crypto: convert xts_tweak_encdec to use xts_uint128
|
|
|
7711c0 |
type
|
|
|
7711c0 |
MIME-Version: 1.0
|
|
|
7711c0 |
Content-Type: text/plain; charset=UTF-8
|
|
|
7711c0 |
Content-Transfer-Encoding: 8bit
|
|
|
7711c0 |
|
|
|
7711c0 |
RH-Author: Daniel P. Berrange <berrange@redhat.com>
|
|
|
7711c0 |
Message-id: <20190424103030.2925-6-berrange@redhat.com>
|
|
|
7711c0 |
Patchwork-id: 85890
|
|
|
7711c0 |
O-Subject: [RHEL-7.7 qemu-kvm-rhev PATCH 5/9] crypto: convert xts_tweak_encdec to use xts_uint128 type
|
|
|
7711c0 |
Bugzilla: 1666336
|
|
|
7711c0 |
RH-Acked-by: Philippe Mathieu-Daudé <philmd@redhat.com>
|
|
|
7711c0 |
RH-Acked-by: John Snow <jsnow@redhat.com>
|
|
|
7711c0 |
RH-Acked-by: Eric Blake <eblake@redhat.com>
|
|
|
7711c0 |
|
|
|
7711c0 |
Using 64-bit arithmetic increases the performance for xts-aes-128
|
|
|
7711c0 |
when built with gcrypt:
|
|
|
7711c0 |
|
|
|
7711c0 |
Encrypt: 272 MB/s -> 355 MB/s
|
|
|
7711c0 |
Decrypt: 275 MB/s -> 362 MB/s
|
|
|
7711c0 |
|
|
|
7711c0 |
Reviewed-by: Alberto Garcia <berto@igalia.com>
|
|
|
7711c0 |
Signed-off-by: Daniel P. Berrangé <berrange@redhat.com>
|
|
|
7711c0 |
(cherry picked from commit db217c69f0849add67cfa2cd6601c329398be12c)
|
|
|
7711c0 |
Signed-off-by: Miroslav Rezanina <mrezanin@redhat.com>
|
|
|
7711c0 |
---
|
|
|
7711c0 |
crypto/xts.c | 84 +++++++++++++++++++++++++++++++++++++++++-------------------
|
|
|
7711c0 |
1 file changed, 58 insertions(+), 26 deletions(-)
|
|
|
7711c0 |
|
|
|
7711c0 |
diff --git a/crypto/xts.c b/crypto/xts.c
|
|
|
7711c0 |
index bee23f8..0ad231f 100644
|
|
|
7711c0 |
--- a/crypto/xts.c
|
|
|
7711c0 |
+++ b/crypto/xts.c
|
|
|
7711c0 |
@@ -31,6 +31,13 @@ typedef union {
|
|
|
7711c0 |
uint64_t u[2];
|
|
|
7711c0 |
} xts_uint128;
|
|
|
7711c0 |
|
|
|
7711c0 |
+static inline void xts_uint128_xor(xts_uint128 *D,
|
|
|
7711c0 |
+ const xts_uint128 *S1,
|
|
|
7711c0 |
+ const xts_uint128 *S2)
|
|
|
7711c0 |
+{
|
|
|
7711c0 |
+ D->u[0] = S1->u[0] ^ S2->u[0];
|
|
|
7711c0 |
+ D->u[1] = S1->u[1] ^ S2->u[1];
|
|
|
7711c0 |
+}
|
|
|
7711c0 |
|
|
|
7711c0 |
static void xts_mult_x(uint8_t *I)
|
|
|
7711c0 |
{
|
|
|
7711c0 |
@@ -60,25 +67,19 @@ static void xts_mult_x(uint8_t *I)
|
|
|
7711c0 |
*/
|
|
|
7711c0 |
static void xts_tweak_encdec(const void *ctx,
|
|
|
7711c0 |
xts_cipher_func *func,
|
|
|
7711c0 |
- const uint8_t *src,
|
|
|
7711c0 |
- uint8_t *dst,
|
|
|
7711c0 |
- uint8_t *iv)
|
|
|
7711c0 |
+ const xts_uint128 *src,
|
|
|
7711c0 |
+ xts_uint128 *dst,
|
|
|
7711c0 |
+ xts_uint128 *iv)
|
|
|
7711c0 |
{
|
|
|
7711c0 |
- unsigned long x;
|
|
|
7711c0 |
-
|
|
|
7711c0 |
/* tweak encrypt block i */
|
|
|
7711c0 |
- for (x = 0; x < XTS_BLOCK_SIZE; x++) {
|
|
|
7711c0 |
- dst[x] = src[x] ^ iv[x];
|
|
|
7711c0 |
- }
|
|
|
7711c0 |
+ xts_uint128_xor(dst, src, iv);
|
|
|
7711c0 |
|
|
|
7711c0 |
- func(ctx, XTS_BLOCK_SIZE, dst, dst);
|
|
|
7711c0 |
+ func(ctx, XTS_BLOCK_SIZE, dst->b, dst->b);
|
|
|
7711c0 |
|
|
|
7711c0 |
- for (x = 0; x < XTS_BLOCK_SIZE; x++) {
|
|
|
7711c0 |
- dst[x] = dst[x] ^ iv[x];
|
|
|
7711c0 |
- }
|
|
|
7711c0 |
+ xts_uint128_xor(dst, dst, iv);
|
|
|
7711c0 |
|
|
|
7711c0 |
/* LFSR the tweak */
|
|
|
7711c0 |
- xts_mult_x(iv);
|
|
|
7711c0 |
+ xts_mult_x(iv->b);
|
|
|
7711c0 |
}
|
|
|
7711c0 |
|
|
|
7711c0 |
|
|
|
7711c0 |
@@ -110,20 +111,34 @@ void xts_decrypt(const void *datactx,
|
|
|
7711c0 |
/* encrypt the iv */
|
|
|
7711c0 |
encfunc(tweakctx, XTS_BLOCK_SIZE, T.b, iv);
|
|
|
7711c0 |
|
|
|
7711c0 |
- for (i = 0; i < lim; i++) {
|
|
|
7711c0 |
- xts_tweak_encdec(datactx, decfunc, src, dst, T.b);
|
|
|
7711c0 |
-
|
|
|
7711c0 |
- src += XTS_BLOCK_SIZE;
|
|
|
7711c0 |
- dst += XTS_BLOCK_SIZE;
|
|
|
7711c0 |
+ if (QEMU_PTR_IS_ALIGNED(src, sizeof(uint64_t)) &&
|
|
|
7711c0 |
+ QEMU_PTR_IS_ALIGNED(dst, sizeof(uint64_t))) {
|
|
|
7711c0 |
+ xts_uint128 *S = (xts_uint128 *)src;
|
|
|
7711c0 |
+ xts_uint128 *D = (xts_uint128 *)dst;
|
|
|
7711c0 |
+ for (i = 0; i < lim; i++, S++, D++) {
|
|
|
7711c0 |
+ xts_tweak_encdec(datactx, decfunc, S, D, &T);
|
|
|
7711c0 |
+ }
|
|
|
7711c0 |
+ } else {
|
|
|
7711c0 |
+ xts_uint128 D;
|
|
|
7711c0 |
+
|
|
|
7711c0 |
+ for (i = 0; i < lim; i++) {
|
|
|
7711c0 |
+ memcpy(&D, src, XTS_BLOCK_SIZE);
|
|
|
7711c0 |
+ xts_tweak_encdec(datactx, decfunc, &D, &D, &T);
|
|
|
7711c0 |
+ memcpy(dst, &D, XTS_BLOCK_SIZE);
|
|
|
7711c0 |
+ src += XTS_BLOCK_SIZE;
|
|
|
7711c0 |
+ dst += XTS_BLOCK_SIZE;
|
|
|
7711c0 |
+ }
|
|
|
7711c0 |
}
|
|
|
7711c0 |
|
|
|
7711c0 |
/* if length is not a multiple of XTS_BLOCK_SIZE then */
|
|
|
7711c0 |
if (mo > 0) {
|
|
|
7711c0 |
+ xts_uint128 S, D;
|
|
|
7711c0 |
memcpy(&CC, &T, XTS_BLOCK_SIZE);
|
|
|
7711c0 |
xts_mult_x(CC.b);
|
|
|
7711c0 |
|
|
|
7711c0 |
/* PP = tweak decrypt block m-1 */
|
|
|
7711c0 |
- xts_tweak_encdec(datactx, decfunc, src, PP.b, CC.b);
|
|
|
7711c0 |
+ memcpy(&S, src, XTS_BLOCK_SIZE);
|
|
|
7711c0 |
+      xts_tweak_encdec(datactx, decfunc, &S, &PP, &CC);
|
|
|
7711c0 |
|
|
|
7711c0 |
/* Pm = first length % XTS_BLOCK_SIZE bytes of PP */
|
|
|
7711c0 |
for (i = 0; i < mo; i++) {
|
|
|
7711c0 |
@@ -135,7 +150,8 @@ void xts_decrypt(const void *datactx,
|
|
|
7711c0 |
}
|
|
|
7711c0 |
|
|
|
7711c0 |
/* Pm-1 = Tweak uncrypt CC */
|
|
|
7711c0 |
- xts_tweak_encdec(datactx, decfunc, CC.b, dst, T.b);
|
|
|
7711c0 |
+ xts_tweak_encdec(datactx, decfunc, &CC, &D, &T);
|
|
|
7711c0 |
+ memcpy(dst, &D, XTS_BLOCK_SIZE);
|
|
|
7711c0 |
}
|
|
|
7711c0 |
|
|
|
7711c0 |
/* Decrypt the iv back */
|
|
|
7711c0 |
@@ -171,17 +187,32 @@ void xts_encrypt(const void *datactx,
|
|
|
7711c0 |
/* encrypt the iv */
|
|
|
7711c0 |
encfunc(tweakctx, XTS_BLOCK_SIZE, T.b, iv);
|
|
|
7711c0 |
|
|
|
7711c0 |
- for (i = 0; i < lim; i++) {
|
|
|
7711c0 |
- xts_tweak_encdec(datactx, encfunc, src, dst, T.b);
|
|
|
7711c0 |
+ if (QEMU_PTR_IS_ALIGNED(src, sizeof(uint64_t)) &&
|
|
|
7711c0 |
+ QEMU_PTR_IS_ALIGNED(dst, sizeof(uint64_t))) {
|
|
|
7711c0 |
+ xts_uint128 *S = (xts_uint128 *)src;
|
|
|
7711c0 |
+ xts_uint128 *D = (xts_uint128 *)dst;
|
|
|
7711c0 |
+ for (i = 0; i < lim; i++, S++, D++) {
|
|
|
7711c0 |
+ xts_tweak_encdec(datactx, encfunc, S, D, &T);
|
|
|
7711c0 |
+ }
|
|
|
7711c0 |
+ } else {
|
|
|
7711c0 |
+ xts_uint128 D;
|
|
|
7711c0 |
+
|
|
|
7711c0 |
+ for (i = 0; i < lim; i++) {
|
|
|
7711c0 |
+ memcpy(&D, src, XTS_BLOCK_SIZE);
|
|
|
7711c0 |
+ xts_tweak_encdec(datactx, encfunc, &D, &D, &T);
|
|
|
7711c0 |
+ memcpy(dst, &D, XTS_BLOCK_SIZE);
|
|
|
7711c0 |
|
|
|
7711c0 |
- dst += XTS_BLOCK_SIZE;
|
|
|
7711c0 |
- src += XTS_BLOCK_SIZE;
|
|
|
7711c0 |
+ dst += XTS_BLOCK_SIZE;
|
|
|
7711c0 |
+ src += XTS_BLOCK_SIZE;
|
|
|
7711c0 |
+ }
|
|
|
7711c0 |
}
|
|
|
7711c0 |
|
|
|
7711c0 |
/* if length is not a multiple of XTS_BLOCK_SIZE then */
|
|
|
7711c0 |
if (mo > 0) {
|
|
|
7711c0 |
+ xts_uint128 S, D;
|
|
|
7711c0 |
/* CC = tweak encrypt block m-1 */
|
|
|
7711c0 |
- xts_tweak_encdec(datactx, encfunc, src, CC.b, T.b);
|
|
|
7711c0 |
+ memcpy(&S, src, XTS_BLOCK_SIZE);
|
|
|
7711c0 |
+ xts_tweak_encdec(datactx, encfunc, &S, &CC, &T);
|
|
|
7711c0 |
|
|
|
7711c0 |
/* Cm = first length % XTS_BLOCK_SIZE bytes of CC */
|
|
|
7711c0 |
for (i = 0; i < mo; i++) {
|
|
|
7711c0 |
@@ -194,7 +225,8 @@ void xts_encrypt(const void *datactx,
|
|
|
7711c0 |
}
|
|
|
7711c0 |
|
|
|
7711c0 |
/* Cm-1 = Tweak encrypt PP */
|
|
|
7711c0 |
- xts_tweak_encdec(datactx, encfunc, PP.b, dst, T.b);
|
|
|
7711c0 |
+ xts_tweak_encdec(datactx, encfunc, &PP, &D, &T);
|
|
|
7711c0 |
+ memcpy(dst, &D, XTS_BLOCK_SIZE);
|
|
|
7711c0 |
}
|
|
|
7711c0 |
|
|
|
7711c0 |
/* Decrypt the iv back */
|
|
|
7711c0 |
--
|
|
|
7711c0 |
1.8.3.1
|
|
|
7711c0 |
|