From stefbon at gmail.com Mon Aug 2 19:59:23 2021 From: stefbon at gmail.com (Stef Bon) Date: Mon, 2 Aug 2021 19:59:23 +0200 Subject: SNTRUP761. Message-ID: Hi, does anyone have experience with sntrup761 (X25519)? It's a relatively new key-encapsulation mechanism, which should protect against (still hypothetical) quantum computers. Does libgcrypt support it? Stef From jussi.kivilinna at iki.fi Tue Aug 10 20:50:47 2021 From: jussi.kivilinna at iki.fi (Jussi Kivilinna) Date: Tue, 10 Aug 2021 21:50:47 +0300 Subject: [PATCH] tests/bench-slope: allow non-FIPS ECC benchmarking in non-FIPS mode Message-ID: <20210810185047.2048710-1-jussi.kivilinna@iki.fi> * tests/bench-slope.c (_ecc_bench): Check for 'in_fips_mode'. -- Signed-off-by: Jussi Kivilinna --- tests/bench-slope.c | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/tests/bench-slope.c b/tests/bench-slope.c index 5bdb391e..d1b7f24f 100644 --- a/tests/bench-slope.c +++ b/tests/bench-slope.c @@ -2200,7 +2200,7 @@ ecc_algo_fips_allowed (int algo) case ECC_ALGO_NIST_P256: case ECC_ALGO_NIST_P384: case ECC_ALGO_NIST_P521: - return 1; + return 1; case ECC_ALGO_SECP256K1: case ECC_ALGO_ED25519: case ECC_ALGO_ED448: @@ -2704,7 +2704,7 @@ _ecc_bench (int algo) int i; /* Skip not allowed mechanisms */ - if (!ecc_algo_fips_allowed(algo)) + if (in_fips_mode && !ecc_algo_fips_allowed (algo)) return; algo_name = ecc_algo_name (algo); -- 2.30.2 From jussi.kivilinna at iki.fi Fri Aug 13 17:01:28 2021 From: jussi.kivilinna at iki.fi (Jussi Kivilinna) Date: Fri, 13 Aug 2021 18:01:28 +0300 Subject: [PATCH 3/4] Add x86 HW acceleration for GCM-SIV counter mode In-Reply-To: <20210813150129.3175810-1-jussi.kivilinna@iki.fi> References: <20210813150129.3175810-1-jussi.kivilinna@iki.fi> Message-ID: <20210813150129.3175810-3-jussi.kivilinna@iki.fi> * cipher/cipher-gcm-siv.c (do_ctr_le32): Use bulk function if available. * cipher/cipher-internal.h (cipher_bulk_ops): Add 'ctr32le_enc'. * cipher/rijndael-aesni.c (_gcry_aes_aesni_ctr32le_enc): New. * cipher/rijndael-vaes-avx2-amd64.S (_gcry_vaes_avx2_ctr32le_enc_amd64, .Lle_addd_*): New. * cipher/rijndael-vaes.c (_gcry_vaes_avx2_ctr32le_enc_amd64) (_gcry_aes_vaes_ctr32le_enc): New. * cipher/rijndael.c (_gcry_aes_aesni_ctr32le_enc) (_gcry_aes_vaes_ctr32le_enc): New prototypes. (do_setkey): Add setup of 'bulk_ops->ctr32le_enc' for AES-NI and VAES. * tests/basic.c (check_gcm_siv_cipher): Add large test-vector for bulk ops testing. -- Counter mode in GCM-SIV is little-endian on the first 4 bytes of the counter block, unlike regular CTR mode, which works big-endian on the full block.
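To make that difference concrete, here is a small C sketch of the two increment rules (an illustration added for this write-up, not part of the patch; the helper names are invented):

#include <stdint.h>

/* GCM-SIV (RFC 8452): add 1 to the first 4 bytes only, interpreted as a
   little-endian 32-bit integer; the other 12 bytes are never touched. */
static void ctr32le_inc (uint8_t ctr[16])
{
  uint32_t lo = (uint32_t)ctr[0] | ((uint32_t)ctr[1] << 8)
              | ((uint32_t)ctr[2] << 16) | ((uint32_t)ctr[3] << 24);
  lo++;                              /* wraps modulo 2^32 */
  ctr[0] = (uint8_t)lo;
  ctr[1] = (uint8_t)(lo >> 8);
  ctr[2] = (uint8_t)(lo >> 16);
  ctr[3] = (uint8_t)(lo >> 24);
}

/* Regular CTR: add 1 to the whole 16-byte block as one big-endian
   integer, with the carry propagating across all bytes. */
static void ctr_be_inc (uint8_t ctr[16])
{
  int i;
  for (i = 15; i >= 0; i--)
    if (++ctr[i] != 0)               /* stop once a byte does not wrap */
      break;
}

This is also why the .Lle_addd_* constants in the patch are nonzero only in the low 32-bit dword: the paddd/vpaddd lane additions then change just the first four bytes of each counter block.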
Benchmark on AMD Ryzen 7 5800X: Before: AES | nanosecs/byte mebibytes/sec cycles/byte auto Mhz GCM-SIV enc | 1.00 ns/B 953.2 MiB/s 4.85 c/B 4850 GCM-SIV dec | 1.01 ns/B 940.1 MiB/s 4.92 c/B 4850 GCM-SIV auth | 0.118 ns/B 8051 MiB/s 0.575 c/B 4850 After (~6x faster): AES | nanosecs/byte mebibytes/sec cycles/byte auto Mhz GCM-SIV enc | 0.150 ns/B 6367 MiB/s 0.727 c/B 4850 GCM-SIV dec | 0.161 ns/B 5909 MiB/s 0.783 c/B 4850 GCM-SIV auth | 0.118 ns/B 8051 MiB/s 0.574 c/B 4850 Signed-off-by: Jussi Kivilinna --- cipher/cipher-gcm-siv.c | 26 ++- cipher/cipher-internal.h | 2 + cipher/rijndael-aesni.c | 192 +++++++++++++++++ cipher/rijndael-vaes-avx2-amd64.S | 328 ++++++++++++++++++++++++++++++ cipher/rijndael-vaes.c | 21 ++ cipher/rijndael.c | 10 +- tests/basic.c | 139 +++++++++++++ 7 files changed, 708 insertions(+), 10 deletions(-) diff --git a/cipher/cipher-gcm-siv.c b/cipher/cipher-gcm-siv.c index b735d199..813cf579 100644 --- a/cipher/cipher-gcm-siv.c +++ b/cipher/cipher-gcm-siv.c @@ -178,12 +178,21 @@ do_ctr_le32 (gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf, gcry_cipher_encrypt_t enc_fn = c->spec->encrypt; unsigned char tmp[GCRY_SIV_BLOCK_LEN]; unsigned int burn = 0, nburn; - size_t n; + size_t nblocks; if (inbuflen == 0) return; - n = GCRY_SIV_BLOCK_LEN; + /* Use a bulk method if available. */ + nblocks = inbuflen / GCRY_SIV_BLOCK_LEN; + if (nblocks && c->bulk.ctr32le_enc) + { + c->bulk.ctr32le_enc (c->context.c, c->u_ctr.ctr, outbuf, inbuf, nblocks); + inbuf += nblocks * GCRY_SIV_BLOCK_LEN; + outbuf += nblocks * GCRY_SIV_BLOCK_LEN; + inbuflen -= nblocks * GCRY_SIV_BLOCK_LEN; + } + do { nburn = enc_fn (c->context.c, tmp, c->u_ctr.ctr); @@ -195,20 +204,19 @@ do_ctr_le32 (gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf, break; cipher_block_xor(outbuf, inbuf, tmp, GCRY_SIV_BLOCK_LEN); - inbuflen -= n; - outbuf += n; - inbuf += n; + inbuflen -= GCRY_SIV_BLOCK_LEN; + outbuf += GCRY_SIV_BLOCK_LEN; + inbuf += GCRY_SIV_BLOCK_LEN; } while (inbuflen); if (inbuflen) { - n = inbuflen; buf_xor(outbuf, inbuf, tmp, inbuflen); - inbuflen -= n; - outbuf += n; - inbuf += n; + outbuf += inbuflen; + inbuf += inbuflen; + inbuflen -= inbuflen; } wipememory (tmp, sizeof(tmp)); diff --git a/cipher/cipher-internal.h b/cipher/cipher-internal.h index 8b04cff7..0bc85b1a 100644 --- a/cipher/cipher-internal.h +++ b/cipher/cipher-internal.h @@ -157,6 +157,8 @@ typedef struct cipher_bulk_ops const void *inbuf_arg, size_t nblocks); void (*ctr_enc)(void *context, unsigned char *iv, void *outbuf_arg, const void *inbuf_arg, size_t nblocks); + void (*ctr32le_enc)(void *context, unsigned char *iv, void *outbuf_arg, + const void *inbuf_arg, size_t nblocks); size_t (*ocb_crypt)(gcry_cipher_hd_t c, void *outbuf_arg, const void *inbuf_arg, size_t nblocks, int encrypt); size_t (*ocb_auth)(gcry_cipher_hd_t c, const void *abuf_arg, size_t nblocks); diff --git a/cipher/rijndael-aesni.c b/cipher/rijndael-aesni.c index 9dde0489..34a4a447 100644 --- a/cipher/rijndael-aesni.c +++ b/cipher/rijndael-aesni.c @@ -1854,6 +1854,198 @@ _gcry_aes_aesni_ctr_enc (RIJNDAEL_context *ctx, unsigned char *ctr, } +void ASM_FUNC_ATTR +_gcry_aes_aesni_ctr32le_enc (RIJNDAEL_context *ctx, unsigned char *ctr, + unsigned char *outbuf, const unsigned char *inbuf, + size_t nblocks) +{ + static const byte le_addd_const[8][16] __attribute__ ((aligned (16))) = + { + { 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, + { 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, + { 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, + { 4, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0 }, + { 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, + { 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, + { 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 }, + { 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 } + }; + aesni_prepare_2_7_variable; + + aesni_prepare (); + aesni_prepare_2_7(); + + asm volatile ("movdqa %[ctr], %%xmm5\n\t" /* Preload CTR */ + : /* No output */ + : [ctr] "m" (*ctr) + : "memory"); + +#ifdef __x86_64__ + if (nblocks >= 8) + { + aesni_prepare_8_15_variable; + + aesni_prepare_8_15(); + + for ( ;nblocks >= 8 ; nblocks -= 8 ) + { + asm volatile + ("movdqa (%[key]), %%xmm0\n\t" + + "movdqa %%xmm5, %%xmm1\n\t" /* load input blocks */ + "movdqa %%xmm5, %%xmm2\n\t" + "movdqa %%xmm5, %%xmm3\n\t" + "movdqa %%xmm5, %%xmm4\n\t" + "movdqa %%xmm5, %%xmm8\n\t" + "movdqa %%xmm5, %%xmm9\n\t" + "movdqa %%xmm5, %%xmm10\n\t" + "movdqa %%xmm5, %%xmm11\n\t" + + "paddd 0*16(%[addd]), %%xmm2\n\t" + "paddd 1*16(%[addd]), %%xmm3\n\t" + "paddd 2*16(%[addd]), %%xmm4\n\t" + "paddd 3*16(%[addd]), %%xmm8\n\t" + "paddd 4*16(%[addd]), %%xmm9\n\t" + "paddd 5*16(%[addd]), %%xmm10\n\t" + "paddd 6*16(%[addd]), %%xmm11\n\t" + + "pxor %%xmm0, %%xmm1\n\t" /* xmm1 ^= key[0] */ + "pxor %%xmm0, %%xmm2\n\t" /* xmm2 ^= key[0] */ + "pxor %%xmm0, %%xmm3\n\t" /* xmm3 ^= key[0] */ + "pxor %%xmm0, %%xmm4\n\t" /* xmm4 ^= key[0] */ + "pxor %%xmm0, %%xmm8\n\t" /* xmm8 ^= key[0] */ + "pxor %%xmm0, %%xmm9\n\t" /* xmm9 ^= key[0] */ + "pxor %%xmm0, %%xmm10\n\t" /* xmm10 ^= key[0] */ + "pxor %%xmm0, %%xmm11\n\t" /* xmm11 ^= key[0] */ + + "movdqu 0*16(%[inbuf]), %%xmm6\n\t" + "movdqu 1*16(%[inbuf]), %%xmm7\n\t" + "movdqu 2*16(%[inbuf]), %%xmm12\n\t" + "movdqu 3*16(%[inbuf]), %%xmm13\n\t" + "movdqu 4*16(%[inbuf]), %%xmm14\n\t" + "movdqu 5*16(%[inbuf]), %%xmm15\n\t" + + "paddd 7*16(%[addd]), %%xmm5\n\t" + : /* No output */ + : [addd] "r" (&le_addd_const[0][0]), + [inbuf] "r" (inbuf), + [key] "r" (ctx->keyschenc) + : "memory"); + + do_aesni_enc_vec8 (ctx); + + asm volatile + ("pxor %%xmm0, %%xmm6\n\t" + "pxor %%xmm0, %%xmm7\n\t" + "pxor %%xmm0, %%xmm12\n\t" + "pxor %%xmm0, %%xmm13\n\t" + "pxor %%xmm0, %%xmm14\n\t" + "pxor %%xmm0, %%xmm15\n\t" + "aesenclast %%xmm6, %%xmm1\n\t" + "aesenclast %%xmm7, %%xmm2\n\t" + "movdqu 6*16(%[inbuf]), %%xmm6\n\t" + "movdqu 7*16(%[inbuf]), %%xmm7\n\t" + "aesenclast %%xmm12, %%xmm3\n\t" + "aesenclast %%xmm13, %%xmm4\n\t" + "pxor %%xmm0, %%xmm6\n\t" + "pxor %%xmm0, %%xmm7\n\t" + "aesenclast %%xmm14, %%xmm8\n\t" + "aesenclast %%xmm15, %%xmm9\n\t" + "aesenclast %%xmm6, %%xmm10\n\t" + "aesenclast %%xmm7, %%xmm11\n\t" + "movdqu %%xmm1, 0*16(%[outbuf])\n\t" + "movdqu %%xmm2, 1*16(%[outbuf])\n\t" + "movdqu %%xmm3, 2*16(%[outbuf])\n\t" + "movdqu %%xmm4, 3*16(%[outbuf])\n\t" + "movdqu %%xmm8, 4*16(%[outbuf])\n\t" + "movdqu %%xmm9, 5*16(%[outbuf])\n\t" + "movdqu %%xmm10, 6*16(%[outbuf])\n\t" + "movdqu %%xmm11, 7*16(%[outbuf])\n\t" + : /* No output */ + : [inbuf] "r" (inbuf), + [outbuf] "r" (outbuf) + : "memory"); + + outbuf += 8*BLOCKSIZE; + inbuf += 8*BLOCKSIZE; + } + + aesni_cleanup_8_15(); + } +#endif + + for ( ;nblocks >= 4 ; nblocks -= 4 ) + { + asm volatile + ("movdqa %%xmm5, %%xmm1\n\t" /* load input blocks */ + "movdqa %%xmm5, %%xmm2\n\t" + "movdqa %%xmm5, %%xmm3\n\t" + "movdqa %%xmm5, %%xmm4\n\t" + "paddd 0*16(%[addd]), %%xmm2\n\t" + "paddd 1*16(%[addd]), %%xmm3\n\t" + "paddd 2*16(%[addd]), %%xmm4\n\t" + "paddd 3*16(%[addd]), %%xmm5\n\t" + "movdqu 0*16(%[inbuf]), %%xmm6\n\t" + "movdqu 1*16(%[inbuf]), %%xmm7\n\t" + : /* No output */ + : [addd] "r" 
(&le_addd_const[0][0]), + [inbuf] "r" (inbuf) + : "memory"); + + do_aesni_enc_vec4 (ctx); + + asm volatile + ("pxor %%xmm6, %%xmm1\n\t" + "pxor %%xmm7, %%xmm2\n\t" + "movdqu 2*16(%[inbuf]), %%xmm6\n\t" + "movdqu 3*16(%[inbuf]), %%xmm7\n\t" + "movdqu %%xmm1, 0*16(%[outbuf])\n\t" + "movdqu %%xmm2, 1*16(%[outbuf])\n\t" + "pxor %%xmm6, %%xmm3\n\t" + "pxor %%xmm7, %%xmm4\n\t" + "movdqu %%xmm3, 2*16(%[outbuf])\n\t" + "movdqu %%xmm4, 3*16(%[outbuf])\n\t" + : /* No output */ + : [inbuf] "r" (inbuf), + [outbuf] "r" (outbuf) + : "memory"); + + outbuf += 4*BLOCKSIZE; + inbuf += 4*BLOCKSIZE; + } + + for ( ;nblocks; nblocks-- ) + { + asm volatile ("movdqa %%xmm5, %%xmm0\n\t" + "paddd %[add_one], %%xmm5\n\t" + "movdqu %[inbuf], %%xmm6\n\t" + : + : [add_one] "m" (*le_addd_const[0]), + [inbuf] "m" (*inbuf) + : "memory" ); + + do_aesni_enc (ctx); + + asm volatile ("pxor %%xmm0, %%xmm6\n\t" + "movdqu %%xmm6, %[outbuf]\n\t" + : [outbuf] "=m" (*outbuf) + : + : "memory" ); + + outbuf += BLOCKSIZE; + inbuf += BLOCKSIZE; + } + + asm volatile ("movdqa %%xmm5, %[ctr]\n\t" + : [ctr] "=m" (*ctr) + : + : "memory" ); + + aesni_cleanup (); + aesni_cleanup_2_7 (); +} + + unsigned int ASM_FUNC_ATTR _gcry_aes_aesni_decrypt (const RIJNDAEL_context *ctx, unsigned char *dst, const unsigned char *src) diff --git a/cipher/rijndael-vaes-avx2-amd64.S b/cipher/rijndael-vaes-avx2-amd64.S index c4deea9b..d4ecf59f 100644 --- a/cipher/rijndael-vaes-avx2-amd64.S +++ b/cipher/rijndael-vaes-avx2-amd64.S @@ -1107,6 +1107,290 @@ _gcry_vaes_avx2_ctr_enc_amd64: CFI_ENDPROC(); ELF(.size _gcry_vaes_avx2_ctr_enc_amd64,.-_gcry_vaes_avx2_ctr_enc_amd64) +/********************************************************************** + Little-endian 32-bit CTR-mode encryption (GCM-SIV) + **********************************************************************/ +ELF(.type _gcry_vaes_avx2_ctr32le_enc_amd64, at function) +.globl _gcry_vaes_avx2_ctr32le_enc_amd64 +_gcry_vaes_avx2_ctr32le_enc_amd64: + /* input: + * %rdi: round keys + * %rsi: counter + * %rdx: dst + * %rcx: src + * %r8: nblocks + * %r9: nrounds + */ + CFI_STARTPROC(); + + vbroadcasti128 (%rsi), %ymm15; // CTR + + /* Process 16 blocks per loop. */ +.align 8 +.Lctr32le_enc_blk16: + cmpq $16, %r8; + jb .Lctr32le_enc_blk8; + + leaq -16(%r8), %r8; + + vbroadcasti128 (0 * 16)(%rdi), %ymm8; + + /* Increment counters. 
*/ + vpaddd .Lle_addd_0 rRIP, %ymm15, %ymm0; + vpaddd .Lle_addd_2 rRIP, %ymm15, %ymm1; + vpaddd .Lle_addd_4 rRIP, %ymm15, %ymm2; + vpaddd .Lle_addd_6 rRIP, %ymm15, %ymm3; + vpaddd .Lle_addd_8 rRIP, %ymm15, %ymm4; + vpaddd .Lle_addd_10 rRIP, %ymm15, %ymm5; + vpaddd .Lle_addd_12 rRIP, %ymm15, %ymm6; + vpaddd .Lle_addd_14 rRIP, %ymm15, %ymm7; + + vpaddd .Lle_addd_16_2 rRIP, %ymm15, %ymm15; + + /* AES rounds */ + XOR8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (1 * 16)(%rdi), %ymm8; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (2 * 16)(%rdi), %ymm8; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (3 * 16)(%rdi), %ymm8; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (4 * 16)(%rdi), %ymm8; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (5 * 16)(%rdi), %ymm8; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (6 * 16)(%rdi), %ymm8; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (7 * 16)(%rdi), %ymm8; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (8 * 16)(%rdi), %ymm8; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (9 * 16)(%rdi), %ymm8; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (10 * 16)(%rdi), %ymm8; + cmpl $12, %r9d; + jb .Lctr32le_enc_blk16_last; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (11 * 16)(%rdi), %ymm8; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (12 * 16)(%rdi), %ymm8; + jz .Lctr32le_enc_blk16_last; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (13 * 16)(%rdi), %ymm8; + VAESENC8(%ymm8, %ymm0, %ymm1, %ymm2, %ymm3, %ymm4, %ymm5, %ymm6, %ymm7); + vbroadcasti128 (14 * 16)(%rdi), %ymm8; + + /* Last round and output handling. */ + .Lctr32le_enc_blk16_last: + vpxor (0 * 16)(%rcx), %ymm8, %ymm9; /* Xor src to last round key. */ + vpxor (2 * 16)(%rcx), %ymm8, %ymm10; + vpxor (4 * 16)(%rcx), %ymm8, %ymm11; + vpxor (6 * 16)(%rcx), %ymm8, %ymm12; + vaesenclast %ymm9, %ymm0, %ymm0; + vaesenclast %ymm10, %ymm1, %ymm1; + vaesenclast %ymm11, %ymm2, %ymm2; + vaesenclast %ymm12, %ymm3, %ymm3; + vpxor (8 * 16)(%rcx), %ymm8, %ymm9; + vpxor (10 * 16)(%rcx), %ymm8, %ymm10; + vpxor (12 * 16)(%rcx), %ymm8, %ymm11; + vpxor (14 * 16)(%rcx), %ymm8, %ymm8; + leaq (16 * 16)(%rcx), %rcx; + vaesenclast %ymm9, %ymm4, %ymm4; + vaesenclast %ymm10, %ymm5, %ymm5; + vaesenclast %ymm11, %ymm6, %ymm6; + vaesenclast %ymm8, %ymm7, %ymm7; + vmovdqu %ymm0, (0 * 16)(%rdx); + vmovdqu %ymm1, (2 * 16)(%rdx); + vmovdqu %ymm2, (4 * 16)(%rdx); + vmovdqu %ymm3, (6 * 16)(%rdx); + vmovdqu %ymm4, (8 * 16)(%rdx); + vmovdqu %ymm5, (10 * 16)(%rdx); + vmovdqu %ymm6, (12 * 16)(%rdx); + vmovdqu %ymm7, (14 * 16)(%rdx); + leaq (16 * 16)(%rdx), %rdx; + + jmp .Lctr32le_enc_blk16; + + /* Handle trailing eight blocks. */ +.align 8 +.Lctr32le_enc_blk8: + cmpq $8, %r8; + jb .Lctr32le_enc_blk4; + + leaq -8(%r8), %r8; + + vbroadcasti128 (0 * 16)(%rdi), %ymm4; + + /* Increment counters. 
*/ + vpaddd .Lle_addd_0 rRIP, %ymm15, %ymm0; + vpaddd .Lle_addd_2 rRIP, %ymm15, %ymm1; + vpaddd .Lle_addd_4 rRIP, %ymm15, %ymm2; + vpaddd .Lle_addd_6 rRIP, %ymm15, %ymm3; + + vpaddd .Lle_addd_8_2 rRIP, %ymm15, %ymm15; + + /* AES rounds */ + XOR4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (1 * 16)(%rdi), %ymm4; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (2 * 16)(%rdi), %ymm4; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (3 * 16)(%rdi), %ymm4; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (4 * 16)(%rdi), %ymm4; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (5 * 16)(%rdi), %ymm4; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (6 * 16)(%rdi), %ymm4; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (7 * 16)(%rdi), %ymm4; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (8 * 16)(%rdi), %ymm4; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (9 * 16)(%rdi), %ymm4; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (10 * 16)(%rdi), %ymm4; + cmpl $12, %r9d; + jb .Lctr32le_enc_blk8_last; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (11 * 16)(%rdi), %ymm4; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (12 * 16)(%rdi), %ymm4; + jz .Lctr32le_enc_blk8_last; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (13 * 16)(%rdi), %ymm4; + VAESENC4(%ymm4, %ymm0, %ymm1, %ymm2, %ymm3); + vbroadcasti128 (14 * 16)(%rdi), %ymm4; + + /* Last round and output handling. */ + .Lctr32le_enc_blk8_last: + vpxor (0 * 16)(%rcx), %ymm4, %ymm5; /* Xor src to last round key. */ + vpxor (2 * 16)(%rcx), %ymm4, %ymm6; + vpxor (4 * 16)(%rcx), %ymm4, %ymm7; + vpxor (6 * 16)(%rcx), %ymm4, %ymm4; + leaq (8 * 16)(%rcx), %rcx; + vaesenclast %ymm5, %ymm0, %ymm0; + vaesenclast %ymm6, %ymm1, %ymm1; + vaesenclast %ymm7, %ymm2, %ymm2; + vaesenclast %ymm4, %ymm3, %ymm3; + vmovdqu %ymm0, (0 * 16)(%rdx); + vmovdqu %ymm1, (2 * 16)(%rdx); + vmovdqu %ymm2, (4 * 16)(%rdx); + vmovdqu %ymm3, (6 * 16)(%rdx); + leaq (8 * 16)(%rdx), %rdx; + + /* Handle trailing four blocks. */ +.align 8 +.Lctr32le_enc_blk4: + cmpq $4, %r8; + jb .Lctr32le_enc_blk1; + + leaq -4(%r8), %r8; + + vbroadcasti128 (0 * 16)(%rdi), %ymm4; + + /* Increment counters. 
*/ + vpaddd .Lle_addd_0 rRIP, %ymm15, %ymm0; + vpaddd .Lle_addd_2 rRIP, %ymm15, %ymm1; + + vpaddd .Lle_addd_4_2 rRIP, %ymm15, %ymm15; + + /* AES rounds */ + XOR2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (1 * 16)(%rdi), %ymm4; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (2 * 16)(%rdi), %ymm4; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (3 * 16)(%rdi), %ymm4; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (4 * 16)(%rdi), %ymm4; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (5 * 16)(%rdi), %ymm4; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (6 * 16)(%rdi), %ymm4; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (7 * 16)(%rdi), %ymm4; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (8 * 16)(%rdi), %ymm4; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (9 * 16)(%rdi), %ymm4; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (10 * 16)(%rdi), %ymm4; + cmpl $12, %r9d; + jb .Lctr32le_enc_blk4_last; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (11 * 16)(%rdi), %ymm4; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (12 * 16)(%rdi), %ymm4; + jz .Lctr32le_enc_blk4_last; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (13 * 16)(%rdi), %ymm4; + VAESENC2(%ymm4, %ymm0, %ymm1); + vbroadcasti128 (14 * 16)(%rdi), %ymm4; + + /* Last round and output handling. */ + .Lctr32le_enc_blk4_last: + vpxor (0 * 16)(%rcx), %ymm4, %ymm5; /* Xor src to last round key. */ + vpxor (2 * 16)(%rcx), %ymm4, %ymm6; + leaq (4 * 16)(%rcx), %rcx; + vaesenclast %ymm5, %ymm0, %ymm0; + vaesenclast %ymm6, %ymm1, %ymm1; + vmovdqu %ymm0, (0 * 16)(%rdx); + vmovdqu %ymm1, (2 * 16)(%rdx); + leaq (4 * 16)(%rdx), %rdx; + + /* Process trailing one to three blocks, one per loop. */ +.align 8 +.Lctr32le_enc_blk1: + cmpq $1, %r8; + jb .Ldone_ctr32le_enc; + + leaq -1(%r8), %r8; + + /* Load and increment counter. */ + vmovdqu %xmm15, %xmm0; + vpaddd .Lle_addd_1 rRIP, %xmm15, %xmm15; + + /* AES rounds. */ + vpxor (0 * 16)(%rdi), %xmm0, %xmm0; + vaesenc (1 * 16)(%rdi), %xmm0, %xmm0; + vaesenc (2 * 16)(%rdi), %xmm0, %xmm0; + vaesenc (3 * 16)(%rdi), %xmm0, %xmm0; + vaesenc (4 * 16)(%rdi), %xmm0, %xmm0; + vaesenc (5 * 16)(%rdi), %xmm0, %xmm0; + vaesenc (6 * 16)(%rdi), %xmm0, %xmm0; + vaesenc (7 * 16)(%rdi), %xmm0, %xmm0; + vaesenc (8 * 16)(%rdi), %xmm0, %xmm0; + vaesenc (9 * 16)(%rdi), %xmm0, %xmm0; + vmovdqa (10 * 16)(%rdi), %xmm1; + cmpl $12, %r9d; + jb .Lctr32le_enc_blk1_last; + vaesenc %xmm1, %xmm0, %xmm0; + vaesenc (11 * 16)(%rdi), %xmm0, %xmm0; + vmovdqa (12 * 16)(%rdi), %xmm1; + jz .Lctr32le_enc_blk1_last; + vaesenc %xmm1, %xmm0, %xmm0; + vaesenc (13 * 16)(%rdi), %xmm0, %xmm0; + vmovdqa (14 * 16)(%rdi), %xmm1; + + /* Last round and output handling. */ + .Lctr32le_enc_blk1_last: + vpxor (%rcx), %xmm1, %xmm1; /* Xor src to last round key. */ + leaq 16(%rcx), %rcx; + vaesenclast %xmm1, %xmm0, %xmm0; /* Last round and xor with xmm1. 
*/ + vmovdqu %xmm0, (%rdx); + leaq 16(%rdx), %rdx; + + jmp .Lctr32le_enc_blk1; + +.align 8 +.Ldone_ctr32le_enc: + vmovdqu %xmm15, (%rsi); + vzeroall; + ret + CFI_ENDPROC(); +ELF(.size _gcry_vaes_avx2_ctr32le_enc_amd64,.-_gcry_vaes_avx2_ctr32le_enc_amd64) + /********************************************************************** OCB-mode encryption/decryption **********************************************************************/ @@ -2677,6 +2961,50 @@ _gcry_vaes_consts: .byte 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14 .Lbige_addb_15: .byte 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15 + +.Lle_addd_0: + .byte 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_1: + .byte 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_2: + .byte 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_3: + .byte 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_4: + .byte 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_5: + .byte 5, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_6: + .byte 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_7: + .byte 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_8: + .byte 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_9: + .byte 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_10: + .byte 10, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_11: + .byte 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_12: + .byte 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_13: + .byte 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_14: + .byte 14, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_15: + .byte 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + +.Lle_addd_4_2: + .byte 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + .byte 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_8_2: + .byte 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + .byte 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 +.Lle_addd_16_2: + .byte 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + .byte 16, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0 + .Lxts_gfmul_clmul: .long 0x00, 0x87, 0x00, 0x00 .long 0x00, 0x87, 0x00, 0x00 diff --git a/cipher/rijndael-vaes.c b/cipher/rijndael-vaes.c index 56afce17..0d7d1367 100644 --- a/cipher/rijndael-vaes.c +++ b/cipher/rijndael-vaes.c @@ -65,6 +65,14 @@ extern void _gcry_vaes_avx2_ctr_enc_amd64 (const void *keysched, size_t nblocks, unsigned int nrounds) ASM_FUNC_ABI; +extern void _gcry_vaes_avx2_ctr32le_enc_amd64 (const void *keysched, + unsigned char *ctr, + void *outbuf_arg, + const void *inbuf_arg, + size_t nblocks, + unsigned int nrounds) + ASM_FUNC_ABI; + extern void _gcry_vaes_avx2_ocb_crypt_amd64 (const void *keysched, unsigned int blkn, void *outbuf_arg, @@ -127,6 +135,19 @@ _gcry_aes_vaes_ctr_enc (void *context, unsigned char *iv, _gcry_vaes_avx2_ctr_enc_amd64 (keysched, iv, outbuf, inbuf, nblocks, nrounds); } +void +_gcry_aes_vaes_ctr32le_enc (void *context, unsigned char *iv, + void *outbuf, const void *inbuf, + size_t nblocks) +{ + RIJNDAEL_context *ctx = context; + const void *keysched = ctx->keyschenc32; + unsigned int nrounds = ctx->rounds; + + _gcry_vaes_avx2_ctr32le_enc_amd64 (keysched, iv, outbuf, inbuf, nblocks, + nrounds); +} + size_t _gcry_aes_vaes_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, const void *inbuf_arg, size_t nblocks, diff --git a/cipher/rijndael.c b/cipher/rijndael.c index 8df9aac3..c096321f 100644 --- a/cipher/rijndael.c +++ b/cipher/rijndael.c @@ -86,6 +86,9 @@ extern void 
_gcry_aes_aesni_cbc_enc (void *context, unsigned char *iv, extern void _gcry_aes_aesni_ctr_enc (void *context, unsigned char *ctr, void *outbuf_arg, const void *inbuf_arg, size_t nblocks); +extern void _gcry_aes_aesni_ctr32le_enc (void *context, unsigned char *ctr, + void *outbuf_arg, + const void *inbuf_arg, size_t nblocks); extern void _gcry_aes_aesni_cfb_dec (void *context, unsigned char *iv, void *outbuf_arg, const void *inbuf_arg, size_t nblocks); @@ -114,6 +117,9 @@ extern void _gcry_aes_vaes_cbc_dec (void *context, unsigned char *iv, extern void _gcry_aes_vaes_ctr_enc (void *context, unsigned char *ctr, void *outbuf_arg, const void *inbuf_arg, size_t nblocks); +extern void _gcry_aes_vaes_ctr32le_enc (void *context, unsigned char *ctr, + void *outbuf_arg, const void *inbuf_arg, + size_t nblocks); extern size_t _gcry_aes_vaes_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, const void *inbuf_arg, size_t nblocks, int encrypt); @@ -497,6 +503,7 @@ do_setkey (RIJNDAEL_context *ctx, const byte *key, const unsigned keylen, bulk_ops->cbc_enc = _gcry_aes_aesni_cbc_enc; bulk_ops->cbc_dec = _gcry_aes_aesni_cbc_dec; bulk_ops->ctr_enc = _gcry_aes_aesni_ctr_enc; + bulk_ops->ctr32le_enc = _gcry_aes_aesni_ctr32le_enc; bulk_ops->ocb_crypt = _gcry_aes_aesni_ocb_crypt; bulk_ops->ocb_auth = _gcry_aes_aesni_ocb_auth; bulk_ops->xts_crypt = _gcry_aes_aesni_xts_crypt; @@ -509,6 +516,7 @@ do_setkey (RIJNDAEL_context *ctx, const byte *key, const unsigned keylen, bulk_ops->cfb_dec = _gcry_aes_vaes_cfb_dec; bulk_ops->cbc_dec = _gcry_aes_vaes_cbc_dec; bulk_ops->ctr_enc = _gcry_aes_vaes_ctr_enc; + bulk_ops->ctr32le_enc = _gcry_aes_vaes_ctr32le_enc; bulk_ops->ocb_crypt = _gcry_aes_vaes_ocb_crypt; bulk_ops->xts_crypt = _gcry_aes_vaes_xts_crypt; } @@ -516,7 +524,7 @@ do_setkey (RIJNDAEL_context *ctx, const byte *key, const unsigned keylen, } #endif #ifdef USE_PADLOCK - else if (hwfeatures & HWF_PADLOCK_AES && keylen == 128/8) + else if ((hwfeatures & HWF_PADLOCK_AES) && keylen == 128/8) { ctx->encrypt_fn = _gcry_aes_padlock_encrypt; ctx->decrypt_fn = _gcry_aes_padlock_decrypt; diff --git a/tests/basic.c b/tests/basic.c index 148aaec6..4ce90165 100644 --- a/tests/basic.c +++ b/tests/basic.c @@ -5916,6 +5916,145 @@ check_gcm_siv_cipher (void) "\x18\xce\x4f\x0b\x8c\xb4\xd0\xca\xc6\x5f\xea\x8f\x79\x25\x7b\x20" "\x88\x8e\x53\xe7\x22\x99\xe5\x6d", "\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + }, + /* Large block testing */ + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x72\x94\x7b\x5d\x3c\x14\xc0\xa6\x27\x8d\x8d\xee\xbd\xe8\x8c\x6a" + "\x21\x34\xce\x64\x8f\x01\x01\xc6\xe4\x5d\xed\x2e\xb9\xec\xac\x53" + "\xf2\x07\xed\x60\xc8\xa2\x2f\x2e\x83\x0e\xf2\xbc\x42\x51\x24\x3b" + "\x41\x4f\x26\x84\xf0\x25\x69\x3f\x38\x29\xfb\xe9\xbb\x1a\x94\xd1" + "\x94\x0c\xce\xad\x8e\x66\xeb\xda\xc9\x1c\x72\x5a\x7f\x95\x4f\x9c" + "\x02\x27\x79\x8f\xe7\x51\x51\x3d\x1e\x2c\x4e\xcd\x07\xe5\xd1\xf0" + "\x6c\x95\x82\x37\x00\x50\x5e\xff\x82\xfb\x69\x0b\x4e\x7f\x10\x12" + "\x7d\x18\x7f\xa8\x88\x59\xfb\x55\x9b\x70\x36\xfc\xde\x75\xed\x77" + "\xf9\x09\x87\x29\x30\x7c\x81\x41\x12\xc2\xbd\xcd\x9f\x86\x98\x38" + "\x96\x44\x4c\xda\x2e\xbe\x7a\xfb\xdd\x4a\x4e\xa0\x84\x94\xd5\x76" + "\xa6\xae\x02\xcb\x1b\xd4\xd8\xcb\xa5\x24\x28\xe1\x3c\x1e\xdc\x3d" + "\x25\x50\xe7\xfb\x92\xad\xd9\x80\x33\xe0\xb2\x50\x07\xd4\x43\x40" + 
"\x41\x63\x98\x63\xa6\x1a\xfc\x56\x84\x3f\xf7\x4f\x31\xe7\xfe\xc5" + "\x73\x52\xfd\x6d\x9b\xbb\x9b\xf8\x19\xf8\xdc\x9f\x3a\x88\xa6\x7c" + "\xf3\x6b\xbe\xde\xda\x05\x2e\x79\x54\xb9\x3e\x59\x43\x0a\x1b\x16" + "\xcf\x94\x97\x71\x03\x74\x12\x37\xaf\xd4\x0a\x4b\x30\x16\x9b\x8b" + "\x9f\xae\x78\x46\x83\xde\x34\xc5\x31\x71\x67\x5e\xdb\x8d\x93\x71" + "\x90\x03\x72\x00\x9f\x4e\x1e\x7d\xf3\x3f\xf8\x31\xe7\xf6\xb4\x6d" + "\x8d\xdc\xa0\x85\x32\x7b\x32\x40\x8c\xa9\x90\x69\xac\x03\xdb\xd4" + "\xa5\x62\x9c\xfd\x78\xde\xc8\x4a\x18\x67\xa0\xee\x5e\x1e\xad\x1a" + "\x1c\xee\x78\xbd\xea\xdc\xc8\x34\xd1\x92\x20\xa7\x0d\x12\x90\x88" + "\x91\xe4\x6c\x3c\x06\x78\x13\x00\xdc\xc7\x3e\xd7\x91\xf7\xc1\xd6" + "\x5a\x99\x95\x23\xb5\xd8\x3d\x0f\x12\xaf\x25\xd8\xcf\xe8\x27\x7f" + "\xbc\x7c\xe2\xad\x34\x66\x7f\xfb\xf5\xa8\x11\xc1\xe6\x04\x37\x41" + "\xaf\x96\xb3\xb7\xee\x05\xf5\xd7\x7c\xc6\xfe\x2e\xa9\x07\x47\x08" + "\xa4\x50\x65\xc0\x2e\xd7\x27\xd8\x70\x8c\xf1\x12\x30\x4a\x82\xf6" + "\xb7\x68\xdb\x9d\x73\xc2\x82\x3d\x44\xda\xfb\xdd\x03\xc1\xdc\xfc" + "\x3f\x7f\x2e\xe2\xd3\x73\x24\xaf\xd1\x35\xa9\x4f\x3a\xad\x9d\x5c" + "\xd7\xc6\xa3\xb1\x11\xf1\xbb\xa0\x23\xe1\x22\x88\x5b\x10\xb3\xd6" + "\x01\x78\x5f\x9e\x4d\x96\x7b\xeb\x81\x6b\xce\x2d\xf5\x6a\xd1\xa8" + "\xb7\x56\xdd\xd0\x4b\xb0\xc9\x64\x7a\x2f\x63\xcb\xd6\x61\x84\x4b" + "\x9e\x4d\x0b\x2c\x99\xbc\xa2\x94\xf5\x07\x20\xe6\xe9\xc2\xd2\xa6" + "\x1c\x37\xd5\x88\x01\x71\xe2\x16\xcd\x10\x7a\x07\x8b\xf3\xb5\x49" + "\x75\xbe\x0b\xe1\xb2\x28\x15\x88\x2b\xb4\xee\x34\xfd\x67\x30\xd8" + "\xdc\x38\x90\x66\xb6\x51\x90\xb3\xdb\xee\x4e\x66\xc3\x05\xdf\xee" + "\x32\xac\x8b\xa2\x00\xcc\xff\xa2\x52\x19\x79\x7e\x6c\xc9\x68\xb2" + "\xab\xe4\x69\x11\xea\x00\xc9\x2b\x58\x77\x8b\x6c\x28\x0e\x40\x42" + "\xcc\xa7\xb2\x58\xed\x5e\x0b\x19\x49\xe5\x5e\xb1\xb5\x24\x61\x63" + "\x7d\x5b\x6a\x7d\x3d\xc1\x6e\x09\x64\x43\x66\x31\x3c\xb0\x26\x2e" + "\xc8\x27\xf6\x5a\x5f\x22\x94\x42\x62\x2a\xf6\x5a\x7d\xc2\x4a\x0d" + "\xd2\xad\xaa\x0e\xb2\xa4\x29\x1c\xb8\x3b\xaa\xc9\x1d\x1a\x30\xf8" + "\x0b\x35\xb2\x84\x75\xc3\x08\x0c\xe5\x36\xa9\xff\xfe\xb9\xc2\xb7" + "\x51\xab\x2d\x9d\x3e\x1c\x08\x8c\x6c\x64\xe1\xd9\x97\xf4\xfc\x4d" + "\x77\x6d\x0e\xce\x73\x0b\x7f\x57\x41\xed\xdf\x96\x11\xb3\xcc\x61" + "\xe8\x12\x31\x16\x72\x4c\x10\xd4\x52\x14\x4c\x83\xaa\x3c\x29\x6c" + "\x51\x40\x9a\x4d\x9b\xd0\xe6\x7f\xad\x31\x54\x88\x90\xe1\xa8\x0e" + "\xd8\xf4\x84\x11\xdb\x02\x41\xff\xb0\x8a\x92\x95\x97\xd6\x98\x8a" + "\xa0\x43\xda\x70\xbb\x17\xd0\x5a\x81\x3e\xf7\xcf\xc9\x33\xd9\x76" + "\x2f\x53\xa2\xac\xa0\x8a\x73\xe4\x0c\x81\xbe\x26\x01\x3f\x6d\x79" + "\x8a\x37\x59\x5b\x0a\x9a\x10\x6b\x04\x30\xed\xda\x11\x73\x73\xd9" + "\xa2\x9a\xf8\x8e\x67\x82\x5a\x8d\xc0\x52\xe8\x42\x89\xcd\x9c\xb1" + "\x5c\x3d\xd4\x75\x03\x71\x03\x3f\xdc\x6b\x79\xb4\x02\xb6\xac\xc4" + "\x11\x0f\x61\xc8\xf7\x5d\xc6\xbf\x48\x02\xa3\xdc\xa8\x37\x10\x85" + "\xb2\x8d\xbd\xb0\x79\x09\xb0\x5f\x30\x6c\x40\xba\x03\xbb\x22\xcc" + "\x80\xa1\xc3\x91\x88\x25\x92\xbe\xa6\xfa\x14\x77\x56\xb3\xc0\xb5" + "\x69\x8c\x6f\xed\x21\xaf\x0c\x79\x07\x64\xa2\xea\xeb\x47\x2c\x1e" + "\x7d\x6c\x12\xae\x75\xc4\xee\x12\x46\x72\x87\x65\x73\x51\xee\xf8" + "\x08\x63\x20\xa1\x61\xca\x73\x8f\xdf\xcb\x97\xf8\xfc\xb0\x56\xea" + "\x34\x9d\xce\xb8\x91\xb8\xfc\xec\x76\xd0\x71\xb7\x92\xc9\xb2\x28" + "\xee\x0b\x5d\x7c\x4a\xf6\x73\x4d\xc2\x5b\x5b\xae\x7b\xa6\x9c\xba" + "\x29\x7d\x7d\x3c\x29\x01\x04\x2d\xd1\x6c\x8d\x8d\xe5\xb4\x6b\xf9" + "\x2a\x83\xb8\x14\x00\x1c\x91\x72\x5e\x8f\x13\x56\x6d\x9b\x6d\x27" + "\xe8\x22\x55\x4b\x2f\x8a\x31\x16\x98\x03\x51\x73\xa7\x2e\x18\x81" + 
"\x51\x0a\x8f\x6d\x17\xd0\xea\x04\x1c\x11\xb9\x6b\x8e\xaa\x76", + 1023, + "\x24\x28\x57\xa0\x3c\x12\xe2\x6a\x65\x97\x96\x75\xb3\x75\x67\x4c" + "\xd0\xc8\xa7\x07\x7d\x55\x10\xbc\x11\xfe\x45\xbe\x74\x27\x74\xa3" + "\x8b\xb8\x51\x0d\x2e\xf0\x0c\x83\xed\xe4\x8b\x15\xf2\xae\xd8\xd5" + "\xed\xd8\x76\x0d\x0b\x5b\x7b\x5a\x17\x83\xf3\x37\xe5\x81\x90\xe8" + "\x15\xb4\xec\xf8\x5a\x00\x72\xf2\xbb\x68\x0a\xc9\x6c\x4a\x80\x45" + "\x04\xa5\x7e\xfa\xf3\x45\x08\x65\xd0\xd6\xcd\x08\xd1\x1f\x45\x6b" + "\x23\xa9\x0f\xe1\x10\x90\x26\x48\x23\x23\xd2\x60\xc4\x16\x31\x03" + "\x30\xbe\xf0\xd0\xa0\xa7\xcf\xaa\xe6\x27\x0e\xd6\x7b\x79\xf1\x2b" + "\x29\x27\x57\x5a\xb7\xf5\xf8\xb4\x35\x3c\xb5\x10\xb8\xbf\xbe\xad" + "\xdb\x9b\x3d\x35\x63\xac\x9b\xfe\x86\x69\x2c\x54\xf5\x7c\x7f\xd6" + "\xcd\x33\x9f\x57\x7f\xce\x72\x18\x60\x2e\x20\x33\xc9\x13\x9f\x60" + "\x5f\x67\x24\xc2\xbb\x9e\x63\x80\xcf\x96\xb9\xf0\xf1\x9e\xcc\x5a" + "\x61\x02\x6d\xab\x4c\xf7\x13\xe4\x48\x0f\x9f\xc9\x24\x8e\x40\x06" + "\x53\x30\xac\xd9\xe8\xf5\xcd\xd4\xcf\x99\x1f\x3c\x08\x74\x38\x7e" + "\x0b\x76\x0d\xc3\xbd\x2a\x75\x3a\x55\x0c\xc6\xd3\x50\x59\x53\xf2" + "\x14\x1d\x09\xb0\xfa\x8d\xc2\x5e\x6b\x79\xed\x5e\x10\xcc\x1c\xbe" + "\x03\x75\x6d\x23\x44\xe8\xd6\x4d\x8f\xe4\xd6\x1a\x16\x83\x79\x72" + "\xdc\x51\x25\x61\x75\xe7\x00\x09\xdf\xfe\x0c\x6c\x99\xa8\xb0\xfc" + "\xbf\xb6\x7f\xae\x0a\x75\x2e\xd4\x69\xfe\xf1\xb7\x68\xbe\x07\xf0" + "\x9c\xa3\x82\x1f\x4d\x06\xa7\x73\x53\xbd\x98\x99\x93\x1b\xc9\xb6" + "\x04\x5e\xc0\xc1\xd8\x53\x7e\x6f\xd9\x4e\xa0\x37\xab\x71\x92\xc7" + "\x97\x4b\x80\x40\x14\xd0\xd0\xee\x93\xfb\xd5\x76\xce\xd7\x9e\x74" + "\xee\x5d\xe6\x79\xb2\x92\xbb\xff\x63\x19\x61\x64\xcc\x60\x80\x8b" + "\x9c\x1f\x38\x01\x43\xf7\xa6\xcd\x20\x5d\x1d\x2a\x2a\x25\xf4\xd5" + "\x17\x3d\x9c\xd2\x56\x8c\x76\x4e\xa0\xba\x24\x55\x55\xd4\x87\x78" + "\xde\x30\xe8\x6f\x39\xa5\x22\x91\x2b\x7b\x20\x6f\xf6\x44\xff\xec" + "\x29\x4e\xc8\x30\xf7\x23\x18\xef\xb8\x33\xfb\x5f\xf2\xe2\xd8\xc1" + "\xe3\x0f\x24\x19\xff\x99\x7e\xa2\xdb\x78\xde\xc3\x92\x47\x9b\x32" + "\xd6\xfa\xb7\x34\x14\xa4\xde\xd0\xa4\x6f\x7b\x03\x90\x80\x7a\x1e" + "\xb7\xc7\xc3\x75\x98\xa6\x76\xfc\xa6\x38\xa3\xf6\x17\xe8\x90\x25" + "\x28\x66\x41\x78\xe9\x70\x44\xbc\x62\x64\xf5\xaa\xd8\x62\x09\xf3" + "\xff\x05\xd5\x4e\xea\x8d\xf0\x0e\x4e\x3c\x37\xbe\x30\xe6\x69\x15" + "\xc5\x7b\xa6\x67\x1a\x74\xca\x4f\x0f\x5c\xf3\xac\x20\xaa\xc3\xad" + "\xf5\xb3\x58\x8e\x22\x53\x3d\xe8\x0a\x1b\x33\x88\xf1\x8d\x9c\xc8" + "\x5a\xb6\xd3\xde\x1a\x7a\x21\x12\x1e\x70\x0e\x52\x90\x24\xe0\x1c" + "\xaa\x04\x79\xbc\x58\x42\xcb\xe1\x42\x82\xbe\xeb\x17\xd6\xd9\x8c" + "\xc5\xe8\x77\xde\x77\xb5\x31\xf5\x7f\x09\x8b\x7d\x59\x6e\xbd\xe0" + "\x7b\x0f\xe6\x29\x37\x7b\x19\x90\x69\x33\x0a\xbe\x50\xa5\x11\xba" + "\xc5\x90\x78\x31\xfc\x1e\x6c\x8e\x50\x99\x2b\xd9\x50\x39\xaa\x80" + "\x19\x59\xae\x1e\x7a\x7d\xea\xbe\x92\x0e\xc1\xd4\x99\x71\x50\xb2" + "\x46\x0e\x39\x73\x45\x92\x8e\xd7\xb3\xcd\xf7\x37\x8e\x78\x2b\x2b" + "\xba\x33\xc1\x3e\xdd\xac\x9d\x09\xcd\xb0\x7e\x78\x05\x70\x44\x98" + "\x8d\xcd\xf3\xf4\x55\x07\x6a\x75\x66\x6a\xd2\xf2\x4a\x95\x6b\x07" + "\xfc\x8d\x6d\xe9\x40\xc5\x94\x19\xb5\x29\x5c\xaa\xb0\x7b\x2b\x8d" + "\x64\x41\xfd\x10\x58\xba\x6c\x1f\x7e\x88\x5f\x77\x8c\xe0\x3a\xda" + "\x7d\xed\xc4\xf1\x30\xce\x8d\x47\xd8\xe2\x8c\xca\xea\xf8\xb7\x73" + "\x9d\xb4\xfc\x06\x09\x17\x20\x00\x96\xe4\xd2\x07\x01\x10\x44\xef" + "\x7e\x18\x74\xac\xba\xe3\x26\x0d\x11\x27\xbf\xf3\xbb\x06\x1c\x49" + "\xd7\xae\x79\xe1\xaf\xd8\x66\x48\x03\xb6\x08\x66\x79\x11\xc5\x68" + "\x47\xbc\x4b\xfe\xc4\xa6\x7b\x3a\x66\xcd\x9f\x93\x70\xc2\x42\xd9" + 
"\xac\x54\x36\x73\x1b\x3a\x89\x1f\x13\xc7\x63\x9e\x43\xbf\xdd\xd7" + "\x54\xc7\xda\x6f\x74\x83\x81\x27\x19\xb3\xde\x1a\x14\xec\x0b\x96" + "\xee\x12\x02\xd1\x9f\x30\xe1\xef\xb8\xb4\xe9\xa4\x72\xc1\x48\xbc" + "\x23\x21\x64\x32\x0d\xac\x49\x6e\x53\x80\x37\x10\x2d\xcf\x6f\x11" + "\xf3\xd0\xf3\x02\xb6\x9d\x6e\x3c\x44\x39\x4d\xee\x8b\x8f\xea\xfa" + "\x20\xf4\x98\x67\x9c\xe3\x12\x82\xa8\xbe\x1c\x3a\x1c\x51\x81\x9d" + "\xc0\xfe\x46\x79\xd0\x19\x6b\xf1\x5d\xbb\x4d\xde\x42\xc9\x72\x93" + "\x62\x65\xb5\x88\xb1\x5f\x92\xfe\x56\x56\x58\xfb\x7a\x81\x7c\x02" + "\xb0\xc0\x53\x84\x6f\x13\x20\x53\xec\x49\x93\xae\x7e\x3c\x3f\xdf" + "\xe7\xba\xa0\x40\x24\x10\xd4\xe6\xf5\xed\x65\xd3\x21\x36\xb1\xe6" + "\x11\x0a\x47\xbc\xd3\x21\x33\x30\x03\x37\x8c\x45\xe5\xdd\xb0\xd5" + "\xcb\x80\x42\xdd\x84\xd6\x70\xf0\xbb\x5b\x44\xe0\x84\x8b\x83\x7c" + "\xcb\xec\x6a\x28\xa3\xf3\x4a\x6c\x0d\xb0\x79\x34\x13\x10\x64\xfc" + "\xee\x12\x55\x82\x25\x25\x30\xb9\xa6\xf8\x3c\x81\x36\xcd\xef", + "\xce\xc3\x13\x6c\x40\x2a\xcc\x51\xa1\xce\xb3\xed\xe8\xa6\x5b\x04", } }; -- 2.30.2 From jussi.kivilinna at iki.fi Fri Aug 13 17:01:29 2021 From: jussi.kivilinna at iki.fi (Jussi Kivilinna) Date: Fri, 13 Aug 2021 18:01:29 +0300 Subject: [PATCH 4/4] Add ARMv8-CE HW acceleration for GCM-SIV counter mode In-Reply-To: <20210813150129.3175810-1-jussi.kivilinna@iki.fi> References: <20210813150129.3175810-1-jussi.kivilinna@iki.fi> Message-ID: <20210813150129.3175810-4-jussi.kivilinna@iki.fi> * cipher/rijndael-armv8-aarch32-ce.S (_gcry_aes_ctr32le_enc_armv8_ce): New. * cipher/rijndael-armv8-aarch64-ce.S (_gcry_aes_ctr32le_enc_armv8_ce): New. * cipher/rijndael-armv8-ce.c (_gcry_aes_ctr32le_enc_armv8_ce) (_gcry_aes_armv8_ce_ctr32le_enc): New. * cipher/rijndael.c (_gcry_aes_armv8_ce_ctr32le_enc): New prototype. (do_setkey): Add setup of 'bulk_ops->ctr32le_enc' for ARMv8-CE. 
-- Benchmark on Cortex-A53 (aarch64): Before: AES | nanosecs/byte mebibytes/sec cycles/byte auto Mhz GCM-SIV enc | 11.77 ns/B 81.03 MiB/s 7.63 c/B 647.9 GCM-SIV dec | 11.92 ns/B 79.98 MiB/s 7.73 c/B 647.9 GCM-SIV auth | 2.99 ns/B 318.9 MiB/s 1.94 c/B 648.0 After (~2.4x faster): AES | nanosecs/byte mebibytes/sec cycles/byte auto Mhz GCM-SIV enc | 4.66 ns/B 204.5 MiB/s 3.02 c/B 647.9 GCM-SIV dec | 4.82 ns/B 198.0 MiB/s 3.12 c/B 647.9 GCM-SIV auth | 3.00 ns/B 318.4 MiB/s 1.94 c/B 648.0 Signed-off-by: Jussi Kivilinna --- cipher/rijndael-armv8-aarch32-ce.S | 121 +++++++++++++++++++++++++++++ cipher/rijndael-armv8-aarch64-ce.S | 109 ++++++++++++++++++++++++++ cipher/rijndael-armv8-ce.c | 17 ++++ cipher/rijndael.c | 5 ++ 4 files changed, 252 insertions(+) diff --git a/cipher/rijndael-armv8-aarch32-ce.S b/cipher/rijndael-armv8-aarch32-ce.S index 66440bd4..6d78af0a 100644 --- a/cipher/rijndael-armv8-aarch32-ce.S +++ b/cipher/rijndael-armv8-aarch32-ce.S @@ -1016,6 +1016,127 @@ _gcry_aes_ctr_enc_armv8_ce: .size _gcry_aes_ctr_enc_armv8_ce,.-_gcry_aes_ctr_enc_armv8_ce; +/* + * void _gcry_aes_ctr32le_enc_armv8_ce (const void *keysched, + * unsigned char *outbuf, + * const unsigned char *inbuf, + * unsigned char *iv, + * unsigned int nrounds); + */ + +.align 3 +.globl _gcry_aes_ctr32le_enc_armv8_ce +.type _gcry_aes_ctr32le_enc_armv8_ce,%function; +_gcry_aes_ctr32le_enc_armv8_ce: + /* input: + * r0: keysched + * r1: outbuf + * r2: inbuf + * r3: iv + * %st+0: nblocks => r4 + * %st+4: nrounds => r5 + */ + + vpush {q4-q7} + push {r4-r12,lr} /* 4*16 + 4*10 = 104b */ + ldr r4, [sp, #(104+0)] + ldr r5, [sp, #(104+4)] + cmp r4, #0 + beq .Lctr32le_enc_skip + + cmp r5, #12 + vld1.8 {q0}, [r3] /* load IV */ + + aes_preload_keys(r0, r6); + + beq .Lctr32le_enc_entry_192 + bhi .Lctr32le_enc_entry_256 + +#define CTR_ENC(bits, ...) 
\ + .Lctr32le_enc_entry_##bits: \ + cmp r4, #4; \ + blo .Lctr32le_enc_loop_##bits; \ + \ + .Lctr32le_enc_loop4_##bits: \ + veor q2, q2; \ + sub r4, r4, #4; \ + vmov.i64 d4, #0xffffffff; /* q2 <= -1:0:0:0 */ \ + vmov q1, q0; \ + vadd.u32 q3, q2, q2; /* q3 <= -2:0:0:0 */ \ + vadd.u32 q0, q3, q3; /* q0 <= -4:0:0:0 */ \ + vadd.u32 q4, q3, q2; /* q4 <= -3:0:0:0 */ \ + vsub.u32 q0, q1, q0; \ + vsub.u32 q2, q1, q2; \ + vst1.8 {q0}, [r3]; \ + vsub.u32 q3, q1, q3; \ + vsub.u32 q4, q1, q4; \ + \ + cmp r4, #4; \ + vld1.8 {q0}, [r2]!; /* load ciphertext */ \ + \ + do_aes_4_##bits(e, mc, q1, q2, q3, q4, ##__VA_ARGS__); \ + \ + veor q1, q1, q0; \ + vld1.8 {q0}, [r2]!; /* load ciphertext */ \ + vst1.8 {q1}, [r1]!; /* store plaintext */ \ + vld1.8 {q1}, [r2]!; /* load ciphertext */ \ + veor q2, q2, q0; \ + veor q3, q3, q1; \ + vld1.8 {q0}, [r2]!; /* load ciphertext */ \ + vst1.8 {q2}, [r1]!; /* store plaintext */ \ + veor q4, q4, q0; \ + vld1.8 {q0}, [r3]; /* reload IV */ \ + vst1.8 {q3-q4}, [r1]!; /* store plaintext */ \ + \ + bhs .Lctr32le_enc_loop4_##bits; \ + cmp r4, #0; \ + beq .Lctr32le_enc_done; \ + \ + .Lctr32le_enc_loop_##bits: \ + \ + veor q2, q2; \ + vmov q1, q0; \ + vmov.i64 d4, #0xffffffff; /* q2 <= -1:0:0:0 */ \ + subs r4, r4, #1; \ + vsub.u32 q0, q0, q2; \ + vld1.8 {q2}, [r2]!; /* load ciphertext */ \ + \ + do_aes_one##bits(e, mc, q1, q1, ##__VA_ARGS__); \ + \ + veor q1, q2, q1; \ + vst1.8 {q1}, [r1]!; /* store plaintext */ \ + \ + bne .Lctr32le_enc_loop_##bits; \ + b .Lctr32le_enc_done; + + CTR_ENC(128) + CTR_ENC(192, r0, r6) + CTR_ENC(256, r0, r6) + +#undef CTR_ENC + +.Lctr32le_enc_done: + vst1.8 {q0}, [r3] /* store IV */ + + CLEAR_REG(q0) + CLEAR_REG(q1) + CLEAR_REG(q2) + CLEAR_REG(q3) + CLEAR_REG(q8) + CLEAR_REG(q9) + CLEAR_REG(q10) + CLEAR_REG(q11) + CLEAR_REG(q12) + CLEAR_REG(q13) + CLEAR_REG(q14) + +.Lctr32le_enc_skip: + pop {r4-r12,lr} + vpop {q4-q7} + bx lr +.size _gcry_aes_ctr32le_enc_armv8_ce,.-_gcry_aes_ctr32le_enc_armv8_ce; + + /* * void _gcry_aes_ocb_enc_armv8_ce (const void *keysched, * unsigned char *outbuf, diff --git a/cipher/rijndael-armv8-aarch64-ce.S b/cipher/rijndael-armv8-aarch64-ce.S index 3af29e0d..a87d2ca5 100644 --- a/cipher/rijndael-armv8-aarch64-ce.S +++ b/cipher/rijndael-armv8-aarch64-ce.S @@ -676,6 +676,115 @@ _gcry_aes_ctr_enc_armv8_ce: ELF(.size _gcry_aes_ctr_enc_armv8_ce,.-_gcry_aes_ctr_enc_armv8_ce;) +/* + * void _gcry_aes_ctr32le_enc_armv8_ce (const void *keysched, + * unsigned char *outbuf, + * const unsigned char *inbuf, + * unsigned char *iv, + * unsigned int nrounds); + */ + +.align 3 +.globl _gcry_aes_ctr32le_enc_armv8_ce +ELF(.type _gcry_aes_ctr32le_enc_armv8_ce,%function;) +_gcry_aes_ctr32le_enc_armv8_ce: + /* input: + * r0: keysched + * r1: outbuf + * r2: inbuf + * r3: iv + * x4: nblocks + * w5: nrounds + */ + CFI_STARTPROC(); + + cbz x4, .Lctr32le_enc_skip + + mov w6, #1 + movi v16.16b, #0 + mov v16.S[0], w6 + + /* load IV */ + ld1 {v0.16b}, [x3] + + aes_preload_keys(x0, w5); + + b.eq .Lctr32le_enc_entry_192 + b.hi .Lctr32le_enc_entry_256 + +#define CTR_ENC(bits) \ + .Lctr32le_enc_entry_##bits: \ + cmp x4, #4; \ + b.lo .Lctr32le_enc_loop_##bits; \ + \ + .Lctr32le_enc_loop4_##bits: \ + sub x4, x4, #4; \ + \ + add v3.4s, v16.4s, v16.4s; /* 2 */ \ + mov v1.16b, v0.16b; \ + add v2.4s, v0.4s, v16.4s; \ + add v4.4s, v3.4s, v16.4s; /* 3 */ \ + add v6.4s, v3.4s, v3.4s; /* 4 */ \ + add v3.4s, v0.4s, v3.4s; \ + add v4.4s, v0.4s, v4.4s; \ + add v0.4s, v0.4s, v6.4s; \ + \ + cmp x4, #4; \ + ld1 {v5.16b-v7.16b}, [x2], #48; /* preload ciphertext */ \ + \ + 
do_aes_4_##bits(e, mc, v1, v2, v3, v4); \ + \ + eor v1.16b, v1.16b, v5.16b; \ + ld1 {v5.16b}, [x2], #16; /* load ciphertext */ \ + eor v2.16b, v2.16b, v6.16b; \ + eor v3.16b, v3.16b, v7.16b; \ + eor v4.16b, v4.16b, v5.16b; \ + st1 {v1.16b-v4.16b}, [x1], #64; /* store plaintext */ \ + \ + b.hs .Lctr32le_enc_loop4_##bits; \ + CLEAR_REG(v3); \ + CLEAR_REG(v4); \ + CLEAR_REG(v5); \ + CLEAR_REG(v6); \ + CLEAR_REG(v7); \ + cbz x4, .Lctr32le_enc_done; \ + \ + .Lctr32le_enc_loop_##bits: \ + \ + mov v1.16b, v0.16b; \ + ld1 {v2.16b}, [x2], #16; /* load ciphertext */ \ + sub x4, x4, #1; \ + add v0.4s, v0.4s, v16.4s; \ + \ + do_aes_one##bits(e, mc, v1, v1); \ + \ + eor v1.16b, v2.16b, v1.16b; \ + st1 {v1.16b}, [x1], #16; /* store plaintext */ \ + \ + cbnz x4, .Lctr32le_enc_loop_##bits; \ + b .Lctr32le_enc_done; + + CTR_ENC(128) + CTR_ENC(192) + CTR_ENC(256) + +#undef CTR_ENC + +.Lctr32le_enc_done: + aes_clear_keys(w5) + + st1 {v0.16b}, [x3] /* store IV */ + + CLEAR_REG(v0) + CLEAR_REG(v1) + CLEAR_REG(v2) + +.Lctr32le_enc_skip: + ret + CFI_ENDPROC(); +ELF(.size _gcry_aes_ctr32le_enc_armv8_ce,.-_gcry_aes_ctr32le_enc_armv8_ce;) + + /* * void _gcry_aes_cfb_enc_armv8_ce (const void *keysched, * unsigned char *outbuf, diff --git a/cipher/rijndael-armv8-ce.c b/cipher/rijndael-armv8-ce.c index 6e46830e..b24ae3e9 100644 --- a/cipher/rijndael-armv8-ce.c +++ b/cipher/rijndael-armv8-ce.c @@ -75,6 +75,12 @@ extern void _gcry_aes_ctr_enc_armv8_ce (const void *keysched, unsigned char *iv, size_t nblocks, unsigned int nrounds); +extern void _gcry_aes_ctr32le_enc_armv8_ce (const void *keysched, + unsigned char *outbuf, + const unsigned char *inbuf, + unsigned char *iv, size_t nblocks, + unsigned int nrounds); + extern void _gcry_aes_ocb_enc_armv8_ce (const void *keysched, unsigned char *outbuf, const unsigned char *inbuf, @@ -345,6 +351,17 @@ _gcry_aes_armv8_ce_ctr_enc (RIJNDAEL_context *ctx, unsigned char *iv, _gcry_aes_ctr_enc_armv8_ce(keysched, outbuf, inbuf, iv, nblocks, nrounds); } +void +_gcry_aes_armv8_ce_ctr32le_enc (RIJNDAEL_context *ctx, unsigned char *iv, + unsigned char *outbuf, + const unsigned char *inbuf, size_t nblocks) +{ + const void *keysched = ctx->keyschenc32; + unsigned int nrounds = ctx->rounds; + + _gcry_aes_ctr32le_enc_armv8_ce(keysched, outbuf, inbuf, iv, nblocks, nrounds); +} + size_t _gcry_aes_armv8_ce_ocb_crypt (gcry_cipher_hd_t c, void *outbuf_arg, const void *inbuf_arg, size_t nblocks, diff --git a/cipher/rijndael.c b/cipher/rijndael.c index c096321f..df41b911 100644 --- a/cipher/rijndael.c +++ b/cipher/rijndael.c @@ -209,6 +209,10 @@ extern void _gcry_aes_armv8_ce_cbc_enc (void *context, unsigned char *iv, extern void _gcry_aes_armv8_ce_ctr_enc (void *context, unsigned char *ctr, void *outbuf_arg, const void *inbuf_arg, size_t nblocks); +extern void _gcry_aes_armv8_ce_ctr32le_enc (void *context, unsigned char *ctr, + void *outbuf_arg, + const void *inbuf_arg, + size_t nblocks); extern void _gcry_aes_armv8_ce_cfb_dec (void *context, unsigned char *iv, void *outbuf_arg, const void *inbuf_arg, size_t nblocks); @@ -570,6 +574,7 @@ do_setkey (RIJNDAEL_context *ctx, const byte *key, const unsigned keylen, bulk_ops->cbc_enc = _gcry_aes_armv8_ce_cbc_enc; bulk_ops->cbc_dec = _gcry_aes_armv8_ce_cbc_dec; bulk_ops->ctr_enc = _gcry_aes_armv8_ce_ctr_enc; + bulk_ops->ctr32le_enc = _gcry_aes_armv8_ce_ctr32le_enc; bulk_ops->ocb_crypt = _gcry_aes_armv8_ce_ocb_crypt; bulk_ops->ocb_auth = _gcry_aes_armv8_ce_ocb_auth; bulk_ops->xts_crypt = _gcry_aes_armv8_ce_xts_crypt; -- 2.30.2 From jussi.kivilinna at 
iki.fi Fri Aug 13 17:01:26 2021 From: jussi.kivilinna at iki.fi (Jussi Kivilinna) Date: Fri, 13 Aug 2021 18:01:26 +0300 Subject: [PATCH 1/4] Add SIV mode (RFC 5297) Message-ID: <20210813150129.3175810-1-jussi.kivilinna@iki.fi> * cipher/Makefile.am: Add 'cipher-siv.c'. * cipher/cipher-ctr.c (_gcry_cipher_ctr_encrypt): Rename to _gcry_cipher_ctr_encrypt_ctx and add algo context parameter. (_gcry_cipher_ctr_encrypt): New using _gcry_cipher_ctr_encrypt_ctx. * cipher/cipher-internal.h (gcry_cipher_handle): Add 'u_mode.siv'. (_gcry_cipher_ctr_encrypt_ctx, _gcry_cipher_siv_encrypt) (_gcry_cipher_siv_decrypt, _gcry_cipher_siv_set_nonce) (_gcry_cipher_siv_authenticate, _gcry_cipher_siv_set_decryption_tag) (_gcry_cipher_siv_get_tag, _gcry_cipher_siv_check_tag) (_gcry_cipher_siv_setkey): New. * cipher/cipher-siv.c: New. * cipher/cipher.c (_gcry_cipher_open_internal, cipher_setkey) (cipher_reset, _gcry_cipher_setup_mode_ops, _gcry_cipher_info): Add GCRY_CIPHER_MODE_SIV handling. (_gcry_cipher_ctl): Add GCRYCTL_SET_DECRYPTION_TAG handling. * doc/gcrypt.texi: Add documentation for SIV mode. * src/gcrypt.h.in (GCRYCTL_SET_DECRYPTION_TAG): New. (GCRY_CIPHER_MODE_SIV): New. (gcry_cipher_set_decryption_tag): New. * tests/basic.c (check_siv_cipher): New. (check_cipher_modes): Add call for 'check_siv_cipher'. * tests/bench-slope.c (bench_encrypt_init): Use double size key for SIV mode. (bench_aead_encrypt_do_bench, bench_aead_decrypt_do_bench) (bench_aead_authenticate_do_bench): Reset cipher context on each run. (bench_aead_authenticate_do_bench): Support nonce-less operation. (bench_siv_encrypt_do_bench, bench_siv_decrypt_do_bench) (bench_siv_authenticate_do_bench, siv_encrypt_ops) (siv_decrypt_ops, siv_authenticate_ops): New. (cipher_modes): Add SIV mode benchmarks. (cipher_bench_one): Restrict SIV mode testing to 16 byte block-size. 
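For orientation, a minimal caller-side sketch of the new mode (an illustration, not from the patch itself; error checking is omitted, GCRY_CIPHER_MODE_SIV and gcry_cipher_set_decryption_tag are the new names from the changelog above, and the remaining calls are the existing libgcrypt cipher API):

#include <gcrypt.h>

/* Seal PT with AES-128-SIV.  SIV takes a double-length key: the first
   half keys S2V/CMAC, the second half keys CTR (see the cipher_setkey
   hunk below). */
static void
siv_seal (const unsigned char key[32],
          const unsigned char *nonce, size_t noncelen,
          const unsigned char *ad, size_t adlen,
          const unsigned char *pt, size_t ptlen,
          unsigned char *ct, unsigned char tag[16])
{
  gcry_cipher_hd_t hd;

  gcry_cipher_open (&hd, GCRY_CIPHER_AES128, GCRY_CIPHER_MODE_SIV, 0);
  gcry_cipher_setkey (hd, key, 32);
  gcry_cipher_authenticate (hd, ad, adlen);  /* zero or more AD components */
  gcry_cipher_setiv (hd, nonce, noncelen);   /* nonce is the last component */
  gcry_cipher_encrypt (hd, ct, ptlen, pt, ptlen);
  gcry_cipher_gettag (hd, tag, 16);          /* 16-byte SIV tag */
  gcry_cipher_close (hd);
}

Decryption differs from the other AEAD modes in that the tag doubles as the CTR IV, so it must be supplied up front with gcry_cipher_set_decryption_tag (hd, tag, 16) before calling gcry_cipher_decrypt(), which then verifies it and returns GPG_ERR_CHECKSUM on mismatch.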
-- Signed-off-by: Jussi Kivilinna --- cipher/Makefile.am | 1 + cipher/cipher-ctr.c | 21 +- cipher/cipher-internal.h | 57 ++++++ cipher/cipher-ocb.c | 3 +- cipher/cipher-siv.c | 377 +++++++++++++++++++++++++++++++++++ cipher/cipher.c | 93 ++++++++- doc/gcrypt.texi | 37 +++- src/gcrypt.h.in | 14 +- tests/basic.c | 421 +++++++++++++++++++++++++++++++++++++++ tests/bench-slope.c | 78 +++++++- 10 files changed, 1082 insertions(+), 20 deletions(-) create mode 100644 cipher/cipher-siv.c diff --git a/cipher/Makefile.am b/cipher/Makefile.am index 52a00aa9..4d3e0d19 100644 --- a/cipher/Makefile.am +++ b/cipher/Makefile.am @@ -53,6 +53,7 @@ libcipher_la_SOURCES = \ cipher-ocb.c \ cipher-xts.c \ cipher-eax.c \ + cipher-siv.c \ cipher-selftest.c cipher-selftest.h \ pubkey.c pubkey-internal.h pubkey-util.c \ md.c \ diff --git a/cipher/cipher-ctr.c b/cipher/cipher-ctr.c index 5f0afc2f..d66c5687 100644 --- a/cipher/cipher-ctr.c +++ b/cipher/cipher-ctr.c @@ -31,9 +31,10 @@ gcry_err_code_t -_gcry_cipher_ctr_encrypt (gcry_cipher_hd_t c, - unsigned char *outbuf, size_t outbuflen, - const unsigned char *inbuf, size_t inbuflen) +_gcry_cipher_ctr_encrypt_ctx (gcry_cipher_hd_t c, + unsigned char *outbuf, size_t outbuflen, + const unsigned char *inbuf, size_t inbuflen, + void *algo_context) { size_t n; int i; @@ -65,7 +66,7 @@ _gcry_cipher_ctr_encrypt (gcry_cipher_hd_t c, nblocks = inbuflen >> blocksize_shift; if (nblocks && c->bulk.ctr_enc) { - c->bulk.ctr_enc (&c->context.c, c->u_ctr.ctr, outbuf, inbuf, nblocks); + c->bulk.ctr_enc (algo_context, c->u_ctr.ctr, outbuf, inbuf, nblocks); inbuf += nblocks << blocksize_shift; outbuf += nblocks << blocksize_shift; inbuflen -= nblocks << blocksize_shift; @@ -80,7 +81,7 @@ _gcry_cipher_ctr_encrypt (gcry_cipher_hd_t c, n = blocksize; do { - nburn = enc_fn (&c->context.c, tmp, c->u_ctr.ctr); + nburn = enc_fn (algo_context, tmp, c->u_ctr.ctr); burn = nburn > burn ? nburn : burn; cipher_block_add(c->u_ctr.ctr, 1, blocksize); @@ -118,3 +119,13 @@ _gcry_cipher_ctr_encrypt (gcry_cipher_hd_t c, return 0; } + + +gcry_err_code_t +_gcry_cipher_ctr_encrypt (gcry_cipher_hd_t c, + unsigned char *outbuf, size_t outbuflen, + const unsigned char *inbuf, size_t inbuflen) +{ + return _gcry_cipher_ctr_encrypt_ctx (c, outbuf, outbuflen, inbuf, inbuflen, + &c->context.c); +} diff --git a/cipher/cipher-internal.h b/cipher/cipher-internal.h index 0e4a90fc..e9f48a2f 100644 --- a/cipher/cipher-internal.h +++ b/cipher/cipher-internal.h @@ -398,6 +398,31 @@ struct gcry_cipher_handle * cipher context. */ char *tweak_context; } xts; + + /* Mode specific storage for SIV mode. */ + struct { + /* Tag used for decryption. */ + unsigned char dec_tag[GCRY_SIV_BLOCK_LEN]; + + /* S2V state. */ + unsigned char s2v_d[GCRY_SIV_BLOCK_LEN]; + + /* Number of AAD elements processed. */ + unsigned int aad_count:8; + + /* Flags for SIV state. */ + unsigned int dec_tag_set:1; + + /* --- Following members are not cleared in gcry_cipher_reset --- */ + + /* S2V CMAC state. */ + gcry_cmac_context_t s2v_cmac; + unsigned char s2v_zero_block[GCRY_SIV_BLOCK_LEN]; + + /* Pointer to CTR cipher context, allocated after actual + * cipher context. */ + char *ctr_context; + } siv; } u_mode; /* What follows are two contexts of the cipher in use. 
The first @@ -453,6 +478,11 @@ gcry_err_code_t _gcry_cipher_ofb_encrypt const unsigned char *inbuf, size_t inbuflen); /*-- cipher-ctr.c --*/ +gcry_err_code_t _gcry_cipher_ctr_encrypt_ctx +/* */ (gcry_cipher_hd_t c, + unsigned char *outbuf, size_t outbuflen, + const unsigned char *inbuf, size_t inbuflen, + void *algo_context); gcry_err_code_t _gcry_cipher_ctr_encrypt /* */ (gcry_cipher_hd_t c, unsigned char *outbuf, size_t outbuflen, @@ -622,6 +652,33 @@ gcry_err_code_t _gcry_cipher_xts_decrypt const unsigned char *inbuf, size_t inbuflen); + +/*-- cipher-siv.c --*/ +gcry_err_code_t _gcry_cipher_siv_encrypt +/* */ (gcry_cipher_hd_t c, + unsigned char *outbuf, size_t outbuflen, + const unsigned char *inbuf, size_t inbuflen); +gcry_err_code_t _gcry_cipher_siv_decrypt +/* */ (gcry_cipher_hd_t c, + unsigned char *outbuf, size_t outbuflen, + const unsigned char *inbuf, size_t inbuflen); +gcry_err_code_t _gcry_cipher_siv_set_nonce +/* */ (gcry_cipher_hd_t c, const unsigned char *nonce, + size_t noncelen); +gcry_err_code_t _gcry_cipher_siv_authenticate +/* */ (gcry_cipher_hd_t c, const unsigned char *abuf, size_t abuflen); +gcry_err_code_t _gcry_cipher_siv_set_decryption_tag +/* */ (gcry_cipher_hd_t c, const byte *tag, size_t taglen); +gcry_err_code_t _gcry_cipher_siv_get_tag +/* */ (gcry_cipher_hd_t c, + unsigned char *outtag, size_t taglen); +gcry_err_code_t _gcry_cipher_siv_check_tag +/* */ (gcry_cipher_hd_t c, + const unsigned char *intag, size_t taglen); +gcry_err_code_t _gcry_cipher_siv_setkey +/* */ (gcry_cipher_hd_t c, + const unsigned char *ctrkey, size_t ctrkeylen); + + /* Return the L-value for block N. Note: 'cipher_ocb.c' ensures that N * will never be multiple of 65536 (1 << OCB_L_TABLE_SIZE), thus N can * be directly passed to _gcry_ctz() function and resulting index will diff --git a/cipher/cipher-ocb.c b/cipher/cipher-ocb.c index 24db6a9e..bfafa4c8 100644 --- a/cipher/cipher-ocb.c +++ b/cipher/cipher-ocb.c @@ -107,7 +107,8 @@ ocb_get_L_big (gcry_cipher_hd_t c, u64 n, unsigned char *l_buf) /* Called after key has been set. Sets up L table. */ -void _gcry_cipher_ocb_setkey (gcry_cipher_hd_t c) +void +_gcry_cipher_ocb_setkey (gcry_cipher_hd_t c) { unsigned char ktop[OCB_BLOCK_LEN]; unsigned int burn = 0; diff --git a/cipher/cipher-siv.c b/cipher/cipher-siv.c new file mode 100644 index 00000000..9a71f2ef --- /dev/null +++ b/cipher/cipher-siv.c @@ -0,0 +1,377 @@ +/* cipher-siv.c - SIV implementation + * Copyright (C) 2021 Jussi Kivilinna <jussi.kivilinna@iki.fi> + * + * This file is part of Libgcrypt. + * + * Libgcrypt is free software; you can redistribute it and/or modify + * it under the terms of the GNU Lesser General Public License as + * published by the Free Software Foundation; either version 2.1 of + * the License, or (at your option) any later version. + * + * Libgcrypt is distributed in the hope that it will be useful, + * but WITHOUT ANY WARRANTY; without even the implied warranty of + * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the + * GNU Lesser General Public License for more details. + * + * You should have received a copy of the GNU Lesser General Public + * License along with this program; if not, see <http://www.gnu.org/licenses/>. 
+ */ + +#include <config.h> +#include <stdio.h> +#include <stdlib.h> +#include <string.h> +#include <errno.h> + +#include "g10lib.h" +#include "cipher.h" +#include "bufhelp.h" +#include "./cipher-internal.h" + + +static inline void +s2v_double (unsigned char *d) +{ + u64 hi, lo, mask; + + hi = buf_get_be64(d + 0); + lo = buf_get_be64(d + 8); + + mask = -(hi >> 63); + hi = (hi << 1) ^ (lo >> 63); + lo = (lo << 1) ^ (mask & 0x87); + + buf_put_be64(d + 0, hi); + buf_put_be64(d + 8, lo); +} + + +static void +s2v_pad (unsigned char *out, const byte *in, size_t inlen) +{ + static const unsigned char padding[GCRY_SIV_BLOCK_LEN] = { 0x80 }; + + gcry_assert(inlen < GCRY_SIV_BLOCK_LEN); + + buf_cpy (out, in, inlen); + buf_cpy (out + inlen, padding, GCRY_SIV_BLOCK_LEN - inlen); +} + + +gcry_err_code_t +_gcry_cipher_siv_setkey (gcry_cipher_hd_t c, + const unsigned char *ctrkey, size_t ctrkeylen) +{ + static const unsigned char zero[GCRY_SIV_BLOCK_LEN] = { 0 }; + gcry_err_code_t err; + + if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN) + return GPG_ERR_CIPHER_ALGO; + + c->u_mode.siv.aad_count = 0; + c->u_mode.siv.dec_tag_set = 0; + c->marks.tag = 0; + c->marks.iv = 0; + + /* Set CTR mode key. */ + err = c->spec->setkey (c->u_mode.siv.ctr_context, ctrkey, ctrkeylen, + &c->bulk); + if (err != 0) + return err; + + /* Initialize S2V. */ + memset (&c->u_mode.siv.s2v_cmac, 0, sizeof(c->u_mode.siv.s2v_cmac)); + err = _gcry_cmac_generate_subkeys (c, &c->u_mode.siv.s2v_cmac); + if (err != 0) + return err; + + err = _gcry_cmac_write (c, &c->u_mode.siv.s2v_cmac, zero, GCRY_SIV_BLOCK_LEN); + if (err != 0) + return err; + + err = _gcry_cmac_final (c, &c->u_mode.siv.s2v_cmac); + if (err != 0) + return err; + + memcpy (c->u_mode.siv.s2v_zero_block, c->u_mode.siv.s2v_cmac.u_iv.iv, + GCRY_SIV_BLOCK_LEN); + memcpy (c->u_mode.siv.s2v_d, c->u_mode.siv.s2v_zero_block, + GCRY_SIV_BLOCK_LEN); + if (err != 0) + return err; + + return 0; +} + + +gcry_err_code_t +_gcry_cipher_siv_authenticate (gcry_cipher_hd_t c, + const byte *aadbuf, size_t aadbuflen) +{ + gcry_err_code_t err; + + if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN) + return GPG_ERR_CIPHER_ALGO; + if (c->marks.tag) + return GPG_ERR_INV_STATE; + if (c->marks.iv) + return GPG_ERR_INV_STATE; + + if (c->u_mode.siv.aad_count >= 126) + return GPG_ERR_INV_STATE; /* Too many AD vector components. */ + + c->u_mode.siv.aad_count++; + + _gcry_cmac_reset (&c->u_mode.siv.s2v_cmac); + + err = _gcry_cmac_write (c, &c->u_mode.siv.s2v_cmac, aadbuf, aadbuflen); + if (err != 0) + return err; + + err = _gcry_cmac_final (c, &c->u_mode.siv.s2v_cmac); + if (err != 0) + return err; + + s2v_double (c->u_mode.siv.s2v_d); + cipher_block_xor_1 (c->u_mode.siv.s2v_d, c->u_mode.siv.s2v_cmac.u_iv.iv, + GCRY_SIV_BLOCK_LEN); + + return 0; +} + + +gcry_err_code_t +_gcry_cipher_siv_set_nonce (gcry_cipher_hd_t c, const byte *nonce, + size_t noncelen) +{ + gcry_err_code_t err; + + err = _gcry_cipher_siv_authenticate (c, nonce, noncelen); + if (err) + return err; + + /* Nonce is the last AD before plaintext. */ + c->marks.iv = 1; + + return 0; +} + + +static gcry_err_code_t +s2v_plaintext (gcry_cipher_hd_t c, const byte *plain, size_t plainlen) +{ + gcry_err_code_t err; + + if (c->u_mode.siv.aad_count >= 127) + return GPG_ERR_INV_STATE; /* Too many AD vector components. 
+        */
+
+  _gcry_cmac_reset (&c->u_mode.siv.s2v_cmac);
+
+  if (plainlen >= GCRY_SIV_BLOCK_LEN)
+    {
+      err = _gcry_cmac_write (c, &c->u_mode.siv.s2v_cmac, plain,
+                              plainlen - GCRY_SIV_BLOCK_LEN);
+      if (err)
+        return err;
+
+      cipher_block_xor_1 (c->u_mode.siv.s2v_d,
+                          plain + plainlen - GCRY_SIV_BLOCK_LEN,
+                          GCRY_SIV_BLOCK_LEN);
+
+      err = _gcry_cmac_write (c, &c->u_mode.siv.s2v_cmac, c->u_mode.siv.s2v_d,
+                              GCRY_SIV_BLOCK_LEN);
+      if (err)
+        return err;
+    }
+  else
+    {
+      unsigned char pad_sn[GCRY_SIV_BLOCK_LEN];
+
+      s2v_double (c->u_mode.siv.s2v_d);
+      s2v_pad (pad_sn, plain, plainlen);
+      cipher_block_xor_1 (pad_sn, c->u_mode.siv.s2v_d, GCRY_SIV_BLOCK_LEN);
+
+      err = _gcry_cmac_write (c, &c->u_mode.siv.s2v_cmac, pad_sn,
+                              GCRY_SIV_BLOCK_LEN);
+      wipememory (pad_sn, sizeof(pad_sn));
+      if (err)
+        return err;
+    }
+
+  c->u_mode.siv.aad_count++;
+
+  return _gcry_cmac_final (c, &c->u_mode.siv.s2v_cmac);
+}
+
+
+gcry_err_code_t
+_gcry_cipher_siv_encrypt (gcry_cipher_hd_t c,
+                          byte *outbuf, size_t outbuflen,
+                          const byte *inbuf, size_t inbuflen)
+{
+  gcry_err_code_t err;
+  u64 q_lo;
+
+  if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN)
+    return GPG_ERR_CIPHER_ALGO;
+  if (outbuflen < inbuflen)
+    return GPG_ERR_BUFFER_TOO_SHORT;
+  if (c->marks.tag)
+    return GPG_ERR_INV_STATE;
+  if (c->u_mode.siv.dec_tag_set)
+    return GPG_ERR_INV_STATE;
+
+  /* Pass plaintext to S2V. */
+  err = s2v_plaintext (c, inbuf, inbuflen);
+  if (err != 0)
+    return err;
+
+  /* Clear the 31st and 63rd bits. */
+  memcpy (c->u_ctr.ctr, c->u_mode.siv.s2v_cmac.u_iv.iv, GCRY_SIV_BLOCK_LEN);
+  q_lo = buf_get_be64(c->u_ctr.ctr + 8);
+  q_lo &= ~((u64)1 << 31);
+  q_lo &= ~((u64)1 << 63);
+  buf_put_be64(c->u_ctr.ctr + 8, q_lo);
+
+  /* Encrypt plaintext. */
+  err = _gcry_cipher_ctr_encrypt_ctx(c, outbuf, outbuflen, inbuf, inbuflen,
+                                     c->u_mode.siv.ctr_context);
+  if (err != 0)
+    return err;
+
+  c->marks.tag = 1;
+
+  return 0;
+}
+
+
+gcry_err_code_t
+_gcry_cipher_siv_set_decryption_tag (gcry_cipher_hd_t c,
+                                     const byte *tag, size_t taglen)
+{
+  if (taglen != GCRY_SIV_BLOCK_LEN)
+    return GPG_ERR_INV_ARG;
+  if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN)
+    return GPG_ERR_CIPHER_ALGO;
+  if (c->marks.tag)
+    return GPG_ERR_INV_STATE;
+
+  memcpy (&c->u_mode.siv.dec_tag, tag, GCRY_SIV_BLOCK_LEN);
+  c->u_mode.siv.dec_tag_set = 1;
+
+  return 0;
+}
+
+
+gcry_err_code_t
+_gcry_cipher_siv_decrypt (gcry_cipher_hd_t c,
+                          byte *outbuf, size_t outbuflen,
+                          const byte *inbuf, size_t inbuflen)
+{
+  gcry_err_code_t err;
+  u64 q_lo;
+
+  if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN)
+    return GPG_ERR_CIPHER_ALGO;
+  if (outbuflen < inbuflen)
+    return GPG_ERR_BUFFER_TOO_SHORT;
+  if (c->marks.tag)
+    return GPG_ERR_INV_STATE;
+  if (!c->u_mode.siv.dec_tag_set)
+    return GPG_ERR_INV_STATE;
+
+  /* Clear the 31st and 63rd bits. */
+  memcpy (c->u_ctr.ctr, c->u_mode.siv.dec_tag, GCRY_SIV_BLOCK_LEN);
+  q_lo = buf_get_be64(c->u_ctr.ctr + 8);
+  q_lo &= ~((u64)1 << 31);
+  q_lo &= ~((u64)1 << 63);
+  buf_put_be64(c->u_ctr.ctr + 8, q_lo);
+
+  /* Decrypt ciphertext. */
+  err = _gcry_cipher_ctr_encrypt_ctx(c, outbuf, outbuflen, inbuf, inbuflen,
+                                     c->u_mode.siv.ctr_context);
+  if (err != 0)
+    return err;
+
+  /* Pass plaintext to S2V.
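+   * SIV authenticates the plaintext rather than the ciphertext, so
+   * the tag can only be checked after decryption; on a mismatch the
+   * decrypted data is wiped again below.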
*/ + err = s2v_plaintext (c, outbuf, inbuflen); + if (err != 0) + return err; + + c->marks.tag = 1; + + if (!buf_eq_const(c->u_mode.siv.s2v_cmac.u_iv.iv, c->u_mode.siv.dec_tag, + GCRY_SIV_BLOCK_LEN)) + { + wipememory (outbuf, inbuflen); + return GPG_ERR_CHECKSUM; + } + + return 0; +} + + +gcry_err_code_t +_gcry_cipher_siv_get_tag (gcry_cipher_hd_t c, unsigned char *outbuf, + size_t outbuflen) +{ + gcry_err_code_t err; + + if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN) + return GPG_ERR_CIPHER_ALGO; + if (c->u_mode.siv.dec_tag_set) + return GPG_ERR_INV_STATE; + + if (!c->marks.tag) + { + /* Finalize SIV with zero-length plaintext. */ + err = s2v_plaintext (c, NULL, 0); + if (err != 0) + return err; + + c->marks.tag = 1; + } + + if (outbuflen > GCRY_SIV_BLOCK_LEN) + outbuflen = GCRY_SIV_BLOCK_LEN; + + /* We already checked that OUTBUF is large enough to hold + * the result or has valid truncated length. */ + memcpy (outbuf, c->u_mode.siv.s2v_cmac.u_iv.iv, outbuflen); + + return 0; +} + + +gcry_err_code_t +_gcry_cipher_siv_check_tag (gcry_cipher_hd_t c, const unsigned char *intag, + size_t taglen) +{ + gcry_err_code_t err; + size_t n; + + if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN) + return GPG_ERR_CIPHER_ALGO; + + if (!c->marks.tag) + { + /* Finalize SIV with zero-length plaintext. */ + err = s2v_plaintext (c, NULL, 0); + if (err != 0) + return err; + + c->marks.tag = 1; + } + + n = GCRY_SIV_BLOCK_LEN; + if (taglen < n) + n = taglen; + + if (!buf_eq_const(c->u_mode.siv.s2v_cmac.u_iv.iv, intag, n) + || GCRY_SIV_BLOCK_LEN != taglen) + { + return GPG_ERR_CHECKSUM; + } + + return 0; +} diff --git a/cipher/cipher.c b/cipher/cipher.c index 1039dff7..a274466f 100644 --- a/cipher/cipher.c +++ b/cipher/cipher.c @@ -550,6 +550,7 @@ _gcry_cipher_open_internal (gcry_cipher_hd_t *handle, case GCRY_CIPHER_MODE_CMAC: case GCRY_CIPHER_MODE_EAX: case GCRY_CIPHER_MODE_GCM: + case GCRY_CIPHER_MODE_SIV: if (!spec->encrypt || !spec->decrypt) err = GPG_ERR_INV_CIPHER_MODE; break; @@ -609,6 +610,7 @@ _gcry_cipher_open_internal (gcry_cipher_hd_t *handle, switch (mode) { case GCRY_CIPHER_MODE_XTS: + case GCRY_CIPHER_MODE_SIV: /* Additional cipher context for tweak. */ size += 2 * spec->contextsize + 15; break; @@ -661,7 +663,12 @@ _gcry_cipher_open_internal (gcry_cipher_hd_t *handle, tc = h->context.c + spec->contextsize * 2; tc += (16 - (uintptr_t)tc % 16) % 16; h->u_mode.xts.tweak_context = tc; + break; + case GCRY_CIPHER_MODE_SIV: + tc = h->context.c + spec->contextsize * 2; + tc += (16 - (uintptr_t)tc % 16) % 16; + h->u_mode.siv.ctr_context = tc; break; default: @@ -731,6 +738,13 @@ cipher_setkey (gcry_cipher_hd_t c, byte *key, size_t keylen) return GPG_ERR_WEAK_KEY; } } + else if (c->mode == GCRY_CIPHER_MODE_SIV) + { + /* SIV uses two keys. */ + if (keylen % 2) + return GPG_ERR_INV_KEYLEN; + keylen /= 2; + } rc = c->spec->setkey (&c->context.c, key, keylen, &c->bulk); if (!rc || (c->marks.allow_weak_key && rc == GPG_ERR_WEAK_KEY)) @@ -777,9 +791,22 @@ cipher_setkey (gcry_cipher_hd_t c, byte *key, size_t keylen) c->marks.key = 0; break; + case GCRY_CIPHER_MODE_SIV: + /* Setup CTR cipher with second part of SIV key. */ + rc = _gcry_cipher_siv_setkey (c, key + keylen, keylen); + if (!rc || (c->marks.allow_weak_key && rc == GPG_ERR_WEAK_KEY)) + { + /* Duplicate initial CTR context. 
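+           * so that cipher_reset can later restore a fresh CTR state
+           * without re-running the key setup.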
*/ + memcpy (c->u_mode.siv.ctr_context + c->spec->contextsize, + c->u_mode.siv.ctr_context, c->spec->contextsize); + } + else + c->marks.key = 0; + break; + default: break; - }; + } } else c->marks.key = 0; @@ -876,14 +903,25 @@ cipher_reset (gcry_cipher_hd_t c) break; case GCRY_CIPHER_MODE_OCB: - /* Do not clear precalculated L-values */ { + const size_t table_maxblks = 1 << OCB_L_TABLE_SIZE; byte *u_mode_head_pos = (void *)&c->u_mode.ocb; byte *u_mode_tail_pos = (void *)&c->u_mode.ocb.tag; size_t u_mode_head_length = u_mode_tail_pos - u_mode_head_pos; size_t u_mode_tail_length = sizeof(c->u_mode.ocb) - u_mode_head_length; - memset (u_mode_tail_pos, 0, u_mode_tail_length); + if (c->u_mode.ocb.aad_nblocks < table_maxblks) + { + /* Precalculated L-values are still ok after reset, no need + * to clear. */ + memset (u_mode_tail_pos, 0, u_mode_tail_length); + } + else + { + /* Reinitialize L table. */ + memset (&c->u_mode.ocb, 0, sizeof(c->u_mode.ocb)); + _gcry_cipher_ocb_setkey (c); + } /* Setup default taglen. */ c->u_mode.ocb.taglen = 16; @@ -896,6 +934,24 @@ cipher_reset (gcry_cipher_hd_t c) c->spec->contextsize); break; + case GCRY_CIPHER_MODE_SIV: + /* Only clear head of u_mode, keep s2v_cmac and ctr_context. */ + { + byte *u_mode_pos = (void *)&c->u_mode; + byte *tail_pos = (void *)&c->u_mode.siv.s2v_cmac; + size_t u_mode_head_length = tail_pos - u_mode_pos; + + memset (&c->u_mode, 0, u_mode_head_length); + + memcpy (c->u_mode.siv.ctr_context, + c->u_mode.siv.ctr_context + c->spec->contextsize, + c->spec->contextsize); + + memcpy (c->u_mode.siv.s2v_d, c->u_mode.siv.s2v_zero_block, + GCRY_SIV_BLOCK_LEN); + } + break; + default: break; /* u_mode unused by other modes. */ } @@ -1314,6 +1370,11 @@ _gcry_cipher_setup_mode_ops(gcry_cipher_hd_t c, int mode) c->mode_ops.decrypt = _gcry_cipher_xts_decrypt; break; + case GCRY_CIPHER_MODE_SIV: + c->mode_ops.encrypt = _gcry_cipher_siv_encrypt; + c->mode_ops.decrypt = _gcry_cipher_siv_decrypt; + break; + default: c->mode_ops.encrypt = do_encrypt_none_unknown; c->mode_ops.decrypt = do_decrypt_none_unknown; @@ -1343,6 +1404,10 @@ _gcry_cipher_setup_mode_ops(gcry_cipher_hd_t c, int mode) c->mode_ops.setiv = _gcry_cipher_ocb_set_nonce; break; + case GCRY_CIPHER_MODE_SIV: + c->mode_ops.setiv = _gcry_cipher_siv_set_nonce; + break; + default: c->mode_ops.setiv = cipher_setiv; break; @@ -1388,6 +1453,12 @@ _gcry_cipher_setup_mode_ops(gcry_cipher_hd_t c, int mode) c->mode_ops.check_tag = _gcry_cipher_ocb_check_tag; break; + case GCRY_CIPHER_MODE_SIV: + c->mode_ops.authenticate = _gcry_cipher_siv_authenticate; + c->mode_ops.get_tag = _gcry_cipher_siv_get_tag; + c->mode_ops.check_tag = _gcry_cipher_siv_check_tag; + break; + default: c->mode_ops.authenticate = NULL; c->mode_ops.get_tag = NULL; @@ -1462,6 +1533,18 @@ _gcry_cipher_ctl (gcry_cipher_hd_t h, int cmd, void *buffer, size_t buflen) } break; + case GCRYCTL_SET_DECRYPTION_TAG: + { + if (!buffer) + return GPG_ERR_INV_ARG; + + if (h->mode == GCRY_CIPHER_MODE_SIV) + rc = _gcry_cipher_siv_set_decryption_tag (h, buffer, buflen); + else + rc = GPG_ERR_INV_CIPHER_MODE; + } + break; + case GCRYCTL_SET_TAGLEN: if (!h || !buffer || buflen != sizeof(int) ) return GPG_ERR_INV_ARG; @@ -1595,6 +1678,10 @@ _gcry_cipher_info (gcry_cipher_hd_t h, int cmd, void *buffer, size_t *nbytes) *nbytes = POLY1305_TAGLEN; break; + case GCRY_CIPHER_MODE_SIV: + *nbytes = GCRY_SIV_BLOCK_LEN; + break; + default: rc = GPG_ERR_INV_CIPHER_MODE; break; diff --git a/doc/gcrypt.texi b/doc/gcrypt.texi index 148a5fa2..e5c4b64e 100644 --- 
a/doc/gcrypt.texi
+++ b/doc/gcrypt.texi
@@ -1760,6 +1760,28 @@ EAX is an Authenticated Encryption with Associated Data (AEAD) block
 cipher mode by Bellare, Rogaway, and Wagner (see
 @uref{http://web.cs.ucdavis.edu/~rogaway/papers/eax.html}).

+@item GCRY_CIPHER_MODE_SIV
+@cindex SIV, SIV mode
+Synthetic Initialization Vector (SIV) is an Authenticated Encryption
+with Associated Data (AEAD) block cipher mode, which is specified in
+RFC-5297.  This mode works with block ciphers with block size of 128
+bits and uses tag length of 128 bits.  Depending on how it is used,
+SIV achieves either the goal of deterministic authenticated encryption
+or the goal of nonce-based, misuse-resistant authenticated encryption.
+
+The SIV mode requires doubling key-length, for example, using 512-bit
+key with AES-256 (@code{GCRY_CIPHER_AES256}).  Multiple AD instances can
+be passed to SIV mode with separate calls to
+@code{gcry_cipher_authenticate}.  Nonce may be passed either through
+@code{gcry_cipher_setiv} or in the last call to
+@code{gcry_cipher_authenticate}.  Note that use of @code{gcry_cipher_setiv}
+blocks any further calls to @code{gcry_cipher_authenticate} as nonce needs
+to be the last AD element with the SIV mode.  When encrypting or decrypting,
+full-sized plaintext or ciphertext needs to be passed to
+@code{gcry_cipher_encrypt} or @code{gcry_cipher_decrypt}.  Decryption tag
+needs to be given to SIV mode before decryption using
+@code{gcry_cipher_set_decryption_tag}.
+
 @end table

 @node Working with cipher handles
@@ -1794,8 +1816,9 @@ ChaCha20 stream cipher.  The block cipher modes
 @code{GCRY_CIPHER_MODE_CTR} and @code{GCRY_CIPHER_MODE_EAX}) will work
 with any block cipher algorithm.  GCM mode
 (@code{GCRY_CIPHER_MODE_GCM}), CCM mode (@code{GCRY_CIPHER_MODE_CCM}),
-OCB mode (@code{GCRY_CIPHER_MODE_OCB}), and XTS mode
-(@code{GCRY_CIPHER_MODE_XTS}) will only work with block cipher
+OCB mode (@code{GCRY_CIPHER_MODE_OCB}), XTS mode
+(@code{GCRY_CIPHER_MODE_XTS}) and SIV mode
+(@code{GCRY_CIPHER_MODE_SIV}) will only work with block cipher
 algorithms which have the block size of 16 bytes.

@@ -1988,6 +2011,16 @@ implemented as a macro.
 @end deftypefun

+
+The SIV mode requires decryption tag to be input before decryption.
+This is done with:
+
+@deftypefun gcry_error_t gcry_cipher_set_decryption_tag (gcry_cipher_hd_t @var{h}, const void *@var{tag}, size_t @var{taglen})
+
+Set decryption tag for the SIV mode decryption.  This is implemented
+as a macro.
+@end deftypefun
+
+
 OpenPGP (as defined in RFC-4880) requires a special sync operation in
 some places.  The following function is used for this:
diff --git a/src/gcrypt.h.in b/src/gcrypt.h.in
index 882f4387..99b21276 100644
--- a/src/gcrypt.h.in
+++ b/src/gcrypt.h.in
@@ -334,7 +334,8 @@ enum gcry_ctl_cmds
     GCRYCTL_GET_TAGLEN = 76,
     GCRYCTL_REINIT_SYSCALL_CLAMP = 77,
     GCRYCTL_AUTO_EXPAND_SECMEM = 78,
-    GCRYCTL_SET_ALLOW_WEAK_KEY = 79
+    GCRYCTL_SET_ALLOW_WEAK_KEY = 79,
+    GCRYCTL_SET_DECRYPTION_TAG = 80
   };

 /* Perform various operations defined by CMD. */
@@ -975,7 +976,8 @@ enum gcry_cipher_modes
     GCRY_CIPHER_MODE_OCB      = 11,  /* OCB3 mode.  */
     GCRY_CIPHER_MODE_CFB8     = 12,  /* Cipher feedback (8 bit mode). */
     GCRY_CIPHER_MODE_XTS      = 13,  /* XTS mode.  */
-    GCRY_CIPHER_MODE_EAX      = 14   /* EAX mode.  */
+    GCRY_CIPHER_MODE_EAX      = 14,  /* EAX mode.  */
+    GCRY_CIPHER_MODE_SIV      = 15   /* SIV mode.  */
   };

 /* Flags used with the open function.
*/ @@ -999,6 +1001,9 @@ enum gcry_cipher_flags /* XTS works only with blocks of 128 bits. */ #define GCRY_XTS_BLOCK_LEN (128 / 8) +/* SIV works only with blocks of 128 bits */ +#define GCRY_SIV_BLOCK_LEN (128 / 8) + /* Create a handle for algorithm ALGO to be used in MODE. FLAGS may be given as an bitwise OR of the gcry_cipher_flags values. */ gcry_error_t gcry_cipher_open (gcry_cipher_hd_t *handle, @@ -1101,6 +1106,11 @@ size_t gcry_cipher_get_algo_blklen (int algo); #define gcry_cipher_test_algo(a) \ gcry_cipher_algo_info( (a), GCRYCTL_TEST_ALGO, NULL, NULL ) +/* Setup tag for decryption (for SIV mode). */ +#define gcry_cipher_set_decryption_tag(a, tag, taglen) \ + gcry_cipher_ctl ((a), GCRYCTL_SET_DECRYPTION_TAG, \ + (void *)(tag), (taglen)) + /************************************ * * diff --git a/tests/basic.c b/tests/basic.c index 8d29c14e..989a5aca 100644 --- a/tests/basic.c +++ b/tests/basic.c @@ -4800,6 +4800,426 @@ check_eax_cipher (void) } +static void +check_siv_cipher (void) +{ + static const struct tv + { + int algo; + char key[MAX_DATA_LEN]; + char ad1[MAX_DATA_LEN]; + int ad1len; + char ad2[MAX_DATA_LEN]; + int ad2len; + char nonce[MAX_DATA_LEN]; + int noncelen; + unsigned char plaintext[MAX_DATA_LEN]; + int inlen; + char tag[MAX_DATA_LEN]; + char out[MAX_DATA_LEN]; + } tv[] = + { + /* Test vectors from RFC5297 */ + { + GCRY_CIPHER_AES128, + "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27", + 24, + "", + -1, + "", + -1, + "\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee", + 14, + "\x85\x63\x2d\x07\xc6\xe8\xf3\x7f\x95\x0a\xcd\x32\x0a\x2e\xcc\x93", + "\x40\xc0\x2b\x96\x90\xc4\xdc\x04\xda\xef\x7f\x6a\xfe\x5c" + }, + { + GCRY_CIPHER_AES128, + "\x7f\x7e\x7d\x7c\x7b\x7a\x79\x78\x77\x76\x75\x74\x73\x72\x71\x70" + "\x40\x41\x42\x43\x44\x45\x46\x47\x48\x49\x4a\x4b\x4c\x4d\x4e\x4f", + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff" + "\xde\xad\xda\xda\xde\xad\xda\xda\xff\xee\xdd\xcc\xbb\xaa\x99\x88" + "\x77\x66\x55\x44\x33\x22\x11\x00", + 40, + "\x10\x20\x30\x40\x50\x60\x70\x80\x90\xa0", + 10, + "\x09\xf9\x11\x02\x9d\x74\xe3\x5b\xd8\x41\x56\xc5\x63\x56\x88\xc0", + 16, + "\x74\x68\x69\x73\x20\x69\x73\x20\x73\x6f\x6d\x65\x20\x70\x6c\x61" + "\x69\x6e\x74\x65\x78\x74\x20\x74\x6f\x20\x65\x6e\x63\x72\x79\x70" + "\x74\x20\x75\x73\x69\x6e\x67\x20\x53\x49\x56\x2d\x41\x45\x53", + 47, + "\x7b\xdb\x6e\x3b\x43\x26\x67\xeb\x06\xf4\xd1\x4b\xff\x2f\xbd\x0f", + "\xcb\x90\x0f\x2f\xdd\xbe\x40\x43\x26\x60\x19\x65\xc8\x89\xbf\x17" + "\xdb\xa7\x7c\xeb\x09\x4f\xa6\x63\xb7\xa3\xf7\x48\xba\x8a\xf8\x29" + "\xea\x64\xad\x54\x4a\x27\x2e\x9c\x48\x5b\x62\xa3\xfd\x5c\x0d" + }, + /* From libaes_siv */ + { + GCRY_CIPHER_AES256, + "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff" + "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0", + "\x10\x11\x12\x13\x14\x15\x16\x17\x18\x19\x1a\x1b\x1c\x1d\x1e\x1f" + "\x20\x21\x22\x23\x24\x25\x26\x27", + 24, + "", + -1, + "", + -1, + "\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee", + 14, + "\x72\x4d\xfb\x2e\xaf\x94\xdb\xb1\x9b\x0b\xa3\xa2\x99\xa0\x80\x1e", + "\xf3\xb0\x5a\x55\x49\x8e\xc2\x55\x26\x90\xb8\x98\x10\xe4" + }, + /* From https://github.com/cryptomator/siv-mode */ + { 
GCRY_CIPHER_AES128, + "\x90\xe5\x90\xae\xca\x19\x70\xed\xd1\x9f\xe5\x0f\xa6\x91\xae\x12" + "\x34\x2c\x49\x7a\x22\xc2\x4f\xaa\x9e\x87\x19\x2e\x34\x00\xfb\xce", + "\x2d\xdf\x87\xac\x97\x5d\x0c", + 7, + "", + -1, + "", + -1, + "\x44", + 1, + "\x7b\x0d\xdd\x88\x74\x39\x43\xc6\x44\xc1\xd1\xa2\x18\xa3\x1e\xdf", + "\x2e" + }, + { + GCRY_CIPHER_AES128, + "\xf6\xde\x98\x19\x31\x1b\xd3\xde\x0b\xd1\x98\x70\x9d\xea\x9f\xdf" + "\xb8\x2e\x80\x44\xe4\x00\x13\x2a\x90\xff\xe9\xa9\xde\x81\x44\x75", + "\x7b\xd3\x6f\x24\x09\xfc\xd0\x0f\x5c\xcd\x9a\xf2\xe3\xf5\x76\x45" + "\xf7\xc5\x3f\x39\xf7\xad\xcb\xf0\x7a\x0e\x43\x30\x7e\x55\xa2\x53" + "\x47\x49\x48\x20\x20\x27\x6c\x8a\x20\x44\x22\xcd\x26\xbf\x7e\x89" + "\x88\x38\x0d\x94\xff\x12\xc5\x18\xfd\x20\x2c\x2a\x1b\x00\xb3", + 63, + "", + -1, + "", + -1, + "", + 0, + "\x4c\x0e\xc2\xcc\x61\x59\xb1\x17\xdb\x98\x6d\x9a\xa5\xb4\xa0\x11", + "" + }, + /* From https://github.com/RustCrypto/AEADs */ + { + GCRY_CIPHER_AES128, + "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + "", + -1, + "", + -1, + "", + -1, + "", + 0, + "\xf2\x00\x7a\x5b\xeb\x2b\x89\x00\xc5\x88\xa7\xad\xf5\x99\xf1\x72", + "" + }, + { + GCRY_CIPHER_AES128, + "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + "", + -1, + "", + -1, + "", + -1, + "\x00\x11\x22\x33\x44\x55\x66\x77\x88\x99\xaa\xbb\xcc\xdd\xee\xff", + 16, + "\xf3\x04\xf9\x12\x86\x3e\x30\x3d\x5b\x54\x0e\x50\x57\xc7\x01\x0c", + "\x94\x2f\xfa\xf4\x5b\x0e\x5c\xa5\xfb\x9a\x56\xa5\x26\x3b\xb0\x65" + }, + /* From nettle */ + { + GCRY_CIPHER_AES128, + "\xff\xfe\xfd\xfc\xfb\xfa\xf9\xf8\xf7\xf6\xf5\xf4\xf3\xf2\xf1\xf0" + "\xf0\xf1\xf2\xf3\xf4\xf5\xf6\xf7\xf8\xf9\xfa\xfb\xfc\xfd\xfe\xff", + "", + 0, + "", + -1, + "\x01", + 1, + "", + 0, + "\xc6\x96\xf8\x4f\xdf\x92\xab\xa3\xc3\x1c\x23\xd5\xf2\x08\x75\x13", + "" + }, + /* From botan */ + { + GCRY_CIPHER_AES128, + "\x2a\x83\xf6\x10\xa1\xd1\x77\xec\x2e\x00\x89\x80\xdc\x02\xa6\x6e" + "\xeb\x75\xaf\x6c\xba\x44\xa4\xe0\x9f\x3d\x93\xea\x1f\xa2\x88\x67", + "", + 0, + "", + -1, + "", + -1, + "", + 0, + "\x6b\xc5\xca\x86\x32\x29\x66\x75\x18\xa9\xab\xbd\x5a\xe6\xc1\xd5", + "" + }, + { + GCRY_CIPHER_AES128, + "\x97\xef\x57\xd4\xe2\xe9\x2f\x14\xdf\x73\x31\xfb\xa3\xd9\xf3\x58" + "\x87\xdd\xe7\xad\x86\x91\xfb\x80\x17\x68\x58\xd6\x59\x20\x14\x27", + "", + 0, + "", + -1, + "", + -1, + "\x75\x73\x97\x4d\x6f\xa7\x65\xbc\xd0\xe6\x23\x2c\x24\x0e\x82\x7e", + 16, + "\x68\x60\xa9\xc7\xbf\x4a\x6b\x21\x92\x44\xd7\xa8\xea\xa1\xf5\x0c", + "\x6f\x97\x93\x82\xcd\xe6\x8d\xe6\x0a\xb2\xad\x09\x53\x60\x64\x85" + } + }; + + gcry_cipher_hd_t hde, hdd; + unsigned char out[MAX_DATA_LEN]; + unsigned char tag[16]; + int i, keylen; + gcry_error_t err = 0; + size_t taglen2; + + if (verbose) + fprintf (stderr, " Starting SIV checks.\n"); + + for (i = 0; i < sizeof (tv) / sizeof (tv[0]); i++) + { + if (gcry_cipher_test_algo (tv[i].algo) && in_fips_mode) + { + if (verbose) + fprintf (stderr, " algorithm %d not available in fips mode\n", + tv[i].algo); + continue; + } + + if (verbose) + fprintf (stderr, " checking SIV mode for %s [%i]\n", + gcry_cipher_algo_name (tv[i].algo), + tv[i].algo); + err = gcry_cipher_open (&hde, tv[i].algo, GCRY_CIPHER_MODE_SIV, 0); + if (!err) + err = gcry_cipher_open (&hdd, tv[i].algo, GCRY_CIPHER_MODE_SIV, 0); + if (err) + { + fail ("aes-siv, gcry_cipher_open failed: %s\n", gpg_strerror (err)); + return; + } + + keylen = gcry_cipher_get_algo_keylen 
(tv[i].algo) * 2; + if (!keylen) + { + fail ("aes-siv, gcry_cipher_get_algo_keylen failed\n"); + return; + } + + err = gcry_cipher_setkey (hde, tv[i].key, keylen); + if (!err) + err = gcry_cipher_setkey (hdd, tv[i].key, keylen); + if (err) + { + fail ("aes-siv, gcry_cipher_setkey failed: %s\n", + gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + + if (tv[i].ad1len >= 0) + { + err = gcry_cipher_authenticate (hde, tv[i].ad1, tv[i].ad1len); + if (!err) + err = gcry_cipher_authenticate (hdd, tv[i].ad1, tv[i].ad1len); + if (err) + { + fail ("aes-siv, gcry_cipher_authenticate failed: %s\n", + gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + } + + if (tv[i].ad2len >= 0) + { + err = gcry_cipher_authenticate (hde, tv[i].ad2, tv[i].ad2len); + if (!err) + err = gcry_cipher_authenticate (hdd, tv[i].ad2, tv[i].ad2len); + if (err) + { + fail ("aes-siv, gcry_cipher_authenticate failed: %s\n", + gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + } + + if (tv[i].noncelen >= 0) + { + err = gcry_cipher_setiv (hde, tv[i].nonce, tv[i].noncelen); + if (!err) + err = gcry_cipher_setiv (hdd, tv[i].nonce, tv[i].noncelen); + if (err) + { + fail ("aes-siv, gcry_cipher_setiv failed: %s\n", + gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + + /* Further AD not allowed after setting nonce. */ + err = gcry_cipher_authenticate (hde, tv[i].nonce, tv[i].noncelen); + if (!err) + { + fail ("aes-siv, gcry_cipher_authenticate after setiv did not fail\n"); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + } + + err = gcry_cipher_info (hde, GCRYCTL_GET_TAGLEN, NULL, &taglen2); + if (err) + { + fail ("cipher-siv, gcryctl_get_taglen failed (tv %d): %s\n", + i, gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + if (taglen2 != 16) + { + fail ("cipher-siv, gcryctl_get_taglen returned bad length" + " (tv %d): got=%zu want=%d\n", + i, taglen2, 16); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + + if (tv[i].inlen) + { + err = gcry_cipher_encrypt (hde, out, tv[i].inlen, + tv[i].plaintext, tv[i].inlen); + if (err) + { + fail ("aes-siv, gcry_cipher_encrypt (%d) failed: %s\n", + i, gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + + if (memcmp (tv[i].out, out, tv[i].inlen)) + { + mismatch (tv[i].out, tv[i].inlen, out, tv[i].inlen); + fail ("aes-siv, encrypt mismatch entry %d\n", i); + } + + err = gcry_cipher_gettag (hde, tag, taglen2); + if (err) + { + fail ("aes-siv, gcry_cipher_gettag(%d) failed: %s\n", + i, gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + + if (memcmp (tv[i].tag, tag, taglen2)) + { + mismatch (tv[i].tag, taglen2, tag, taglen2); + fail ("aes-siv, tag mismatch entry %d\n", i); + } + + err = gcry_cipher_set_decryption_tag (hdd, tag, taglen2); + if (err) + { + fail ("aes-siv, gcry_cipher_set_decryption_tag (%d) failed: %s\n", + i, gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + + err = gcry_cipher_decrypt (hdd, out, tv[i].inlen, NULL, 0); + if (err) + { + fail ("aes-siv, gcry_cipher_decrypt (%d) failed: %s\n", + i, gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + + if (memcmp (tv[i].plaintext, out, tv[i].inlen)) + fail ("aes-siv, decrypt mismatch entry %d\n", i); + + err = gcry_cipher_checktag (hdd, tag, taglen2); + 
if (err) + { + fail ("aes-siv, gcry_cipher_checktag (%d) failed: %s\n", + i, gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + } + else + { + err = gcry_cipher_gettag (hde, tag, taglen2); + if (err) + { + fail ("aes-siv, gcry_cipher_gettag(%d) failed: %s\n", + i, gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + + if (memcmp (tv[i].tag, tag, taglen2)) + { + mismatch (tv[i].tag, taglen2, tag, taglen2); + fail ("aes-siv, tag mismatch entry %d\n", i); + } + + err = gcry_cipher_checktag (hdd, tv[i].tag, taglen2); + if (err) + { + fail ("aes-siv, gcry_cipher_checktag (%d) failed: %s\n", + i, gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + } + + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + } + if (verbose) + fprintf (stderr, " Completed SIV checks.\n"); +} + + static void _check_poly1305_cipher (unsigned int step) { @@ -10133,6 +10553,7 @@ check_cipher_modes(void) check_ocb_cipher (); check_xts_cipher (); check_eax_cipher (); + check_siv_cipher (); check_gost28147_cipher (); check_stream_cipher (); check_stream_cipher_large_block (); diff --git a/tests/bench-slope.c b/tests/bench-slope.c index d1b7f24f..91eb7cc5 100644 --- a/tests/bench-slope.c +++ b/tests/bench-slope.c @@ -966,6 +966,11 @@ bench_encrypt_init (struct bench_obj *obj) } keylen = gcry_cipher_get_algo_keylen (mode->algo); + if (mode->mode == GCRY_CIPHER_MODE_SIV) + { + keylen *= 2; + } + if (keylen) { char key[keylen]; @@ -1290,6 +1295,7 @@ bench_aead_encrypt_do_bench (struct bench_obj *obj, void *buf, size_t buflen, int err; char tag[16]; + gcry_cipher_reset (hd); gcry_cipher_setiv (hd, nonce, noncelen); gcry_cipher_final (hd); @@ -1320,13 +1326,18 @@ bench_aead_decrypt_do_bench (struct bench_obj *obj, void *buf, size_t buflen, int err; char tag[16] = { 0, }; + gcry_cipher_reset (hd); + gcry_cipher_set_decryption_tag (hd, tag, 16); + gcry_cipher_setiv (hd, nonce, noncelen); gcry_cipher_final (hd); err = gcry_cipher_decrypt (hd, buf, buflen, buf, buflen); + if (gpg_err_code (err) == GPG_ERR_CHECKSUM) + err = gpg_error (GPG_ERR_NO_ERROR); if (err) { - fprintf (stderr, PGM ": gcry_cipher_encrypt failed: %s\n", + fprintf (stderr, PGM ": gcry_cipher_decrypt failed: %s\n", gpg_strerror (err)); gcry_cipher_close (hd); exit (1); @@ -1354,13 +1365,18 @@ bench_aead_authenticate_do_bench (struct bench_obj *obj, void *buf, char tag[16] = { 0, }; char data = 0xff; - err = gcry_cipher_setiv (hd, nonce, noncelen); - if (err) + gcry_cipher_reset (hd); + + if (noncelen > 0) { - fprintf (stderr, PGM ": gcry_cipher_setiv failed: %s\n", - gpg_strerror (err)); - gcry_cipher_close (hd); - exit (1); + err = gcry_cipher_setiv (hd, nonce, noncelen); + if (err) + { + fprintf (stderr, PGM ": gcry_cipher_setiv failed: %s\n", + gpg_strerror (err)); + gcry_cipher_close (hd); + exit (1); + } } err = gcry_cipher_authenticate (hd, buf, buflen); @@ -1487,6 +1503,47 @@ static struct bench_ops ocb_authenticate_ops = { &bench_ocb_authenticate_do_bench }; + +static void +bench_siv_encrypt_do_bench (struct bench_obj *obj, void *buf, + size_t buflen) +{ + bench_aead_encrypt_do_bench (obj, buf, buflen, NULL, 0); +} + +static void +bench_siv_decrypt_do_bench (struct bench_obj *obj, void *buf, + size_t buflen) +{ + bench_aead_decrypt_do_bench (obj, buf, buflen, NULL, 0); +} + +static void +bench_siv_authenticate_do_bench (struct bench_obj *obj, void *buf, + size_t buflen) +{ + bench_aead_authenticate_do_bench (obj, buf, buflen, NULL, 0); +} + 
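+/* Wrappers to benchmark SIV through the generic AEAD helpers above;
+ * a zero-length nonce is passed, since SIV derives its synthetic IV
+ * from key, AD and plaintext. */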
+static struct bench_ops siv_encrypt_ops = { + &bench_encrypt_init, + &bench_encrypt_free, + &bench_siv_encrypt_do_bench +}; + +static struct bench_ops siv_decrypt_ops = { + &bench_encrypt_init, + &bench_encrypt_free, + &bench_siv_decrypt_do_bench +}; + +static struct bench_ops siv_authenticate_ops = { + &bench_encrypt_init, + &bench_encrypt_free, + &bench_siv_authenticate_do_bench +}; + + static void bench_eax_encrypt_do_bench (struct bench_obj *obj, void *buf, size_t buflen) @@ -1603,6 +1660,9 @@ static struct bench_cipher_mode cipher_modes[] = { {GCRY_CIPHER_MODE_OCB, "OCB enc", &ocb_encrypt_ops}, {GCRY_CIPHER_MODE_OCB, "OCB dec", &ocb_decrypt_ops}, {GCRY_CIPHER_MODE_OCB, "OCB auth", &ocb_authenticate_ops}, + {GCRY_CIPHER_MODE_SIV, "SIV enc", &siv_encrypt_ops}, + {GCRY_CIPHER_MODE_SIV, "SIV dec", &siv_decrypt_ops}, + {GCRY_CIPHER_MODE_SIV, "SIV auth", &siv_authenticate_ops}, {GCRY_CIPHER_MODE_POLY1305, "POLY1305 enc", &poly1305_encrypt_ops}, {GCRY_CIPHER_MODE_POLY1305, "POLY1305 dec", &poly1305_decrypt_ops}, {GCRY_CIPHER_MODE_POLY1305, "POLY1305 auth", &poly1305_authenticate_ops}, @@ -1651,6 +1711,10 @@ cipher_bench_one (int algo, struct bench_cipher_mode *pmode) if (mode.mode == GCRY_CIPHER_MODE_XTS && blklen != GCRY_XTS_BLOCK_LEN) return; + /* SIV has restrictions for block-size */ + if (mode.mode == GCRY_CIPHER_MODE_SIV && blklen != GCRY_SIV_BLOCK_LEN) + return; + /* Our OCB implementation has restrictions for block-size. */ if (mode.mode == GCRY_CIPHER_MODE_OCB && blklen != GCRY_OCB_BLOCK_LEN) return; -- 2.30.2 From jussi.kivilinna at iki.fi Fri Aug 13 17:01:27 2021 From: jussi.kivilinna at iki.fi (Jussi Kivilinna) Date: Fri, 13 Aug 2021 18:01:27 +0300 Subject: [PATCH 2/4] Add AES-GCM-SIV mode (RFC 8452) In-Reply-To: <20210813150129.3175810-1-jussi.kivilinna@iki.fi> References: <20210813150129.3175810-1-jussi.kivilinna@iki.fi> Message-ID: <20210813150129.3175810-2-jussi.kivilinna@iki.fi> * cipher/Makefile.am: Add 'cipher-gcm-siv.c'. * cipher/cipher-gcm-siv.c: New. * cipher/cipher-gcm.c (_gcry_cipher_gcm_setupM): New. * cipher/cipher-internal.h (gcry_cipher_handle): Add 'siv_keylen'. (_gcry_cipher_gcm_setupM, _gcry_cipher_gcm_siv_encrypt) (_gcry_cipher_gcm_siv_decrypt, _gcry_cipher_gcm_siv_set_nonce) (_gcry_cipher_gcm_siv_authenticate) (_gcry_cipher_gcm_siv_set_decryption_tag) (_gcry_cipher_gcm_siv_get_tag, _gcry_cipher_gcm_siv_check_tag) (_gcry_cipher_gcm_siv_setkey): New prototypes. (cipher_block_bswap): New helper function. * cipher/cipher.c (_gcry_cipher_open_internal): Add 'GCRY_CIPHER_MODE_GCM_SIV'; Refactor mode requirement checks for better size optimization (check pointers & blocksize in same order for all). (cipher_setkey, cipher_reset, _gcry_cipher_setup_mode_ops) (_gcry_cipher_setup_mode_ops, _gcry_cipher_info): Add GCM-SIV. (_gcry_cipher_ctl): Handle 'set decryption tag' for GCM-SIV. * doc/gcrypt.texi: Add GCM-SIV. * src/gcrypt.h.in (GCRY_CIPHER_MODE_GCM_SIV): New. (GCRY_SIV_BLOCK_LEN, gcry_cipher_set_decryption_tag): Add to comment that these are also for GCM-SIV in addition to SIV mode. Signed-off-by: Jussi Kivilinna * tests/basic.c (check_gcm_siv_cipher): New. (check_cipher_modes): Check for GCM-SIV. * tests/bench-slope.c (bench_gcm_siv_encrypt_do_bench) (bench_gcm_siv_decrypt_do_bench, bench_gcm_siv_authenticate_do_bench) (gcm_siv_encrypt_ops, gcm_siv_decrypt_ops) (gcm_siv_authenticate_ops): New. (cipher_modes): Add GCM-SIV. (cipher_bench_one): Check key length requirement for GCM-SIV. 
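--

GCM-SIV reuses the GCM machinery: POLYVAL is implemented on top of the
existing GHASH code by byte-swapping the blocks and multiplying the
authentication key by x (mulx_ghash), following the relation given in
RFC 8452, Appendix A.  Fresh message-authentication and
message-encryption keys are derived from the key-generating key and
the 96-bit nonce on every gcry_cipher_setiv, and the authentication
tag (with the top bit of its last byte set) doubles as the initial
counter block.  Because the counter is derived from the tag, the tag
has to be supplied with gcry_cipher_set_decryption_tag before any
ciphertext can be decrypted.

A minimal usage sketch against the new public API (illustrative key,
nonce and AD values; error handling omitted):

    #include <gcrypt.h>

    static void
    gcm_siv_roundtrip (void)
    {
      gcry_cipher_hd_t hd;
      const unsigned char key[32] = { 0x01 };    /* Key-generating key. */
      const unsigned char nonce[12] = { 0x02 };  /* Fixed 96-bit nonce. */
      unsigned char buf[16] = "test message!!!";
      unsigned char tag[16];

      gcry_cipher_open (&hd, GCRY_CIPHER_AES256, GCRY_CIPHER_MODE_GCM_SIV, 0);
      gcry_cipher_setkey (hd, key, sizeof(key));  /* Not doubled, unlike SIV. */
      gcry_cipher_setiv (hd, nonce, sizeof(nonce));
      gcry_cipher_authenticate (hd, "ad", 2);
      gcry_cipher_encrypt (hd, buf, sizeof(buf), NULL, 0);  /* In-place. */
      gcry_cipher_gettag (hd, tag, sizeof(tag));

      /* Decryption; the tag must be set before the data is processed
       * and gcry_cipher_decrypt fails with GPG_ERR_CHECKSUM on mismatch. */
      gcry_cipher_reset (hd);
      gcry_cipher_set_decryption_tag (hd, tag, sizeof(tag));
      gcry_cipher_setiv (hd, nonce, sizeof(nonce));
      gcry_cipher_authenticate (hd, "ad", 2);
      gcry_cipher_decrypt (hd, buf, sizeof(buf), NULL, 0);
      gcry_cipher_close (hd);
    }
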
Signed-off-by: Jussi Kivilinna
---
 cipher/Makefile.am       |   1 +
 cipher/cipher-gcm-siv.c  | 637 +++++++++++++++++++++++++++
 cipher/cipher-gcm.c      |   7 +
 cipher/cipher-internal.h |  56 ++-
 cipher/cipher-siv.c      |   2 +-
 cipher/cipher.c          |  69 ++-
 doc/gcrypt.texi          |  29 +-
 src/gcrypt.h.in          |   7 +-
 tests/basic.c            | 914 +++++++++++++++++++++++++++++++++++++++
 tests/bench-slope.c      |  62 +++
 10 files changed, 1759 insertions(+), 25 deletions(-)
 create mode 100644 cipher/cipher-gcm-siv.c

diff --git a/cipher/Makefile.am b/cipher/Makefile.am
index 4d3e0d19..801e726a 100644
--- a/cipher/Makefile.am
+++ b/cipher/Makefile.am
@@ -54,6 +54,7 @@ libcipher_la_SOURCES = \
 	cipher-xts.c \
 	cipher-eax.c \
 	cipher-siv.c \
+	cipher-gcm-siv.c \
 	cipher-selftest.c cipher-selftest.h \
 	pubkey.c pubkey-internal.h pubkey-util.c \
 	md.c \
diff --git a/cipher/cipher-gcm-siv.c b/cipher/cipher-gcm-siv.c
new file mode 100644
index 00000000..b735d199
--- /dev/null
+++ b/cipher/cipher-gcm-siv.c
@@ -0,0 +1,637 @@
+/* cipher-gcm-siv.c - GCM-SIV implementation (RFC 8452)
+ * Copyright (C) 2021 Jussi Kivilinna
+ *
+ * This file is part of Libgcrypt.
+ *
+ * Libgcrypt is free software; you can redistribute it and/or modify
+ * it under the terms of the GNU Lesser General Public License as
+ * published by the Free Software Foundation; either version 2.1 of
+ * the License, or (at your option) any later version.
+ *
+ * Libgcrypt is distributed in the hope that it will be useful,
+ * but WITHOUT ANY WARRANTY; without even the implied warranty of
+ * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
+ * GNU Lesser General Public License for more details.
+ *
+ * You should have received a copy of the GNU Lesser General Public
+ * License along with this program; if not, see <http://www.gnu.org/licenses/>.
+ */
+
+#include <config.h>
+#include <stdio.h>
+#include <stdlib.h>
+#include <string.h>
+#include <errno.h>
+
+#include "g10lib.h"
+#include "cipher.h"
+#include "bufhelp.h"
+#include "./cipher-internal.h"
+
+
+#define GCM_SIV_NONCE_LENGTH (96 / 8)
+
+
+static inline void
+mulx_ghash (byte *a)
+{
+  u64 t[2], mask;
+
+  t[0] = buf_get_be64(a + 0);
+  t[1] = buf_get_be64(a + 8);
+  mask = -(t[1] & 1) & 0xe1;
+  mask <<= 56;
+
+  buf_put_be64(a + 8, (t[1] >> 1) ^ (t[0] << 63));
+  buf_put_be64(a + 0, (t[0] >> 1) ^ mask);
+}
+
+
+static inline void
+gcm_siv_bytecounter_add (u32 ctr[2], size_t add)
+{
+  if (sizeof(add) > sizeof(u32))
+    {
+      u32 high_add = ((add >> 31) >> 1) & 0xffffffff;
+      ctr[1] += high_add;
+    }
+
+  ctr[0] += add;
+  if (ctr[0] >= add)
+    return;
+  ++ctr[1];
+}
+
+
+static inline int
+gcm_siv_check_len (u32 ctr[2])
+{
+  /* len(plaintext/aadlen) <= 2^39-256 bits == 2^36-32 bytes == 2^32-2 blocks */
+  if (ctr[1] > 0xfU)
+    return 0;
+  if (ctr[1] < 0xfU)
+    return 1;
+
+  if (ctr[0] <= 0xffffffe0U)
+    return 1;
+
+  return 0;
+}
+
+
+static void
+polyval_set_key (gcry_cipher_hd_t c, const byte *auth_key)
+{
+  cipher_block_bswap (c->u_mode.gcm.u_ghash_key.key, auth_key,
+                      GCRY_SIV_BLOCK_LEN);
+  mulx_ghash (c->u_mode.gcm.u_ghash_key.key);
+  _gcry_cipher_gcm_setupM (c);
+}
+
+
+static void
+do_polyval_buf(gcry_cipher_hd_t c, byte *hash, const byte *buf,
+               size_t buflen, int do_padding)
+{
+  unsigned int blocksize = GCRY_SIV_BLOCK_LEN;
+  unsigned int unused = c->u_mode.gcm.mac_unused;
+  ghash_fn_t ghash_fn = c->u_mode.gcm.ghash_fn;
+  byte tmp_blocks[16][GCRY_SIV_BLOCK_LEN];
+  size_t nblocks, n;
+  unsigned int burn = 0, nburn;
+  unsigned int num_blks_used = 0;
+
+  if (buflen == 0 && (unused == 0 || !do_padding))
+    return;
+
+  do
+    {
+      if (buflen > 0 && (buflen + unused < blocksize || unused > 0))
+        {
+          n = blocksize - unused;
+          n
= n < buflen ? n : buflen; + + buf_cpy (&c->u_mode.gcm.macbuf[unused], buf, n); + + unused += n; + buf += n; + buflen -= n; + } + if (!buflen) + { + if (!do_padding && unused < blocksize) + { + break; + } + + n = blocksize - unused; + if (n > 0) + { + memset (&c->u_mode.gcm.macbuf[unused], 0, n); + unused = blocksize; + } + } + + if (unused > 0) + { + gcry_assert (unused == blocksize); + + /* Process one block from macbuf. */ + cipher_block_bswap (c->u_mode.gcm.macbuf, c->u_mode.gcm.macbuf, + blocksize); + nburn = ghash_fn (c, hash, c->u_mode.gcm.macbuf, 1); + burn = nburn > burn ? nburn : burn; + unused = 0; + } + + nblocks = buflen / blocksize; + + while (nblocks) + { + for (n = 0; n < (nblocks > 16 ? 16 : nblocks); n++) + cipher_block_bswap (tmp_blocks[n], buf + n * blocksize, blocksize); + + num_blks_used = n > num_blks_used ? n : num_blks_used; + + nburn = ghash_fn (c, hash, tmp_blocks[0], n); + burn = nburn > burn ? nburn : burn; + buf += n * blocksize; + buflen -= n * blocksize; + nblocks -= n; + } + } + while (buflen > 0); + + c->u_mode.gcm.mac_unused = unused; + + if (num_blks_used) + wipememory (tmp_blocks, num_blks_used * blocksize); + if (burn) + _gcry_burn_stack (burn); +} + + +static void +do_ctr_le32 (gcry_cipher_hd_t c, byte *outbuf, const byte *inbuf, + size_t inbuflen) +{ + gcry_cipher_encrypt_t enc_fn = c->spec->encrypt; + unsigned char tmp[GCRY_SIV_BLOCK_LEN]; + unsigned int burn = 0, nburn; + size_t n; + + if (inbuflen == 0) + return; + + n = GCRY_SIV_BLOCK_LEN; + do + { + nburn = enc_fn (c->context.c, tmp, c->u_ctr.ctr); + burn = nburn > burn ? nburn : burn; + + buf_put_le32(c->u_ctr.ctr, buf_get_le32(c->u_ctr.ctr) + 1); + + if (inbuflen < GCRY_SIV_BLOCK_LEN) + break; + cipher_block_xor(outbuf, inbuf, tmp, GCRY_SIV_BLOCK_LEN); + + inbuflen -= n; + outbuf += n; + inbuf += n; + } + while (inbuflen); + + if (inbuflen) + { + n = inbuflen; + buf_xor(outbuf, inbuf, tmp, inbuflen); + + inbuflen -= n; + outbuf += n; + inbuf += n; + } + + wipememory (tmp, sizeof(tmp)); + + if (burn > 0) + _gcry_burn_stack (burn + 4 * sizeof(void *)); +} + + +static int +gcm_siv_selftest (gcry_cipher_hd_t c) +{ + static const byte in1[GCRY_SIV_BLOCK_LEN] = + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"; + static const byte out1[GCRY_SIV_BLOCK_LEN] = + "\x00\x80\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00"; + static const byte in2[GCRY_SIV_BLOCK_LEN] = + "\x9c\x98\xc0\x4d\xf9\x38\x7d\xed\x82\x81\x75\xa9\x2b\xa6\x52\xd8"; + static const byte out2[GCRY_SIV_BLOCK_LEN] = + "\x4e\x4c\x60\x26\xfc\x9c\x3e\xf6\xc1\x40\xba\xd4\x95\xd3\x29\x6c"; + static const byte polyval_key[GCRY_SIV_BLOCK_LEN] = + "\x25\x62\x93\x47\x58\x92\x42\x76\x1d\x31\xf8\x26\xba\x4b\x75\x7b"; + static const byte ghash_key[GCRY_SIV_BLOCK_LEN] = + "\xdc\xba\xa5\xdd\x13\x7c\x18\x8e\xbb\x21\x49\x2c\x23\xc9\xb1\x12"; + static const byte polyval_data[GCRY_SIV_BLOCK_LEN * 2] = + "\x4f\x4f\x95\x66\x8c\x83\xdf\xb6\x40\x17\x62\xbb\x2d\x01\xa2\x62" + "\xd1\xa2\x4d\xdd\x27\x21\xd0\x06\xbb\xe4\x5f\x20\xd3\xc9\xf3\x62"; + static const byte polyval_tag[GCRY_SIV_BLOCK_LEN] = + "\xf7\xa3\xb4\x7b\x84\x61\x19\xfa\xe5\xb7\x86\x6c\xf5\xe5\xb7\x7e"; + byte tmp[GCRY_SIV_BLOCK_LEN]; + + /* Test mulx_ghash */ + memcpy (tmp, in1, GCRY_SIV_BLOCK_LEN); + mulx_ghash (tmp); + if (memcmp (tmp, out1, GCRY_SIV_BLOCK_LEN) != 0) + return -1; + + memcpy (tmp, in2, GCRY_SIV_BLOCK_LEN); + mulx_ghash (tmp); + if (memcmp (tmp, out2, GCRY_SIV_BLOCK_LEN) != 0) + return -1; + + /* Test GHASH key generation */ + memcpy (tmp, polyval_key, 
GCRY_SIV_BLOCK_LEN); + cipher_block_bswap (tmp, tmp, GCRY_SIV_BLOCK_LEN); + mulx_ghash (tmp); + if (memcmp (tmp, ghash_key, GCRY_SIV_BLOCK_LEN) != 0) + return -1; + + /* Test POLYVAL */ + memset (&c->u_mode.gcm, 0, sizeof(c->u_mode.gcm)); + polyval_set_key (c, polyval_key); + memset (&tmp, 0, sizeof(tmp)); + do_polyval_buf (c, tmp, polyval_data, GCRY_SIV_BLOCK_LEN * 2, 1); + cipher_block_bswap (tmp, tmp, GCRY_SIV_BLOCK_LEN); + if (memcmp (tmp, polyval_tag, GCRY_SIV_BLOCK_LEN) != 0) + return -1; + + return 0; +} + + +gcry_err_code_t +_gcry_cipher_gcm_siv_setkey (gcry_cipher_hd_t c, unsigned int keylen) +{ + static int done; + + if (keylen != 16 && keylen != 32) + return GPG_ERR_INV_KEYLEN; + + if (!done) + { + if (gcm_siv_selftest (c)) + return GPG_ERR_SELFTEST_FAILED; + + done = 1; + } + + c->marks.iv = 0; + c->marks.tag = 0; + memset (&c->u_mode.gcm, 0, sizeof(c->u_mode.gcm)); + c->u_mode.gcm.siv_keylen = keylen; + return 0; +} + + +gcry_err_code_t +_gcry_cipher_gcm_siv_set_nonce (gcry_cipher_hd_t c, const byte *iv, + size_t ivlen) +{ + byte auth_key[GCRY_SIV_BLOCK_LEN]; + byte tmp_in[GCRY_SIV_BLOCK_LEN]; + byte tmp[GCRY_SIV_BLOCK_LEN]; + byte enc_key[32]; + gcry_err_code_t err; + + if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN) + return GPG_ERR_CIPHER_ALGO; + if (ivlen != GCM_SIV_NONCE_LENGTH) + return GPG_ERR_INV_ARG; + if (c->u_mode.gcm.siv_keylen == 0) + return GPG_ERR_INV_STATE; + if (c->marks.iv) + { + /* If nonce is already set, use cipher_reset or setkey first to reset + * cipher state. */ + return GPG_ERR_INV_STATE; + } + + memset (c->u_mode.gcm.aadlen, 0, sizeof(c->u_mode.gcm.aadlen)); + memset (c->u_mode.gcm.datalen, 0, sizeof(c->u_mode.gcm.datalen)); + memset (c->u_mode.gcm.u_tag.tag, 0, sizeof(c->u_mode.gcm.u_tag.tag)); + c->u_mode.gcm.datalen_over_limits = 0; + c->u_mode.gcm.ghash_data_finalized = 0; + c->u_mode.gcm.ghash_aad_finalized = 0; + + memset (c->u_iv.iv, 0, GCRY_SIV_BLOCK_LEN); + memcpy (c->u_iv.iv, iv, ivlen); + memcpy (tmp_in + 4, iv, ivlen); + + /* Derive message authentication key */ + buf_put_le32(tmp_in, 0); + c->spec->encrypt (&c->context.c, tmp, tmp_in); + memcpy (auth_key + 0, tmp, 8); + + buf_put_le32(tmp_in, 1); + c->spec->encrypt (&c->context.c, tmp, tmp_in); + memcpy (auth_key + 8, tmp, 8); + + polyval_set_key (c, auth_key); + wipememory (auth_key, sizeof(auth_key)); + + /* Derive message encryption key */ + buf_put_le32(tmp_in, 2); + c->spec->encrypt (&c->context.c, tmp, tmp_in); + memcpy (enc_key + 0, tmp, 8); + + buf_put_le32(tmp_in, 3); + c->spec->encrypt (&c->context.c, tmp, tmp_in); + memcpy (enc_key + 8, tmp, 8); + + if (c->u_mode.gcm.siv_keylen >= 24) + { + buf_put_le32(tmp_in, 4); + c->spec->encrypt (&c->context.c, tmp, tmp_in); + memcpy (enc_key + 16, tmp, 8); + } + + if (c->u_mode.gcm.siv_keylen >= 32) + { + buf_put_le32(tmp_in, 5); + c->spec->encrypt (&c->context.c, tmp, tmp_in); + memcpy (enc_key + 24, tmp, 8); + } + + wipememory (tmp, sizeof(tmp)); + wipememory (tmp_in, sizeof(tmp_in)); + + err = c->spec->setkey (&c->context.c, enc_key, c->u_mode.gcm.siv_keylen, + &c->bulk); + wipememory (enc_key, sizeof(enc_key)); + if (err) + return err; + + c->marks.iv = 1; + return 0; +} + + +gcry_err_code_t +_gcry_cipher_gcm_siv_authenticate (gcry_cipher_hd_t c, + const byte *aadbuf, size_t aadbuflen) +{ + if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN) + return GPG_ERR_CIPHER_ALGO; + if (c->u_mode.gcm.datalen_over_limits) + return GPG_ERR_INV_LENGTH; + if (c->marks.tag + || !c->marks.iv + || c->u_mode.gcm.ghash_aad_finalized + || 
c->u_mode.gcm.ghash_data_finalized + || !c->u_mode.gcm.ghash_fn) + return GPG_ERR_INV_STATE; + + gcm_siv_bytecounter_add (c->u_mode.gcm.aadlen, aadbuflen); + if (!gcm_siv_check_len (c->u_mode.gcm.aadlen)) + { + c->u_mode.gcm.datalen_over_limits = 1; + return GPG_ERR_INV_LENGTH; + } + + do_polyval_buf (c, c->u_mode.gcm.u_tag.tag, aadbuf, aadbuflen, 0); + + return 0; +} + + +gcry_err_code_t +_gcry_cipher_gcm_siv_encrypt (gcry_cipher_hd_t c, + byte *outbuf, size_t outbuflen, + const byte *inbuf, size_t inbuflen) +{ + u32 bitlengths[2][2]; + + if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN) + return GPG_ERR_CIPHER_ALGO; + if (outbuflen < inbuflen) + return GPG_ERR_BUFFER_TOO_SHORT; + if (c->u_mode.gcm.datalen_over_limits) + return GPG_ERR_INV_LENGTH; + if (c->marks.tag + || !c->marks.iv + || c->u_mode.gcm.ghash_data_finalized + || !c->u_mode.gcm.ghash_fn) + return GPG_ERR_INV_STATE; + + if (!c->u_mode.gcm.ghash_aad_finalized) + { + /* Start of encryption marks end of AAD stream. */ + do_polyval_buf(c, c->u_mode.gcm.u_tag.tag, NULL, 0, 1); + c->u_mode.gcm.ghash_aad_finalized = 1; + } + + gcm_siv_bytecounter_add (c->u_mode.gcm.datalen, inbuflen); + if (!gcm_siv_check_len (c->u_mode.gcm.datalen)) + { + c->u_mode.gcm.datalen_over_limits = 1; + return GPG_ERR_INV_LENGTH; + } + + /* Plaintext and padding to POLYVAL. */ + do_polyval_buf (c, c->u_mode.gcm.u_tag.tag, inbuf, inbuflen, 1); + c->u_mode.gcm.ghash_data_finalized = 1; + + /* aad length */ + bitlengths[0][0] = le_bswap32(c->u_mode.gcm.aadlen[0] << 3); + bitlengths[0][1] = le_bswap32((c->u_mode.gcm.aadlen[0] >> 29) | + (c->u_mode.gcm.aadlen[1] << 3)); + /* data length */ + bitlengths[1][0] = le_bswap32(c->u_mode.gcm.datalen[0] << 3); + bitlengths[1][1] = le_bswap32((c->u_mode.gcm.datalen[0] >> 29) | + (c->u_mode.gcm.datalen[1] << 3)); + + /* Length block to POLYVAL. */ + do_polyval_buf(c, c->u_mode.gcm.u_tag.tag, (byte *)bitlengths, + GCRY_SIV_BLOCK_LEN, 1); + wipememory (bitlengths, sizeof(bitlengths)); + + /* Prepare tag and counter. 
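+   * Per RFC 8452: byte-swap the POLYVAL result, XOR it with the nonce,
+   * clear the most significant bit of the last byte and encrypt the
+   * block to obtain the tag; the initial counter block is the tag with
+   * that bit set again.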
*/ + cipher_block_bswap (c->u_mode.gcm.u_tag.tag, c->u_mode.gcm.u_tag.tag, + GCRY_SIV_BLOCK_LEN); + cipher_block_xor (c->u_mode.gcm.tagiv, c->u_iv.iv, c->u_mode.gcm.u_tag.tag, + GCRY_SIV_BLOCK_LEN); + c->u_mode.gcm.tagiv[GCRY_SIV_BLOCK_LEN - 1] &= 0x7f; + c->spec->encrypt (&c->context.c, c->u_mode.gcm.tagiv, c->u_mode.gcm.tagiv); + c->marks.tag = 1; + memcpy (c->u_ctr.ctr, c->u_mode.gcm.tagiv, GCRY_SIV_BLOCK_LEN); + c->u_ctr.ctr[GCRY_SIV_BLOCK_LEN - 1] |= 0x80; + + /* Encrypt data */ + do_ctr_le32 (c, outbuf, inbuf, inbuflen); + return 0; +} + + +gcry_err_code_t +_gcry_cipher_gcm_siv_set_decryption_tag (gcry_cipher_hd_t c, + const byte *tag, size_t taglen) +{ + if (taglen != GCRY_SIV_BLOCK_LEN) + return GPG_ERR_INV_ARG; + if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN) + return GPG_ERR_CIPHER_ALGO; + if (c->marks.tag) + return GPG_ERR_INV_STATE; + + memcpy (c->u_mode.gcm.tagiv, tag, GCRY_SIV_BLOCK_LEN); + c->marks.tag = 1; + + return 0; +} + + +gcry_err_code_t +_gcry_cipher_gcm_siv_decrypt (gcry_cipher_hd_t c, + byte *outbuf, size_t outbuflen, + const byte *inbuf, size_t inbuflen) +{ + byte expected_tag[GCRY_SIV_BLOCK_LEN]; + u32 bitlengths[2][2]; + gcry_err_code_t rc = 0; + + if (c->spec->blocksize != GCRY_SIV_BLOCK_LEN) + return GPG_ERR_CIPHER_ALGO; + if (outbuflen < inbuflen) + return GPG_ERR_BUFFER_TOO_SHORT; + if (c->u_mode.gcm.datalen_over_limits) + return GPG_ERR_INV_LENGTH; + if (!c->marks.tag + || !c->marks.iv + || c->u_mode.gcm.ghash_data_finalized + || !c->u_mode.gcm.ghash_fn) + return GPG_ERR_INV_STATE; + + if (!c->u_mode.gcm.ghash_aad_finalized) + { + /* Start of encryption marks end of AAD stream. */ + do_polyval_buf(c, c->u_mode.gcm.u_tag.tag, NULL, 0, 1); + c->u_mode.gcm.ghash_aad_finalized = 1; + } + + gcm_siv_bytecounter_add (c->u_mode.gcm.datalen, inbuflen); + if (!gcm_siv_check_len (c->u_mode.gcm.datalen)) + { + c->u_mode.gcm.datalen_over_limits = 1; + return GPG_ERR_INV_LENGTH; + } + + /* Prepare counter. */ + memcpy (c->u_ctr.ctr, c->u_mode.gcm.tagiv, GCRY_SIV_BLOCK_LEN); + c->u_ctr.ctr[GCRY_SIV_BLOCK_LEN - 1] |= 0x80; + + /* Decrypt data. */ + do_ctr_le32 (c, outbuf, inbuf, inbuflen); + + /* Plaintext and padding to POLYVAL. */ + do_polyval_buf (c, c->u_mode.gcm.u_tag.tag, outbuf, inbuflen, 1); + c->u_mode.gcm.ghash_data_finalized = 1; + + /* aad length */ + bitlengths[0][0] = le_bswap32(c->u_mode.gcm.aadlen[0] << 3); + bitlengths[0][1] = le_bswap32((c->u_mode.gcm.aadlen[0] >> 29) | + (c->u_mode.gcm.aadlen[1] << 3)); + /* data length */ + bitlengths[1][0] = le_bswap32(c->u_mode.gcm.datalen[0] << 3); + bitlengths[1][1] = le_bswap32((c->u_mode.gcm.datalen[0] >> 29) | + (c->u_mode.gcm.datalen[1] << 3)); + + /* Length block to POLYVAL. */ + do_polyval_buf(c, c->u_mode.gcm.u_tag.tag, (byte *)bitlengths, + GCRY_SIV_BLOCK_LEN, 1); + wipememory (bitlengths, sizeof(bitlengths)); + + /* Prepare tag. 
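+   * Recompute the expected tag from the decrypted plaintext exactly as
+   * on the encryption path; it is compared below against the tag
+   * supplied by the caller in constant time.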
*/ + cipher_block_bswap (c->u_mode.gcm.u_tag.tag, c->u_mode.gcm.u_tag.tag, + GCRY_SIV_BLOCK_LEN); + cipher_block_xor (expected_tag, c->u_iv.iv, c->u_mode.gcm.u_tag.tag, + GCRY_SIV_BLOCK_LEN); + expected_tag[GCRY_SIV_BLOCK_LEN - 1] &= 0x7f; + c->spec->encrypt (&c->context.c, expected_tag, expected_tag); + + if (!buf_eq_const(c->u_mode.gcm.tagiv, expected_tag, GCRY_SIV_BLOCK_LEN)) + { + wipememory (outbuf, inbuflen); + rc = GPG_ERR_CHECKSUM; + } + + wipememory (expected_tag, sizeof(expected_tag)); + return rc; +} + + +static gcry_err_code_t +_gcry_cipher_gcm_siv_tag (gcry_cipher_hd_t c, + byte * outbuf, size_t outbuflen, int check) +{ + gcry_err_code_t err; + + if (!c->marks.tag) + { + if (!c->u_mode.gcm.ghash_fn) + return GPG_ERR_INV_STATE; + + if (!c->marks.tag) + { + /* Finalize GCM-SIV with zero-length plaintext. */ + err = _gcry_cipher_gcm_siv_encrypt (c, NULL, 0, NULL, 0); + if (err != 0) + return err; + } + } + + if (c->u_mode.gcm.datalen_over_limits) + return GPG_ERR_INV_LENGTH; + if (!c->u_mode.gcm.ghash_data_finalized) + return GPG_ERR_INV_STATE; + if (!c->marks.tag) + return GPG_ERR_INV_STATE; + + if (!check) + { + if (outbuflen > GCRY_SIV_BLOCK_LEN) + outbuflen = GCRY_SIV_BLOCK_LEN; + + /* NB: We already checked that OUTBUF is large enough to hold + * the result or has valid truncated length. */ + memcpy (outbuf, c->u_mode.gcm.tagiv, outbuflen); + } + else + { + /* OUTBUFLEN gives the length of the user supplied tag in OUTBUF + * and thus we need to compare its length first. */ + if (outbuflen != GCRY_SIV_BLOCK_LEN + || !buf_eq_const (outbuf, c->u_mode.gcm.tagiv, outbuflen)) + return GPG_ERR_CHECKSUM; + } + + return 0; +} + + +gcry_err_code_t +_gcry_cipher_gcm_siv_get_tag (gcry_cipher_hd_t c, unsigned char *outtag, + size_t taglen) +{ + return _gcry_cipher_gcm_siv_tag (c, outtag, taglen, 0); +} + + +gcry_err_code_t +_gcry_cipher_gcm_siv_check_tag (gcry_cipher_hd_t c, + const unsigned char *intag, + size_t taglen) +{ + return _gcry_cipher_gcm_siv_tag (c, (unsigned char *)intag, taglen, 1); +} diff --git a/cipher/cipher-gcm.c b/cipher/cipher-gcm.c index 4ce85408..64b9179c 100644 --- a/cipher/cipher-gcm.c +++ b/cipher/cipher-gcm.c @@ -1005,6 +1005,13 @@ _gcry_cipher_gcm_authenticate (gcry_cipher_hd_t c, } +void +_gcry_cipher_gcm_setupM (gcry_cipher_hd_t c) +{ + setupM (c); +} + + void _gcry_cipher_gcm_setkey (gcry_cipher_hd_t c) { diff --git a/cipher/cipher-internal.h b/cipher/cipher-internal.h index e9f48a2f..8b04cff7 100644 --- a/cipher/cipher-internal.h +++ b/cipher/cipher-internal.h @@ -301,7 +301,7 @@ struct gcry_cipher_handle gcry_cmac_context_t cmac_ciphertext; } eax; - /* Mode specific storage for GCM mode. */ + /* Mode specific storage for GCM mode and GCM-SIV mode. */ struct { /* The interim tag for GCM mode. */ union { @@ -347,6 +347,9 @@ struct gcry_cipher_handle /* GHASH implementation in use. */ ghash_fn_t ghash_fn; + + /* Key length used for GCM-SIV key generating key. */ + unsigned int siv_keylen; } gcm; /* Mode specific storage for OCB mode. 
*/ @@ -583,6 +586,8 @@ gcry_err_code_t _gcry_cipher_gcm_check_tag const unsigned char *intag, size_t taglen); void _gcry_cipher_gcm_setkey /* */ (gcry_cipher_hd_t c); +void _gcry_cipher_gcm_setupM +/* */ (gcry_cipher_hd_t c); /*-- cipher-poly1305.c --*/ @@ -679,6 +684,32 @@ gcry_err_code_t _gcry_cipher_siv_setkey const unsigned char *ctrkey, size_t ctrkeylen); +/*-- cipher-gcm-siv.c --*/ +gcry_err_code_t _gcry_cipher_gcm_siv_encrypt +/* */ (gcry_cipher_hd_t c, + unsigned char *outbuf, size_t outbuflen, + const unsigned char *inbuf, size_t inbuflen); +gcry_err_code_t _gcry_cipher_gcm_siv_decrypt +/* */ (gcry_cipher_hd_t c, + unsigned char *outbuf, size_t outbuflen, + const unsigned char *inbuf, size_t inbuflen); +gcry_err_code_t _gcry_cipher_gcm_siv_set_nonce +/* */ (gcry_cipher_hd_t c, const unsigned char *nonce, + size_t noncelen); +gcry_err_code_t _gcry_cipher_gcm_siv_authenticate +/* */ (gcry_cipher_hd_t c, const unsigned char *abuf, size_t abuflen); +gcry_err_code_t _gcry_cipher_gcm_siv_set_decryption_tag +/* */ (gcry_cipher_hd_t c, const byte *tag, size_t taglen); +gcry_err_code_t _gcry_cipher_gcm_siv_get_tag +/* */ (gcry_cipher_hd_t c, + unsigned char *outtag, size_t taglen); +gcry_err_code_t _gcry_cipher_gcm_siv_check_tag +/* */ (gcry_cipher_hd_t c, + const unsigned char *intag, size_t taglen); +gcry_err_code_t _gcry_cipher_gcm_siv_setkey +/* */ (gcry_cipher_hd_t c, unsigned int keylen); + + /* Return the L-value for block N. Note: 'cipher_ocb.c' ensures that N * will never be multiple of 65536 (1 << OCB_L_TABLE_SIZE), thus N can * be directly passed to _gcry_ctz() function and resulting index will @@ -865,6 +896,29 @@ cipher_block_xor_n_copy_2(void *_dst_xor, const void *_src_xor, } +/* Optimized function for combined cipher block byte-swapping. */ +static inline void +cipher_block_bswap (void *_dst_bswap, const void *_src_bswap, + size_t blocksize) +{ + byte *dst_bswap = _dst_bswap; + const byte *src_bswap = _src_bswap; + u64 t[2]; + + if (blocksize == 8) + { + buf_put_le64(dst_bswap, buf_get_be64(src_bswap)); + } + else + { + t[0] = buf_get_be64(src_bswap + 0); + t[1] = buf_get_be64(src_bswap + 8); + buf_put_le64(dst_bswap + 8, t[0]); + buf_put_le64(dst_bswap + 0, t[1]); + } +} + + /* Optimized function for combined cipher block xoring and copying. Used by mainly CFB mode decryption. */ static inline void diff --git a/cipher/cipher-siv.c b/cipher/cipher-siv.c index 9a71f2ef..11f25340 100644 --- a/cipher/cipher-siv.c +++ b/cipher/cipher-siv.c @@ -1,4 +1,4 @@ -/* cipher-siv.c - SIV implementation +/* cipher-siv.c - SIV implementation (RFC 5297) * Copyright (C) 2021 Jussi Kivilinna * * This file is part of Libgcrypt. diff --git a/cipher/cipher.c b/cipher/cipher.c index a274466f..2bde99ef 100644 --- a/cipher/cipher.c +++ b/cipher/cipher.c @@ -526,33 +526,46 @@ _gcry_cipher_open_internal (gcry_cipher_hd_t *handle, if (! 
err) switch (mode) { - case GCRY_CIPHER_MODE_CCM: - if (spec->blocksize != GCRY_CCM_BLOCK_LEN) + case GCRY_CIPHER_MODE_ECB: + case GCRY_CIPHER_MODE_CBC: + case GCRY_CIPHER_MODE_CFB: + case GCRY_CIPHER_MODE_CFB8: + case GCRY_CIPHER_MODE_OFB: + case GCRY_CIPHER_MODE_CTR: + case GCRY_CIPHER_MODE_AESWRAP: + case GCRY_CIPHER_MODE_CMAC: + case GCRY_CIPHER_MODE_EAX: + if (!spec->encrypt || !spec->decrypt) err = GPG_ERR_INV_CIPHER_MODE; + break; + + case GCRY_CIPHER_MODE_CCM: if (!spec->encrypt || !spec->decrypt) err = GPG_ERR_INV_CIPHER_MODE; + else if (spec->blocksize != GCRY_CCM_BLOCK_LEN) + err = GPG_ERR_INV_CIPHER_MODE; break; case GCRY_CIPHER_MODE_XTS: - if (spec->blocksize != GCRY_XTS_BLOCK_LEN) - err = GPG_ERR_INV_CIPHER_MODE; if (!spec->encrypt || !spec->decrypt) err = GPG_ERR_INV_CIPHER_MODE; + else if (spec->blocksize != GCRY_XTS_BLOCK_LEN) + err = GPG_ERR_INV_CIPHER_MODE; break; - case GCRY_CIPHER_MODE_ECB: - case GCRY_CIPHER_MODE_CBC: - case GCRY_CIPHER_MODE_CFB: - case GCRY_CIPHER_MODE_CFB8: - case GCRY_CIPHER_MODE_OFB: - case GCRY_CIPHER_MODE_CTR: - case GCRY_CIPHER_MODE_AESWRAP: - case GCRY_CIPHER_MODE_CMAC: - case GCRY_CIPHER_MODE_EAX: case GCRY_CIPHER_MODE_GCM: + if (!spec->encrypt || !spec->decrypt) + err = GPG_ERR_INV_CIPHER_MODE; + else if (spec->blocksize != GCRY_GCM_BLOCK_LEN) + err = GPG_ERR_INV_CIPHER_MODE; + break; + case GCRY_CIPHER_MODE_SIV: + case GCRY_CIPHER_MODE_GCM_SIV: if (!spec->encrypt || !spec->decrypt) err = GPG_ERR_INV_CIPHER_MODE; + else if (spec->blocksize != GCRY_SIV_BLOCK_LEN) + err = GPG_ERR_INV_CIPHER_MODE; break; case GCRY_CIPHER_MODE_POLY1305: @@ -569,7 +582,7 @@ _gcry_cipher_open_internal (gcry_cipher_hd_t *handle, security too much. */ if (!spec->encrypt || !spec->decrypt) err = GPG_ERR_INV_CIPHER_MODE; - else if (spec->blocksize != (128/8)) + else if (spec->blocksize != GCRY_OCB_BLOCK_LEN) err = GPG_ERR_INV_CIPHER_MODE; break; @@ -769,6 +782,12 @@ cipher_setkey (gcry_cipher_hd_t c, byte *key, size_t keylen) _gcry_cipher_gcm_setkey (c); break; + case GCRY_CIPHER_MODE_GCM_SIV: + rc = _gcry_cipher_gcm_siv_setkey (c, keylen); + if (rc) + c->marks.key = 0; + break; + case GCRY_CIPHER_MODE_OCB: _gcry_cipher_ocb_setkey (c); break; @@ -884,6 +903,7 @@ cipher_reset (gcry_cipher_hd_t c) break; case GCRY_CIPHER_MODE_GCM: + case GCRY_CIPHER_MODE_GCM_SIV: /* Only clear head of u_mode, keep ghash_key and gcm_table. 
 */
       {
         byte *u_mode_pos = (void *)&c->u_mode;
@@ -1375,6 +1395,11 @@ _gcry_cipher_setup_mode_ops(gcry_cipher_hd_t c, int mode)
       c->mode_ops.decrypt = _gcry_cipher_siv_decrypt;
       break;

+    case GCRY_CIPHER_MODE_GCM_SIV:
+      c->mode_ops.encrypt = _gcry_cipher_gcm_siv_encrypt;
+      c->mode_ops.decrypt = _gcry_cipher_gcm_siv_decrypt;
+      break;
+
     default:
       c->mode_ops.encrypt = do_encrypt_none_unknown;
       c->mode_ops.decrypt = do_decrypt_none_unknown;
@@ -1408,6 +1433,10 @@ _gcry_cipher_setup_mode_ops(gcry_cipher_hd_t c, int mode)
       c->mode_ops.setiv = _gcry_cipher_siv_set_nonce;
       break;

+    case GCRY_CIPHER_MODE_GCM_SIV:
+      c->mode_ops.setiv = _gcry_cipher_gcm_siv_set_nonce;
+      break;
+
     default:
       c->mode_ops.setiv = cipher_setiv;
       break;
@@ -1459,6 +1488,12 @@ _gcry_cipher_setup_mode_ops(gcry_cipher_hd_t c, int mode)
       c->mode_ops.check_tag = _gcry_cipher_siv_check_tag;
       break;

+    case GCRY_CIPHER_MODE_GCM_SIV:
+      c->mode_ops.authenticate = _gcry_cipher_gcm_siv_authenticate;
+      c->mode_ops.get_tag = _gcry_cipher_gcm_siv_get_tag;
+      c->mode_ops.check_tag = _gcry_cipher_gcm_siv_check_tag;
+      break;
+
     default:
       c->mode_ops.authenticate = NULL;
       c->mode_ops.get_tag = NULL;
@@ -1540,6 +1575,8 @@ _gcry_cipher_ctl (gcry_cipher_hd_t h, int cmd, void *buffer, size_t buflen)

       if (h->mode == GCRY_CIPHER_MODE_SIV)
         rc = _gcry_cipher_siv_set_decryption_tag (h, buffer, buflen);
+      else if (h->mode == GCRY_CIPHER_MODE_GCM_SIV)
+        rc = _gcry_cipher_gcm_siv_set_decryption_tag (h, buffer, buflen);
       else
         rc = GPG_ERR_INV_CIPHER_MODE;
     }
@@ -1682,6 +1719,10 @@ _gcry_cipher_info (gcry_cipher_hd_t h, int cmd, void *buffer, size_t *nbytes)
       *nbytes = GCRY_SIV_BLOCK_LEN;
       break;

+    case GCRY_CIPHER_MODE_GCM_SIV:
+      *nbytes = GCRY_SIV_BLOCK_LEN;
+      break;
+
     default:
       rc = GPG_ERR_INV_CIPHER_MODE;
       break;
diff --git a/doc/gcrypt.texi b/doc/gcrypt.texi
index e5c4b64e..6ef68884 100644
--- a/doc/gcrypt.texi
+++ b/doc/gcrypt.texi
@@ -1782,6 +1782,22 @@ full-sized plaintext or ciphertext needs to be passed to
 needs to be given to SIV mode before decryption using
 @code{gcry_cipher_set_decryption_tag}.

+@item GCRY_CIPHER_MODE_GCM_SIV
+@cindex GCM-SIV, GCM-SIV mode, AES-GCM-SIV
+This mode implements the GCM-SIV Authenticated Encryption with
+Associated Data (AEAD) block cipher mode specified in RFC-8452
+(AES-GCM-SIV: Nonce Misuse-Resistant Authenticated Encryption).
+This implementation works with block ciphers with block size of
+128 bits and uses tag length of 128 bits.  Key lengths supported
+by the mode are 128 bits and 256 bits.  GCM-SIV is specified as
+nonce misuse resistant, so that it does not fail catastrophically
+if a nonce is repeated.
+
+When encrypting or decrypting, full-sized plaintext or ciphertext
+needs to be passed to @code{gcry_cipher_encrypt} or
+@code{gcry_cipher_decrypt}.  The decryption tag needs to be given to
+GCM-SIV mode before decryption using @code{gcry_cipher_set_decryption_tag}.
+
 @end table

 @node Working with cipher handles
@@ -1817,8 +1833,9 @@ ChaCha20 stream cipher.  The block cipher modes
 @code{GCRY_CIPHER_MODE_CTR} and @code{GCRY_CIPHER_MODE_EAX}) will work
 with any block cipher algorithm.  GCM mode
 (@code{GCRY_CIPHER_MODE_GCM}), CCM mode (@code{GCRY_CIPHER_MODE_CCM}),
-OCB mode (@code{GCRY_CIPHER_MODE_OCB}), XTS mode
-(@code{GCRY_CIPHER_MODE_XTS}) and SIV mode
-(@code{GCRY_CIPHER_MODE_SIV}) will only work with block cipher
+OCB mode (@code{GCRY_CIPHER_MODE_OCB}), XTS mode
+(@code{GCRY_CIPHER_MODE_XTS}), SIV mode
+(@code{GCRY_CIPHER_MODE_SIV}) and GCM-SIV mode
+(@code{GCRY_CIPHER_MODE_GCM_SIV}) will only work with block cipher
 algorithms which have the block size of 16 bytes.

The third argument @var{flags} can either be passed as @code{0} or as
@@ -2011,13 +2028,13 @@ implemented as a macro.
 @end deftypefun

-The SIV mode requires decryption tag to be input before decryption.
-This is done with:
+The SIV mode and the GCM-SIV mode require the decryption tag to be
+input before decryption. This is done with:

 @deftypefun gcry_error_t gcry_cipher_set_decryption_tag (gcry_cipher_hd_t @var{h}, const void *@var{tag}, size_t @var{taglen})

-Set decryption tag for the SIV mode decryption. This is implemented
-as a macro.
+Set the decryption tag for SIV or GCM-SIV mode decryption. This is
+implemented as a macro.
 @end deftypefun

diff --git a/src/gcrypt.h.in b/src/gcrypt.h.in
index 99b21276..0540c60a 100644
--- a/src/gcrypt.h.in
+++ b/src/gcrypt.h.in
@@ -977,7 +977,8 @@ enum gcry_cipher_modes
     GCRY_CIPHER_MODE_CFB8     = 12,  /* Cipher feedback (8 bit mode). */
     GCRY_CIPHER_MODE_XTS      = 13,  /* XTS mode. */
     GCRY_CIPHER_MODE_EAX      = 14,  /* EAX mode. */
-    GCRY_CIPHER_MODE_SIV      = 15   /* SIV mode. */
+    GCRY_CIPHER_MODE_SIV      = 15,  /* SIV mode. */
+    GCRY_CIPHER_MODE_GCM_SIV  = 16   /* GCM-SIV mode. */
   };

 /* Flags used with the open function. */
@@ -1001,7 +1002,7 @@ enum gcry_cipher_flags
 /* XTS works only with blocks of 128 bits. */
 #define GCRY_XTS_BLOCK_LEN  (128 / 8)

-/* SIV works only with blocks of 128 bits */
+/* SIV and GCM-SIV work only with blocks of 128 bits */
 #define GCRY_SIV_BLOCK_LEN  (128 / 8)

 /* Create a handle for algorithm ALGO to be used in MODE.  FLAGS may
@@ -1106,7 +1107,7 @@ size_t gcry_cipher_get_algo_blklen (int algo);
 #define gcry_cipher_test_algo(a) \
             gcry_cipher_algo_info( (a), GCRYCTL_TEST_ALGO, NULL, NULL )

-/* Setup tag for decryption (for SIV mode). */
+/* Setup tag for decryption (for SIV and GCM-SIV modes). */
 #define gcry_cipher_set_decryption_tag(a, tag, taglen) \
             gcry_cipher_ctl ((a), GCRYCTL_SET_DECRYPTION_TAG, \
                              (void *)(tag), (taglen))

diff --git a/tests/basic.c b/tests/basic.c
index 989a5aca..148aaec6 100644
--- a/tests/basic.c
+++ b/tests/basic.c
@@ -5220,6 +5220,919 @@ check_siv_cipher (void)
 }

+
+static void
+check_gcm_siv_cipher (void)
+{
+  static const struct tv
+  {
+    int algo;
+    char key[MAX_DATA_LEN];
+    char nonce[12];
+    char ad[MAX_DATA_LEN];
+    int adlen;
+    unsigned char plaintext[MAX_DATA_LEN];
+    int inlen;
+    char out[MAX_DATA_LEN];
+    char tag[MAX_DATA_LEN];
+  } tv[] =
+  {
+    /* Test vectors from RFC8452 */
+    {
+      GCRY_CIPHER_AES128,
+      "\xee\x8e\x1e\xd9\xff\x25\x40\xae\x8f\x2b\xa9\xf5\x0b\xc2\xf2\x7c",
+      "\x75\x2a\xba\xd3\xe0\xaf\xb5\xf4\x34\xdc\x43\x10",
+      "example",
+      7,
+      "Hello world",
+      11,
+      "\x5d\x34\x9e\xad\x17\x5e\xf6\xb1\xde\xf6\xfd",
+      "\x4f\xbc\xde\xb7\xe4\x79\x3f\x4a\x1d\x7e\x4f\xaa\x70\x10\x0a\xf1"
+    },
+    {
+      GCRY_CIPHER_AES128,
+      "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
+      "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
+      "",
+      -1,
+      "",
+      0,
+      "",
+      "\xdc\x20\xe2\xd8\x3f\x25\x70\x5b\xb4\x9e\x43\x9e\xca\x56\xde\x25"
+    },
+    {
+      GCRY_CIPHER_AES128,
+      "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
+      "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
+      "",
+      0,
+      "",
+      0,
+      "",
+      "\xdc\x20\xe2\xd8\x3f\x25\x70\x5b\xb4\x9e\x43\x9e\xca\x56\xde\x25",
+    },
+    {
+      GCRY_CIPHER_AES128,
+      "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
+      "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00",
+      "",
+      0,
+      "\x01\x00\x00\x00\x00\x00\x00\x00",
+      8,
+      "\xb5\xd8\x39\x33\x0a\xc7\xb7\x86",
+      "\x57\x87\x82\xff\xf6\x01\x3b\x81\x5b\x28\x7c\x22\x49\x3a\x36\x4c",
+    },
+    {
+      GCRY_CIPHER_AES128,
+      
"\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 12, + "\x73\x23\xea\x61\xd0\x59\x32\x26\x00\x47\xd9\x42", + "\xa4\x97\x8d\xb3\x57\x39\x1a\x0b\xc4\xfd\xec\x8b\x0d\x10\x66\x39", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 16, + "\x74\x3f\x7c\x80\x77\xab\x25\xf8\x62\x4e\x2e\x94\x85\x79\xcf\x77", + "\x30\x3a\xaf\x90\xf6\xfe\x21\x19\x9c\x60\x68\x57\x74\x37\xa0\xc4", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 32, + "\x84\xe0\x7e\x62\xba\x83\xa6\x58\x54\x17\x24\x5d\x7e\xc4\x13\xa9" + "\xfe\x42\x7d\x63\x15\xc0\x9b\x57\xce\x45\xf2\xe3\x93\x6a\x94\x45", + "\x1a\x8e\x45\xdc\xd4\x57\x8c\x66\x7c\xd8\x68\x47\xbf\x61\x55\xff", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 48, + "\x3f\xd2\x4c\xe1\xf5\xa6\x7b\x75\xbf\x23\x51\xf1\x81\xa4\x75\xc7" + "\xb8\x00\xa5\xb4\xd3\xdc\xf7\x01\x06\xb1\xee\xa8\x2f\xa1\xd6\x4d" + "\xf4\x2b\xf7\x22\x61\x22\xfa\x92\xe1\x7a\x40\xee\xaa\xc1\x20\x1b", + "\x5e\x6e\x31\x1d\xbf\x39\x5d\x35\xb0\xfe\x39\xc2\x71\x43\x88\xf8", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 64, + "\x24\x33\x66\x8f\x10\x58\x19\x0f\x6d\x43\xe3\x60\xf4\xf3\x5c\xd8" + "\xe4\x75\x12\x7c\xfc\xa7\x02\x8e\xa8\xab\x5c\x20\xf7\xab\x2a\xf0" + "\x25\x16\xa2\xbd\xcb\xc0\x8d\x52\x1b\xe3\x7f\xf2\x8c\x15\x2b\xba" + "\x36\x69\x7f\x25\xb4\xcd\x16\x9c\x65\x90\xd1\xdd\x39\x56\x6d\x3f", + "\x8a\x26\x3d\xd3\x17\xaa\x88\xd5\x6b\xdf\x39\x36\xdb\xa7\x5b\xb8", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01", + 1, + "\x02\x00\x00\x00\x00\x00\x00\x00", + 8, + "\x1e\x6d\xab\xa3\x56\x69\xf4\x27", + "\x3b\x0a\x1a\x25\x60\x96\x9c\xdf\x79\x0d\x99\x75\x9a\xbd\x15\x08", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01", + 1, + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 12, + "\x29\x6c\x78\x89\xfd\x99\xf4\x19\x17\xf4\x46\x20", + "\x08\x29\x9c\x51\x02\x74\x5a\xaa\x3a\x0c\x46\x9f\xad\x9e\x07\x5a", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01", + 1, + 
"\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 16, + "\xe2\xb0\xc5\xda\x79\xa9\x01\xc1\x74\x5f\x70\x05\x25\xcb\x33\x5b", + "\x8f\x89\x36\xec\x03\x9e\x4e\x4b\xb9\x7e\xbd\x8c\x44\x57\x44\x1f", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01", + 1, + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 32, + "\x62\x00\x48\xef\x3c\x1e\x73\xe5\x7e\x02\xbb\x85\x62\xc4\x16\xa3" + "\x19\xe7\x3e\x4c\xaa\xc8\xe9\x6a\x1e\xcb\x29\x33\x14\x5a\x1d\x71", + "\xe6\xaf\x6a\x7f\x87\x28\x7d\xa0\x59\xa7\x16\x84\xed\x34\x98\xe1", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01", + 1, + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 48, + "\x50\xc8\x30\x3e\xa9\x39\x25\xd6\x40\x90\xd0\x7b\xd1\x09\xdf\xd9" + "\x51\x5a\x5a\x33\x43\x10\x19\xc1\x7d\x93\x46\x59\x99\xa8\xb0\x05" + "\x32\x01\xd7\x23\x12\x0a\x85\x62\xb8\x38\xcd\xff\x25\xbf\x9d\x1e", + "\x6a\x8c\xc3\x86\x5f\x76\x89\x7c\x2e\x4b\x24\x5c\xf3\x1c\x51\xf2", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01", + 1, + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 64, + "\x2f\x5c\x64\x05\x9d\xb5\x5e\xe0\xfb\x84\x7e\xd5\x13\x00\x37\x46" + "\xac\xa4\xe6\x1c\x71\x1b\x5d\xe2\xe7\xa7\x7f\xfd\x02\xda\x42\xfe" + "\xec\x60\x19\x10\xd3\x46\x7b\xb8\xb3\x6e\xbb\xae\xbc\xe5\xfb\xa3" + "\x0d\x36\xc9\x5f\x48\xa3\xe7\x98\x0f\x0e\x7a\xc2\x99\x33\x2a\x80", + "\xcd\xc4\x6a\xe4\x75\x56\x3d\xe0\x37\x00\x1e\xf8\x4a\xe2\x17\x44", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 12, + "\x02\x00\x00\x00", + 4, + "\xa8\xfe\x3e\x87", + "\x07\xeb\x1f\x84\xfb\x28\xf8\xcb\x73\xde\x8e\x99\xe2\xf4\x8a\x14", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x02\x00", + 18, + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x04\x00\x00\x00", + 20, + "\x6b\xb0\xfe\xcf\x5d\xed\x9b\x77\xf9\x02\xc7\xd5\xda\x23\x6a\x43" + "\x91\xdd\x02\x97", + "\x24\xaf\xc9\x80\x5e\x97\x6f\x45\x1e\x6d\x87\xf6\xfe\x10\x65\x14", + }, + { + GCRY_CIPHER_AES128, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x02\x00\x00\x00", + 20, + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x04\x00", + 18, + "\x44\xd0\xaa\xf6\xfb\x2f\x1f\x34\xad\xd5\xe8\x06\x4e\x83\xe1\x2a" + "\x2a\xda", + "\xbf\xf9\xb2\xef\x00\xfb\x47\x92\x0c\xc7\x2a\x0c\x0f\x13\xb9\xfd", + }, + { + 
GCRY_CIPHER_AES128, + "\xe6\x60\x21\xd5\xeb\x8e\x4f\x40\x66\xd4\xad\xb9\xc3\x35\x60\xe4", + "\xf4\x6e\x44\xbb\x3d\xa0\x01\x5c\x94\xf7\x08\x87", + "", + 0, + "", + 0, + "", + "\xa4\x19\x4b\x79\x07\x1b\x01\xa8\x7d\x65\xf7\x06\xe3\x94\x95\x78", + }, + { + GCRY_CIPHER_AES128, + "\x36\x86\x42\x00\xe0\xea\xf5\x28\x4d\x88\x4a\x0e\x77\xd3\x16\x46", + "\xba\xe8\xe3\x7f\xc8\x34\x41\xb1\x60\x34\x56\x6b", + "\x46\xbb\x91\xc3\xc5", + 5, + "\x7a\x80\x6c", + 3, + "\xaf\x60\xeb", + "\x71\x1b\xd8\x5b\xc1\xe4\xd3\xe0\xa4\x62\xe0\x74\xee\xa4\x28\xa8", + }, + { + GCRY_CIPHER_AES128, + "\xae\xdb\x64\xa6\xc5\x90\xbc\x84\xd1\xa5\xe2\x69\xe4\xb4\x78\x01", + "\xaf\xc0\x57\x7e\x34\x69\x9b\x9e\x67\x1f\xdd\x4f", + "\xfc\x88\x0c\x94\xa9\x51\x98\x87\x42\x96", + 10, + "\xbd\xc6\x6f\x14\x65\x45", + 6, + "\xbb\x93\xa3\xe3\x4d\x3c", + "\xd6\xa9\xc4\x55\x45\xcf\xc1\x1f\x03\xad\x74\x3d\xba\x20\xf9\x66", + }, + { + GCRY_CIPHER_AES128, + "\xd5\xcc\x1f\xd1\x61\x32\x0b\x69\x20\xce\x07\x78\x7f\x86\x74\x3b", + "\x27\x5d\x1a\xb3\x2f\x6d\x1f\x04\x34\xd8\x84\x8c", + "\x04\x67\x87\xf3\xea\x22\xc1\x27\xaa\xf1\x95\xd1\x89\x47\x28", + 15, + "\x11\x77\x44\x1f\x19\x54\x95\x86\x0f", + 9, + "\x4f\x37\x28\x1f\x7a\xd1\x29\x49\xd0", + "\x1d\x02\xfd\x0c\xd1\x74\xc8\x4f\xc5\xda\xe2\xf6\x0f\x52\xfd\x2b", + }, + { + GCRY_CIPHER_AES128, + "\xb3\xfe\xd1\x47\x3c\x52\x8b\x84\x26\xa5\x82\x99\x59\x29\xa1\x49", + "\x9e\x9a\xd8\x78\x0c\x8d\x63\xd0\xab\x41\x49\xc0", + "\xc9\x88\x2e\x53\x86\xfd\x9f\x92\xec\x48\x9c\x8f\xde\x2b\xe2\xcf" + "\x97\xe7\x4e\x93", + 20, + "\x9f\x57\x2c\x61\x4b\x47\x45\x91\x44\x74\xe7\xc7", + 12, + "\xf5\x46\x73\xc5\xdd\xf7\x10\xc7\x45\x64\x1c\x8b", + "\xc1\xdc\x2f\x87\x1f\xb7\x56\x1d\xa1\x28\x6e\x65\x5e\x24\xb7\xb0", + }, + { + GCRY_CIPHER_AES128, + "\x2d\x4e\xd8\x7d\xa4\x41\x02\x95\x2e\xf9\x4b\x02\xb8\x05\x24\x9b", + "\xac\x80\xe6\xf6\x14\x55\xbf\xac\x83\x08\xa2\xd4", + "\x29\x50\xa7\x0d\x5a\x1d\xb2\x31\x6f\xd5\x68\x37\x8d\xa1\x07\xb5" + "\x2b\x0d\xa5\x52\x10\xcc\x1c\x1b\x0a", + 25, + "\x0d\x8c\x84\x51\x17\x80\x82\x35\x5c\x9e\x94\x0f\xea\x2f\x58", + 15, + "\xc9\xff\x54\x5e\x07\xb8\x8a\x01\x5f\x05\xb2\x74\x54\x0a\xa1", + "\x83\xb3\x44\x9b\x9f\x39\x55\x2d\xe9\x9d\xc2\x14\xa1\x19\x0b\x0b", + }, + { + GCRY_CIPHER_AES128, + "\xbd\xe3\xb2\xf2\x04\xd1\xe9\xf8\xb0\x6b\xc4\x7f\x97\x45\xb3\xd1", + "\xae\x06\x55\x6f\xb6\xaa\x78\x90\xbe\xbc\x18\xfe", + "\x18\x60\xf7\x62\xeb\xfb\xd0\x82\x84\xe4\x21\x70\x2d\xe0\xde\x18" + "\xba\xa9\xc9\x59\x62\x91\xb0\x84\x66\xf3\x7d\xe2\x1c\x7f", + 30, + "\x6b\x3d\xb4\xda\x3d\x57\xaa\x94\x84\x2b\x98\x03\xa9\x6e\x07\xfb" + "\x6d\xe7", + 18, + "\x62\x98\xb2\x96\xe2\x4e\x8c\xc3\x5d\xce\x0b\xed\x48\x4b\x7f\x30" + "\xd5\x80", + "\x3e\x37\x70\x94\xf0\x47\x09\xf6\x4d\x7b\x98\x53\x10\xa4\xdb\x84", + }, + { + GCRY_CIPHER_AES128, + "\xf9\x01\xcf\xe8\xa6\x96\x15\xa9\x3f\xdf\x7a\x98\xca\xd4\x81\x79", + "\x62\x45\x70\x9f\xb1\x88\x53\xf6\x8d\x83\x36\x40", + "\x75\x76\xf7\x02\x8e\xc6\xeb\x5e\xa7\xe2\x98\x34\x2a\x94\xd4\xb2" + "\x02\xb3\x70\xef\x97\x68\xec\x65\x61\xc4\xfe\x6b\x7e\x72\x96\xfa" + "\x85\x9c\x21", + 35, + "\xe4\x2a\x3c\x02\xc2\x5b\x64\x86\x9e\x14\x6d\x7b\x23\x39\x87\xbd" + "\xdf\xc2\x40\x87\x1d", + 21, + "\x39\x1c\xc3\x28\xd4\x84\xa4\xf4\x64\x06\x18\x1b\xcd\x62\xef\xd9" + "\xb3\xee\x19\x7d\x05", + "\x2d\x15\x50\x6c\x84\xa9\xed\xd6\x5e\x13\xe9\xd2\x4a\x2a\x6e\x70", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "", + 0, + "", + 
"\x07\xf5\xf4\x16\x9b\xbf\x55\xa8\x40\x0c\xd4\x7e\xa6\xfd\x40\x0f", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x01\x00\x00\x00\x00\x00\x00\x00", + 8, + "\xc2\xef\x32\x8e\x5c\x71\xc8\x3b", + "\x84\x31\x22\x13\x0f\x73\x64\xb7\x61\xe0\xb9\x74\x27\xe3\xdf\x28", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 12, + "\x9a\xab\x2a\xeb\x3f\xaa\x0a\x34\xae\xa8\xe2\xb1", + "\x8c\xa5\x0d\xa9\xae\x65\x59\xe4\x8f\xd1\x0f\x6e\x5c\x9c\xa1\x7e", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 16, + "\x85\xa0\x1b\x63\x02\x5b\xa1\x9b\x7f\xd3\xdd\xfc\x03\x3b\x3e\x76", + "\xc9\xea\xc6\xfa\x70\x09\x42\x70\x2e\x90\x86\x23\x83\xc6\xc3\x66", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 32, + "\x4a\x6a\x9d\xb4\xc8\xc6\x54\x92\x01\xb9\xed\xb5\x30\x06\xcb\xa8" + "\x21\xec\x9c\xf8\x50\x94\x8a\x7c\x86\xc6\x8a\xc7\x53\x9d\x02\x7f", + "\xe8\x19\xe6\x3a\xbc\xd0\x20\xb0\x06\xa9\x76\x39\x76\x32\xeb\x5d", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 48, + "\xc0\x0d\x12\x18\x93\xa9\xfa\x60\x3f\x48\xcc\xc1\xca\x3c\x57\xce" + "\x74\x99\x24\x5e\xa0\x04\x6d\xb1\x6c\x53\xc7\xc6\x6f\xe7\x17\xe3" + "\x9c\xf6\xc7\x48\x83\x7b\x61\xf6\xee\x3a\xdc\xee\x17\x53\x4e\xd5", + "\x79\x0b\xc9\x68\x80\xa9\x9b\xa8\x04\xbd\x12\xc0\xe6\xa2\x2c\xc4", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 64, + "\xc2\xd5\x16\x0a\x1f\x86\x83\x83\x49\x10\xac\xda\xfc\x41\xfb\xb1" + "\x63\x2d\x4a\x35\x3e\x8b\x90\x5e\xc9\xa5\x49\x9a\xc3\x4f\x96\xc7" + "\xe1\x04\x9e\xb0\x80\x88\x38\x91\xa4\xdb\x8c\xaa\xa1\xf9\x9d\xd0" + "\x04\xd8\x04\x87\x54\x07\x35\x23\x4e\x37\x44\x51\x2c\x6f\x90\xce", + "\x11\x28\x64\xc2\x69\xfc\x0d\x9d\x88\xc6\x1f\xa4\x7e\x39\xaa\x08", + }, + { + GCRY_CIPHER_AES256, + 
"\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01", + 1, + "\x02\x00\x00\x00\x00\x00\x00\x00", + 8, + "\x1d\xe2\x29\x67\x23\x7a\x81\x32", + "\x91\x21\x3f\x26\x7e\x3b\x45\x2f\x02\xd0\x1a\xe3\x3e\x4e\xc8\x54", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01", + 1, + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 12, + "\x16\x3d\x6f\x9c\xc1\xb3\x46\xcd\x45\x3a\x2e\x4c", + "\xc1\xa4\xa1\x9a\xe8\x00\x94\x1c\xcd\xc5\x7c\xc8\x41\x3c\x27\x7f", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01", + 1, + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 16, + "\xc9\x15\x45\x82\x3c\xc2\x4f\x17\xdb\xb0\xe9\xe8\x07\xd5\xec\x17", + "\xb2\x92\xd2\x8f\xf6\x11\x89\xe8\xe4\x9f\x38\x75\xef\x91\xaf\xf7", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01", + 1, + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 32, + "\x07\xda\xd3\x64\xbf\xc2\xb9\xda\x89\x11\x6d\x7b\xef\x6d\xaa\xaf" + "\x6f\x25\x55\x10\xaa\x65\x4f\x92\x0a\xc8\x1b\x94\xe8\xba\xd3\x65", + "\xae\xa1\xba\xd1\x27\x02\xe1\x96\x56\x04\x37\x4a\xab\x96\xdb\xbc", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01", + 1, + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 48, + "\xc6\x7a\x1f\x0f\x56\x7a\x51\x98\xaa\x1f\xcc\x8e\x3f\x21\x31\x43" + "\x36\xf7\xf5\x1c\xa8\xb1\xaf\x61\xfe\xac\x35\xa8\x64\x16\xfa\x47" + "\xfb\xca\x3b\x5f\x74\x9c\xdf\x56\x45\x27\xf2\x31\x4f\x42\xfe\x25", + "\x03\x33\x27\x42\xb2\x28\xc6\x47\x17\x36\x16\xcf\xd4\x4c\x54\xeb", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01", + 1, + "\x02\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x04\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x05\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 64, + "\x67\xfd\x45\xe1\x26\xbf\xb9\xa7\x99\x30\xc4\x3a\xad\x2d\x36\x96" + "\x7d\x3f\x0e\x4d\x21\x7c\x1e\x55\x1f\x59\x72\x78\x70\xbe\xef\xc9" + "\x8c\xb9\x33\xa8\xfc\xe9\xde\x88\x7b\x1e\x40\x79\x99\x88\xdb\x1f" + "\xc3\xf9\x18\x80\xed\x40\x5b\x2d\xd2\x98\x31\x88\x58\x46\x7c\x89", + "\x5b\xde\x02\x85\x03\x7c\x5d\xe8\x1e\x5b\x57\x0a\x04\x9b\x62\xa0", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + 
"\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + 12, + "\x02\x00\x00\x00", + 4, + "\x22\xb3\xf4\xcd", + "\x18\x35\xe5\x17\x74\x1d\xfd\xdc\xcf\xa0\x7f\xa4\x66\x1b\x74\xcf", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x02\x00", + 18, + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x04\x00\x00\x00", + 20, + "\x43\xdd\x01\x63\xcd\xb4\x8f\x9f\xe3\x21\x2b\xf6\x1b\x20\x19\x76" + "\x06\x7f\x34\x2b", + "\xb8\x79\xad\x97\x6d\x82\x42\xac\xc1\x88\xab\x59\xca\xbf\xe3\x07", + }, + { + GCRY_CIPHER_AES256, + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x01\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x02\x00\x00\x00", + 20, + "\x03\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x04\x00", + 18, + "\x46\x24\x01\x72\x4b\x5c\xe6\x58\x8d\x5a\x54\xaa\xe5\x37\x55\x13" + "\xa0\x75", + "\xcf\xcd\xf5\x04\x21\x12\xaa\x29\x68\x5c\x91\x2f\xc2\x05\x65\x43", + }, + { + GCRY_CIPHER_AES256, + "\xe6\x60\x21\xd5\xeb\x8e\x4f\x40\x66\xd4\xad\xb9\xc3\x35\x60\xe4" + "\xf4\x6e\x44\xbb\x3d\xa0\x01\x5c\x94\xf7\x08\x87\x36\x86\x42\x00", + "\xe0\xea\xf5\x28\x4d\x88\x4a\x0e\x77\xd3\x16\x46", + "", + 0, + "", + 0, + "", + "\x16\x9f\xbb\x2f\xbf\x38\x9a\x99\x5f\x63\x90\xaf\x22\x22\x8a\x62", + }, + { + GCRY_CIPHER_AES256, + "\xba\xe8\xe3\x7f\xc8\x34\x41\xb1\x60\x34\x56\x6b\x7a\x80\x6c\x46" + "\xbb\x91\xc3\xc5\xae\xdb\x64\xa6\xc5\x90\xbc\x84\xd1\xa5\xe2\x69", + "\xe4\xb4\x78\x01\xaf\xc0\x57\x7e\x34\x69\x9b\x9e", + "\x4f\xbd\xc6\x6f\x14", + 5, + "\x67\x1f\xdd", + 3, + "\x0e\xac\xcb", + "\x93\xda\x9b\xb8\x13\x33\xae\xe0\xc7\x85\xb2\x40\xd3\x19\x71\x9d", + }, + { + GCRY_CIPHER_AES256, + "\x65\x45\xfc\x88\x0c\x94\xa9\x51\x98\x87\x42\x96\xd5\xcc\x1f\xd1" + "\x61\x32\x0b\x69\x20\xce\x07\x78\x7f\x86\x74\x3b\x27\x5d\x1a\xb3", + "\x2f\x6d\x1f\x04\x34\xd8\x84\x8c\x11\x77\x44\x1f", + "\x67\x87\xf3\xea\x22\xc1\x27\xaa\xf1\x95", + 10, + "\x19\x54\x95\x86\x0f\x04", + 6, + "\xa2\x54\xda\xd4\xf3\xf9", + "\x6b\x62\xb8\x4d\xc4\x0c\x84\x63\x6a\x5e\xc1\x20\x20\xec\x8c\x2c", + }, + { + GCRY_CIPHER_AES256, + "\xd1\x89\x47\x28\xb3\xfe\xd1\x47\x3c\x52\x8b\x84\x26\xa5\x82\x99" + "\x59\x29\xa1\x49\x9e\x9a\xd8\x78\x0c\x8d\x63\xd0\xab\x41\x49\xc0", + "\x9f\x57\x2c\x61\x4b\x47\x45\x91\x44\x74\xe7\xc7", + "\x48\x9c\x8f\xde\x2b\xe2\xcf\x97\xe7\x4e\x93\x2d\x4e\xd8\x7d", + 15, + "\xc9\x88\x2e\x53\x86\xfd\x9f\x92\xec", + 9, + "\x0d\xf9\xe3\x08\x67\x82\x44\xc4\x4b", + "\xc0\xfd\x3d\xc6\x62\x8d\xfe\x55\xeb\xb0\xb9\xfb\x22\x95\xc8\xc2", + }, + { + GCRY_CIPHER_AES256, + "\xa4\x41\x02\x95\x2e\xf9\x4b\x02\xb8\x05\x24\x9b\xac\x80\xe6\xf6" + "\x14\x55\xbf\xac\x83\x08\xa2\xd4\x0d\x8c\x84\x51\x17\x80\x82\x35", + "\x5c\x9e\x94\x0f\xea\x2f\x58\x29\x50\xa7\x0d\x5a", + "\x0d\xa5\x52\x10\xcc\x1c\x1b\x0a\xbd\xe3\xb2\xf2\x04\xd1\xe9\xf8" + "\xb0\x6b\xc4\x7f", + 20, + "\x1d\xb2\x31\x6f\xd5\x68\x37\x8d\xa1\x07\xb5\x2b", + 12, + "\x8d\xbe\xb9\xf7\x25\x5b\xf5\x76\x9d\xd5\x66\x92", + "\x40\x40\x99\xc2\x58\x7f\x64\x97\x9f\x21\x82\x67\x06\xd4\x97\xd5", + }, + { + GCRY_CIPHER_AES256, + 
"\x97\x45\xb3\xd1\xae\x06\x55\x6f\xb6\xaa\x78\x90\xbe\xbc\x18\xfe" + "\x6b\x3d\xb4\xda\x3d\x57\xaa\x94\x84\x2b\x98\x03\xa9\x6e\x07\xfb", + "\x6d\xe7\x18\x60\xf7\x62\xeb\xfb\xd0\x82\x84\xe4", + "\xf3\x7d\xe2\x1c\x7f\xf9\x01\xcf\xe8\xa6\x96\x15\xa9\x3f\xdf\x7a" + "\x98\xca\xd4\x81\x79\x62\x45\x70\x9f", + 25, + "\x21\x70\x2d\xe0\xde\x18\xba\xa9\xc9\x59\x62\x91\xb0\x84\x66", + 15, + "\x79\x35\x76\xdf\xa5\xc0\xf8\x87\x29\xa7\xed\x3c\x2f\x1b\xff", + "\xb3\x08\x0d\x28\xf6\xeb\xb5\xd3\x64\x8c\xe9\x7b\xd5\xba\x67\xfd", + }, + { + GCRY_CIPHER_AES256, + "\xb1\x88\x53\xf6\x8d\x83\x36\x40\xe4\x2a\x3c\x02\xc2\x5b\x64\x86" + "\x9e\x14\x6d\x7b\x23\x39\x87\xbd\xdf\xc2\x40\x87\x1d\x75\x76\xf7", + "\x02\x8e\xc6\xeb\x5e\xa7\xe2\x98\x34\x2a\x94\xd4", + "\x9c\x21\x59\x05\x8b\x1f\x0f\xe9\x14\x33\xa5\xbd\xc2\x0e\x21\x4e" + "\xab\x7f\xec\xef\x44\x54\xa1\x0e\xf0\x65\x7d\xf2\x1a\xc7", + 30, + "\xb2\x02\xb3\x70\xef\x97\x68\xec\x65\x61\xc4\xfe\x6b\x7e\x72\x96" + "\xfa\x85", + 18, + "\x85\x7e\x16\xa6\x49\x15\xa7\x87\x63\x76\x87\xdb\x4a\x95\x19\x63" + "\x5c\xdd", + "\x45\x4f\xc2\xa1\x54\xfe\xa9\x1f\x83\x63\xa3\x9f\xec\x7d\x0a\x49", + }, + { + GCRY_CIPHER_AES256, + "\x3c\x53\x5d\xe1\x92\xea\xed\x38\x22\xa2\xfb\xbe\x2c\xa9\xdf\xc8" + "\x82\x55\xe1\x4a\x66\x1b\x8a\xa8\x2c\xc5\x42\x36\x09\x3b\xbc\x23", + "\x68\x80\x89\xe5\x55\x40\xdb\x18\x72\x50\x4e\x1c", + "\x73\x43\x20\xcc\xc9\xd9\xbb\xbb\x19\xcb\x81\xb2\xaf\x4e\xcb\xc3" + "\xe7\x28\x34\x32\x1f\x7a\xa0\xf7\x0b\x72\x82\xb4\xf3\x3d\xf2\x3f" + "\x16\x75\x41", + 35, + "\xce\xd5\x32\xce\x41\x59\xb0\x35\x27\x7d\x4d\xfb\xb7\xdb\x62\x96" + "\x8b\x13\xcd\x4e\xec", + 21, + "\x62\x66\x60\xc2\x6e\xa6\x61\x2f\xb1\x7a\xd9\x1e\x8e\x76\x76\x39" + "\xed\xd6\xc9\xfa\xee", + "\x9d\x6c\x70\x29\x67\x5b\x89\xea\xf4\xba\x1d\xed\x1a\x28\x65\x94", + }, + { + GCRY_CIPHER_AES256, + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x4d\xb9\x23\xdc\x79\x3e\xe6\x49\x7c\x76\xdc\xc0\x3a\x98\xe1\x08", + 32, + "\xf3\xf8\x0f\x2c\xf0\xcb\x2d\xd9\xc5\x98\x4f\xcd\xa9\x08\x45\x6c" + "\xc5\x37\x70\x3b\x5b\xa7\x03\x24\xa6\x79\x3a\x7b\xf2\x18\xd3\xea", + "\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + }, + { + GCRY_CIPHER_AES256, + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00" + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + "", + 0, + "\xeb\x36\x40\x27\x7c\x7f\xfd\x13\x03\xc7\xa5\x42\xd0\x2d\x3e\x4c" + "\x00\x00\x00\x00\x00\x00\x00\x00", + 24, + "\x18\xce\x4f\x0b\x8c\xb4\xd0\xca\xc6\x5f\xea\x8f\x79\x25\x7b\x20" + "\x88\x8e\x53\xe7\x22\x99\xe5\x6d", + "\xff\xff\xff\xff\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00\x00", + } + }; + + gcry_cipher_hd_t hde, hdd; + unsigned char out[MAX_DATA_LEN]; + unsigned char tag[16]; + int i, keylen; + gcry_error_t err = 0; + size_t taglen2; + + if (verbose) + fprintf (stderr, " Starting GCM-SIV checks.\n"); + + for (i = 0; i < sizeof (tv) / sizeof (tv[0]); i++) + { + if (gcry_cipher_test_algo (tv[i].algo) && in_fips_mode) + { + if (verbose) + fprintf (stderr, " algorithm %d not available in fips mode\n", + tv[i].algo); + continue; + } + + if (verbose) + fprintf (stderr, " checking GCM-SIV mode for %s [%i]\n", + gcry_cipher_algo_name (tv[i].algo), + tv[i].algo); + err = gcry_cipher_open (&hde, tv[i].algo, GCRY_CIPHER_MODE_GCM_SIV, 0); + if 
(!err)
+        err = gcry_cipher_open (&hdd, tv[i].algo, GCRY_CIPHER_MODE_GCM_SIV, 0);
+      if (err)
+        {
+          fail ("aes-gcm-siv, gcry_cipher_open failed: %s\n", gpg_strerror (err));
+          return;
+        }
+
+      keylen = gcry_cipher_get_algo_keylen (tv[i].algo);
+      if (!keylen)
+        {
+          fail ("aes-gcm-siv, gcry_cipher_get_algo_keylen failed\n");
+          return;
+        }
+
+      err = gcry_cipher_setkey (hde, tv[i].key, keylen);
+      if (!err)
+        err = gcry_cipher_setkey (hdd, tv[i].key, keylen);
+      if (err)
+        {
+          fail ("aes-gcm-siv, gcry_cipher_setkey failed: %s\n",
+                gpg_strerror (err));
+          gcry_cipher_close (hde);
+          gcry_cipher_close (hdd);
+          return;
+        }
+
+      err = gcry_cipher_setiv (hde, tv[i].nonce, 12);
+      if (!err)
+        err = gcry_cipher_setiv (hdd, tv[i].nonce, 12);
+      if (err)
+        {
+          fail ("aes-gcm-siv, gcry_cipher_setiv failed: %s\n",
+                gpg_strerror (err));
+          gcry_cipher_close (hde);
+          gcry_cipher_close (hdd);
+          return;
+        }
+
+      if (tv[i].adlen >= 0)
+        {
+          err = gcry_cipher_authenticate (hde, tv[i].ad, tv[i].adlen);
+          if (!err)
+            err = gcry_cipher_authenticate (hdd, tv[i].ad, tv[i].adlen);
+          if (err)
+            {
+              fail ("aes-gcm-siv, gcry_cipher_authenticate failed: %s\n",
+                    gpg_strerror (err));
+              gcry_cipher_close (hde);
+              gcry_cipher_close (hdd);
+              return;
+            }
+        }
+
+      err = gcry_cipher_info (hde, GCRYCTL_GET_TAGLEN, NULL, &taglen2);
+      if (err)
+        {
+          fail ("aes-gcm-siv, gcryctl_get_taglen failed (tv %d): %s\n",
+                i, gpg_strerror (err));
+          gcry_cipher_close (hde);
+          gcry_cipher_close (hdd);
+          return;
+        }
+      if (taglen2 != 16)
+        {
+          fail ("aes-gcm-siv, gcryctl_get_taglen returned bad length"
+                " (tv %d): got=%zu want=%d\n",
+                i, taglen2, 16);
+          gcry_cipher_close (hde);
+          gcry_cipher_close (hdd);
+          return;
+        }
+
+      if (tv[i].inlen)
+        {
+          err = gcry_cipher_encrypt (hde, out, tv[i].inlen,
+                                     tv[i].plaintext, tv[i].inlen);
+          if (err)
+            {
+              fail ("aes-gcm-siv, gcry_cipher_encrypt (%d) failed: %s\n",
+                    i, gpg_strerror (err));
+              gcry_cipher_close (hde);
+              gcry_cipher_close (hdd);
+              return;
+            }
+
+          if (memcmp (tv[i].out, out, tv[i].inlen))
+            {
+              mismatch (tv[i].out, tv[i].inlen, out, tv[i].inlen);
+              fail ("aes-gcm-siv, encrypt mismatch entry %d\n", i);
+            }
+
+          err = gcry_cipher_gettag (hde, tag, taglen2);
+          if (err)
+            {
+              fail ("aes-gcm-siv, gcry_cipher_gettag(%d) failed: %s\n",
+                    i, gpg_strerror (err));
+              gcry_cipher_close (hde);
+              gcry_cipher_close (hdd);
+              return;
+            }
+
+          if (memcmp (tv[i].tag, tag, taglen2))
+            {
+              mismatch (tv[i].tag, taglen2, tag, taglen2);
+              fail ("aes-gcm-siv, tag mismatch entry %d\n", i);
+            }
+
+          err = gcry_cipher_set_decryption_tag (hdd, tag, taglen2);
+          if (err)
+            {
+              fail ("aes-gcm-siv, gcry_cipher_set_decryption_tag (%d) failed: %s\n",
+                    i, gpg_strerror (err));
+              gcry_cipher_close (hde);
+              gcry_cipher_close (hdd);
+              return;
+            }
+
+          err = gcry_cipher_decrypt (hdd, out, tv[i].inlen, NULL, 0);
+          if (err)
+            {
+              fail ("aes-gcm-siv, gcry_cipher_decrypt (%d) failed: %s\n",
+                    i, gpg_strerror (err));
+              gcry_cipher_close (hde);
+              gcry_cipher_close (hdd);
+              return;
+            }
+
+          if (memcmp (tv[i].plaintext, out, tv[i].inlen))
+            fail ("aes-gcm-siv, decrypt mismatch entry %d\n", i);
+
+          err = gcry_cipher_checktag (hdd, tag, taglen2);
+          if (err)
+            {
+              fail ("aes-gcm-siv, gcry_cipher_checktag (%d) failed: %s\n",
+                    i, gpg_strerror (err));
+              gcry_cipher_close (hde);
+              gcry_cipher_close (hdd);
+              return;
+            }
+        }
+      else
+        {
+          err = gcry_cipher_gettag (hde, tag, taglen2);
+          if (err)
+            {
+              fail ("aes-gcm-siv, gcry_cipher_gettag(%d) failed: %s\n",
+                    i, gpg_strerror (err));
+              gcry_cipher_close (hde);
+              gcry_cipher_close (hdd);
+              return;
+            }
+
+          if (memcmp
(tv[i].tag, tag, taglen2)) + { + mismatch (tv[i].tag, taglen2, tag, taglen2); + fail ("aes-gcm-siv, tag mismatch entry %d\n", i); + } + + err = gcry_cipher_checktag (hdd, tv[i].tag, taglen2); + if (err) + { + fail ("aes-gcm-siv, gcry_cipher_checktag (%d) failed: %s\n", + i, gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + + tag[13] ^= 0x4; + err = gcry_cipher_checktag (hdd, tag, taglen2); + if (gpg_err_code (err) != GPG_ERR_CHECKSUM) + { + fail ("aes-gcm-siv, gcry_cipher_checktag (%d) expected checksum fail: %s\n", + i, gpg_strerror (err)); + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + return; + } + } + + gcry_cipher_close (hde); + gcry_cipher_close (hdd); + } + if (verbose) + fprintf (stderr, " Completed GCM-SIV checks.\n"); +} + + static void _check_poly1305_cipher (unsigned int step) { @@ -10554,6 +11467,7 @@ check_cipher_modes(void) check_xts_cipher (); check_eax_cipher (); check_siv_cipher (); + check_gcm_siv_cipher (); check_gost28147_cipher (); check_stream_cipher (); check_stream_cipher_large_block (); diff --git a/tests/bench-slope.c b/tests/bench-slope.c index 91eb7cc5..00cb11de 100644 --- a/tests/bench-slope.c +++ b/tests/bench-slope.c @@ -1544,6 +1544,52 @@ static struct bench_ops siv_authenticate_ops = { }; +static void +bench_gcm_siv_encrypt_do_bench (struct bench_obj *obj, void *buf, + size_t buflen) +{ + char nonce[12] = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, + 0xdb, 0xad, 0xde, 0xca, 0xf8, 0x88 }; + bench_aead_encrypt_do_bench (obj, buf, buflen, nonce, sizeof(nonce)); +} + +static void +bench_gcm_siv_decrypt_do_bench (struct bench_obj *obj, void *buf, + size_t buflen) +{ + char nonce[12] = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, + 0xdb, 0xad, 0xde, 0xca, 0xf8, 0x88 }; + bench_aead_decrypt_do_bench (obj, buf, buflen, nonce, sizeof(nonce)); +} + +static void +bench_gcm_siv_authenticate_do_bench (struct bench_obj *obj, void *buf, + size_t buflen) +{ + char nonce[12] = { 0xca, 0xfe, 0xba, 0xbe, 0xfa, 0xce, + 0xdb, 0xad, 0xde, 0xca, 0xf8, 0x88 }; + bench_aead_authenticate_do_bench (obj, buf, buflen, nonce, sizeof(nonce)); +} + +static struct bench_ops gcm_siv_encrypt_ops = { + &bench_encrypt_init, + &bench_encrypt_free, + &bench_gcm_siv_encrypt_do_bench +}; + +static struct bench_ops gcm_siv_decrypt_ops = { + &bench_encrypt_init, + &bench_encrypt_free, + &bench_gcm_siv_decrypt_do_bench +}; + +static struct bench_ops gcm_siv_authenticate_ops = { + &bench_encrypt_init, + &bench_encrypt_free, + &bench_gcm_siv_authenticate_do_bench +}; + + static void bench_eax_encrypt_do_bench (struct bench_obj *obj, void *buf, size_t buflen) @@ -1663,6 +1709,9 @@ static struct bench_cipher_mode cipher_modes[] = { {GCRY_CIPHER_MODE_SIV, "SIV enc", &siv_encrypt_ops}, {GCRY_CIPHER_MODE_SIV, "SIV dec", &siv_decrypt_ops}, {GCRY_CIPHER_MODE_SIV, "SIV auth", &siv_authenticate_ops}, + {GCRY_CIPHER_MODE_GCM_SIV, "GCM-SIV enc", &gcm_siv_encrypt_ops}, + {GCRY_CIPHER_MODE_GCM_SIV, "GCM-SIV dec", &gcm_siv_decrypt_ops}, + {GCRY_CIPHER_MODE_GCM_SIV, "GCM-SIV auth", &gcm_siv_authenticate_ops}, {GCRY_CIPHER_MODE_POLY1305, "POLY1305 enc", &poly1305_encrypt_ops}, {GCRY_CIPHER_MODE_POLY1305, "POLY1305 dec", &poly1305_decrypt_ops}, {GCRY_CIPHER_MODE_POLY1305, "POLY1305 auth", &poly1305_authenticate_ops}, @@ -1677,6 +1726,7 @@ cipher_bench_one (int algo, struct bench_cipher_mode *pmode) struct bench_obj obj = { 0 }; double result; unsigned int blklen; + unsigned int keylen; mode.algo = algo; @@ -1685,6 +1735,10 @@ cipher_bench_one (int algo, struct bench_cipher_mode 
*pmode)
   if (!blklen)
     return;

+  keylen = gcry_cipher_get_algo_keylen (algo);
+  if (!keylen)
+    return;
+
   /* Stream cipher? Only test with "ECB" and POLY1305. */
   if (blklen == 1 && (mode.mode != GCRY_CIPHER_MODE_ECB &&
                       mode.mode != GCRY_CIPHER_MODE_POLY1305))
@@ -1715,6 +1769,14 @@ cipher_bench_one (int algo, struct bench_cipher_mode *pmode)
   if (mode.mode == GCRY_CIPHER_MODE_SIV && blklen != GCRY_SIV_BLOCK_LEN)
     return;

+  /* GCM-SIV has restrictions for block-size. */
+  if (mode.mode == GCRY_CIPHER_MODE_GCM_SIV && blklen != GCRY_SIV_BLOCK_LEN)
+    return;
+
+  /* GCM-SIV has restrictions for key length. */
+  if (mode.mode == GCRY_CIPHER_MODE_GCM_SIV && !(keylen == 16 || keylen == 32))
+    return;
+
   /* Our OCB implementation has restrictions for block-size. */
   if (mode.mode == GCRY_CIPHER_MODE_OCB && blklen != GCRY_OCB_BLOCK_LEN)
     return;
-- 
2.30.2

From jussi.kivilinna at iki.fi  Sun Aug 22 21:51:16 2021
From: jussi.kivilinna at iki.fi (Jussi Kivilinna)
Date: Sun, 22 Aug 2021 22:51:16 +0300
Subject: [PATCH] mpi/longlong: fix variable shadowing from MIPS umul_ppmm macros
Message-ID: <20210822195116.439761-1-jussi.kivilinna@iki.fi>

* mpi/longlong.h [__mips__ && W_TYPE_SIZE == 32] (umul_ppmm): Rename
temporary variable '_r' to '__r'.
[__mips__ && W_TYPE_SIZE == 64] (umul_ppmm): Ditto.
--

Signed-off-by: Jussi Kivilinna
---
 mpi/longlong.h | 18 +++++++++---------
 1 file changed, 9 insertions(+), 9 deletions(-)

diff --git a/mpi/longlong.h b/mpi/longlong.h
index 630993b4..39cdd0c2 100644
--- a/mpi/longlong.h
+++ b/mpi/longlong.h
@@ -864,10 +864,10 @@ extern USItype __udiv_qrnnd ();
                                              __GNUC_MINOR__ >= 4)
 #  define umul_ppmm(w1, w0, u, v) \
     do { \
-      UDItype _r; \
-      _r = (UDItype)(u) * (v); \
-      (w1) = _r >> 32; \
-      (w0) = (USItype) _r; \
+      UDItype __r; \
+      __r = (UDItype)(u) * (v); \
+      (w1) = __r >> 32; \
+      (w0) = (USItype) __r; \
     } while (0)
 # elif __GNUC__ > 2 || __GNUC_MINOR__ >= 7
 #  define umul_ppmm(w1, w0, u, v) \
@@ -898,11 +898,11 @@ extern USItype __udiv_qrnnd ();
                                               __GNUC_MINOR__ >= 4)
 typedef unsigned int UTItype __attribute__ ((mode (TI)));
 #  define umul_ppmm(w1, w0, u, v) \
-    do { \
-      UTItype _r; \
-      _r = (UTItype)(u) * (v); \
-      (w1) = _r >> 64; \
-      (w0) = (UDItype) _r; \
+    do { \
+      UTItype __r; \
+      __r = (UTItype)(u) * (v); \
+      (w1) = __r >> 64; \
+      (w0) = (UDItype) __r; \
     } while (0)
 # elif __GNUC__ > 2 || __GNUC_MINOR__ >= 7
 #  define umul_ppmm(w1, w0, u, v) \
-- 
2.30.2
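
[The shadowing hazard this patch removes can be reduced to a few
self-contained lines. The sketch below is illustrative and not taken
from the patch: the typedefs follow longlong.h, but the macro names,
values and driver are made up.]

/* Reduced illustration of the variable-shadowing bug.  If the macro's
 * temporary is named '_r' and a caller passes its own variable '_r'
 * as an operand, the expansion's inner declaration shadows the
 * caller's variable, so the multiply would read the macro's own
 * uninitialized temporary instead of the argument (GCC's -Wshadow
 * flags this pattern). */
#include <stdio.h>

typedef unsigned int USItype;
typedef unsigned long long UDItype;

#define umul_ppmm_shadowed(w1, w0, u, v) \
  do { \
    UDItype _r; \
    _r = (UDItype)(u) * (v); \
    (w1) = _r >> 32; \
    (w0) = (USItype)_r; \
  } while (0)

#define umul_ppmm_fixed(w1, w0, u, v) \
  do { \
    UDItype __r; \
    __r = (UDItype)(u) * (v); \
    (w1) = __r >> 32; \
    (w0) = (USItype)__r; \
  } while (0)

int
main (void)
{
  USItype _r = 0xdeadbeef;  /* caller variable with the clashing name */
  USItype hi, lo;

  /* umul_ppmm_shadowed (hi, lo, _r, 2) would expand '(u)' to the
     macro-local '_r', not this one, and compute garbage.  The fixed
     macro has no such clash: */
  umul_ppmm_fixed (hi, lo, _r, 2);
  printf ("%08x%08x\n", hi, lo);  /* prints 00000001bd5b7dde */
  return 0;
}
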