Merge branch 'aes-ecb'

Adds support for AES in ECB mode (where supported by the underlying
library/API) and uses it for the CTR-DRBG implementation.
Tobias Brunner committed 2019-11-28 17:16:36 +01:00
commit 8b6aadae9c
20 changed files with 1075 additions and 46 deletions
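For context: NIST SP 800-90A's CTR_DRBG produces output by encrypting an incrementing counter V with a raw block cipher, one independent block at a time, which is exactly ECB. Emulating that with CBC and an all-zero IV (as the code did before this merge) works but is clumsy and drags an IV through an algorithm that has none. A minimal sketch of the pattern, with illustrative names that are not strongSwan API:

```c
#include <stdint.h>
#include <string.h>

/* one raw AES block operation: out = E(key, in) */
typedef void (*block_fn_t)(const uint8_t key[16], const uint8_t in[16],
                           uint8_t out[16]);

/* CTR_DRBG-style output: encrypt V, V+1, V+2, ... with no chaining */
static void ctr_generate(block_fn_t ecb, const uint8_t key[16],
                         uint8_t v[16], uint8_t *out, size_t len)
{
	uint8_t block[16];
	size_t n;
	int i;

	while (len)
	{
		for (i = 15; i >= 0 && ++v[i] == 0; i--)
		{
			/* big-endian increment of the counter V, with carry */
		}
		ecb(key, v, block);
		n = len < 16 ? len : 16;
		memcpy(out, block, n);
		out += n;
		len -= n;
	}
}
```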

View File

@@ -47,13 +47,14 @@ ENUM_NEXT(encryption_algorithm_names, ENCR_CAMELLIA_CBC, ENCR_CHACHA20_POLY1305,
"CAMELLIA_CCM_12",
"CAMELLIA_CCM_16",
"CHACHA20_POLY1305");
-ENUM_NEXT(encryption_algorithm_names, ENCR_UNDEFINED, ENCR_RC2_CBC, ENCR_CHACHA20_POLY1305,
+ENUM_NEXT(encryption_algorithm_names, ENCR_UNDEFINED, ENCR_AES_ECB, ENCR_CHACHA20_POLY1305,
"UNDEFINED",
"DES_ECB",
"SERPENT_CBC",
"TWOFISH_CBC",
"RC2_CBC");
ENUM_END(encryption_algorithm_names, ENCR_RC2_CBC);
"RC2_CBC",
"AES_ECB");
ENUM_END(encryption_algorithm_names, ENCR_AES_ECB);
/*
* Described in header.
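These tables back strongSwan's %N printf hook, so the new identifier immediately becomes printable in log output; roughly:

```c
/* prints "AES_ECB" via the enum_name_t table extended above */
DBG1(DBG_LIB, "using %N", encryption_algorithm_names, ENCR_AES_ECB);
```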

View File

@@ -64,6 +64,7 @@ enum encryption_algorithm_t {
ENCR_TWOFISH_CBC = 1027,
/* see macros below to handle RC2 (effective) key length */
ENCR_RC2_CBC = 1028,
+ENCR_AES_ECB = 1029,
};
#define DES_BLOCK_SIZE 8

View File

@@ -61,6 +61,7 @@ iv_gen_t* iv_gen_create_for_alg(encryption_algorithm_t alg)
case ENCR_DES_ECB:
case ENCR_DES_IV32:
case ENCR_DES_IV64:
+case ENCR_AES_ECB:
break;
}
return NULL;
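Returning NULL here is how the factory says "this algorithm takes no IV"; a caller sketch (hedged, not quoted from the tree):

```c
iv_gen_t *gen;

gen = iv_gen_create_for_alg(ENCR_AES_ECB);
if (!gen)
{
	/* no IV generator exists; the matching crypter also reports
	 * get_iv_size() == 0, so callers pass chunk_empty when encrypting */
}
```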

View File

@@ -70,6 +70,11 @@ struct private_aes_crypter_t {
* Key size of this AES cypher object.
*/
uint32_t key_size;
+/**
+ * Does AES mode require an IV
+ */
+bool has_iv;
};
/**
@@ -804,26 +809,29 @@ METHOD(crypter_t, decrypt, bool,
in = data.ptr;
pos = data.len-16;
in += pos;
out += pos;
while (pos >= 0)
{
decrypt_block(this, in, out);
-if (pos==0)
-{
-iv_i=(const uint32_t*) (iv.ptr);
-}
-else
-{
-iv_i=(const uint32_t*) (in-16);
-}
-*((uint32_t *)(&out[ 0])) ^= iv_i[0];
-*((uint32_t *)(&out[ 4])) ^= iv_i[1];
-*((uint32_t *)(&out[ 8])) ^= iv_i[2];
-*((uint32_t *)(&out[12])) ^= iv_i[3];
-in-=16;
-out-=16;
-pos-=16;
+if (this->has_iv)
+{
+	if (pos == 0)
+	{
+		iv_i = (const uint32_t*) (iv.ptr);
+	}
+	else
+	{
+		iv_i = (const uint32_t*) (in-16);
+	}
+	*((uint32_t *)(&out[ 0])) ^= iv_i[0];
+	*((uint32_t *)(&out[ 4])) ^= iv_i[1];
+	*((uint32_t *)(&out[ 8])) ^= iv_i[2];
+	*((uint32_t *)(&out[12])) ^= iv_i[3];
+}
+in -= 16;
+out -= 16;
+pos -= 16;
}
return TRUE;
}
@@ -835,7 +843,7 @@ METHOD(crypter_t, encrypt, bool,
const uint32_t *iv_i;
uint8_t *in, *out;
in = data.ptr;
out = data.ptr;
if (encrypted)
{
@@ -843,25 +851,36 @@ METHOD(crypter_t, encrypt, bool,
out = encrypted->ptr;
}
-pos=0;
-while(pos<data.len)
+pos = 0;
+while (pos < data.len)
{
-if (pos==0)
-{
-iv_i=(const uint32_t*) iv.ptr;
-}
-else
-{
-iv_i=(const uint32_t*) (out-16);
-}
-*((uint32_t *)(&out[ 0])) = iv_i[0]^*((const uint32_t *)(&in[ 0]));
-*((uint32_t *)(&out[ 4])) = iv_i[1]^*((const uint32_t *)(&in[ 4]));
-*((uint32_t *)(&out[ 8])) = iv_i[2]^*((const uint32_t *)(&in[ 8]));
-*((uint32_t *)(&out[12])) = iv_i[3]^*((const uint32_t *)(&in[12]));
+if (this->has_iv)
+{
+	if (pos == 0)
+	{
+		iv_i = (const uint32_t*) iv.ptr;
+	}
+	else
+	{
+		iv_i = (const uint32_t*) (out-16);
+	}
+	*((uint32_t *)(&out[ 0])) = iv_i[0]^*((const uint32_t *)(&in[ 0]));
+	*((uint32_t *)(&out[ 4])) = iv_i[1]^*((const uint32_t *)(&in[ 4]));
+	*((uint32_t *)(&out[ 8])) = iv_i[2]^*((const uint32_t *)(&in[ 8]));
+	*((uint32_t *)(&out[12])) = iv_i[3]^*((const uint32_t *)(&in[12]));
+}
+else
+{
+	*((uint32_t *)(&out[ 0])) = *((const uint32_t *)(&in[ 0]));
+	*((uint32_t *)(&out[ 4])) = *((const uint32_t *)(&in[ 4]));
+	*((uint32_t *)(&out[ 8])) = *((const uint32_t *)(&in[ 8]));
+	*((uint32_t *)(&out[12])) = *((const uint32_t *)(&in[12]));
+}
encrypt_block(this, out, out);
-in+=16;
-out+=16;
-pos+=16;
+in += 16;
+out += 16;
+pos += 16;
}
return TRUE;
}
@@ -875,7 +894,7 @@ METHOD(crypter_t, get_block_size, size_t,
METHOD(crypter_t, get_iv_size, size_t,
private_aes_crypter_t *this)
{
-return AES_BLOCK_SIZE;
+return this->has_iv ? AES_BLOCK_SIZE : 0;
}
METHOD(crypter_t, get_key_size, size_t,
@@ -978,11 +997,20 @@ METHOD(crypter_t, destroy, void,
aes_crypter_t *aes_crypter_create(encryption_algorithm_t algo, size_t key_size)
{
private_aes_crypter_t *this;
+bool has_iv;
-if (algo != ENCR_AES_CBC)
+switch (algo)
{
-return NULL;
+case ENCR_AES_CBC:
+	has_iv = TRUE;
+	break;
+case ENCR_AES_ECB:
+	has_iv = FALSE;
+	break;
+default:
+	return NULL;
}
switch (key_size)
{
case 0:
@@ -1010,6 +1038,7 @@ aes_crypter_t *aes_crypter_create(encryption_algorithm_t algo, size_t key_size)
},
.key_size = key_size,
.aes_Nkey = key_size / 4,
+.has_iv = has_iv,
);
return &this->public;
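Taken together, the software AES crypter can now be driven in ECB mode through the regular crypter_t interface. A usage sketch, assuming the usual crypto-factory lookup and with error handling trimmed:

```c
#include <library.h>

static bool demo_aes_ecb(void)
{
	crypter_t *crypter;
	uint8_t key[16] = "0123456789abcdef";
	uint8_t buf[32] = "two 16-byte blocks of plaintext!";
	bool ok = FALSE;

	/* ECB works on whole blocks: data.len must be a multiple of
	 * AES_BLOCK_SIZE, and no IV is used (pass chunk_empty) */
	crypter = lib->crypto->create_crypter(lib->crypto, ENCR_AES_ECB, 16);
	if (crypter)
	{
		ok = crypter->set_key(crypter, chunk_create(key, sizeof(key))) &&
			 crypter->encrypt(crypter, chunk_create(buf, sizeof(buf)),
							  chunk_empty, NULL);
		/* buf now holds the ciphertext, encrypted in place */
		crypter->destroy(crypter);
	}
	return ok;
}
```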

View File

@@ -45,6 +45,9 @@ METHOD(plugin_t, get_features, int,
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CBC, 16),
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CBC, 24),
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CBC, 32),
+PLUGIN_PROVIDE(CRYPTER, ENCR_AES_ECB, 16),
+PLUGIN_PROVIDE(CRYPTER, ENCR_AES_ECB, 24),
+PLUGIN_PROVIDE(CRYPTER, ENCR_AES_ECB, 32),
};
*features = f;
return countof(f);

View File

@@ -16,6 +16,7 @@ endif
libstrongswan_aesni_la_SOURCES = \
aesni_key.h aesni_key.c \
aesni_cbc.h aesni_cbc.c \
+aesni_ecb.h aesni_ecb.c \
aesni_ctr.h aesni_ctr.c \
aesni_ccm.h aesni_ccm.c \
aesni_gcm.h aesni_gcm.c \

View File

@@ -0,0 +1,836 @@
/*
* Copyright (C) 2015 Martin Willi
* Copyright (C) 2015 revosec AG
*
* Copyright (C) 2019 Andreas Steffen
* HSR Hochschule fuer Technik Rapperswil
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version. See <http://www.fsf.org/copyleft/gpl.txt>.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*/
#include "aesni_ecb.h"
#include "aesni_key.h"
/**
* Pipeline parallelism we use for ECB encryption/decryption
*/
#define ECB_PARALLELISM 4
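/*
 * ECB has no dependency between consecutive blocks (unlike CBC
 * encryption), so four blocks are interleaved per loop iteration
 * below, keeping several AESENC/AESDEC instructions in flight to
 * hide their multi-cycle latency.
 */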
typedef struct private_aesni_ecb_t private_aesni_ecb_t;
/**
* ECB en/decryption method type
*/
typedef void (*aesni_ecb_fn_t)(aesni_key_t*, u_int, u_char*, u_char*);
/**
* Private data of an aesni_ecb_t object.
*/
struct private_aesni_ecb_t {
/**
* Public aesni_ecb_t interface.
*/
aesni_ecb_t public;
/**
* Key size
*/
u_int key_size;
/**
* Encryption key schedule
*/
aesni_key_t *ekey;
/**
* Decryption key schedule
*/
aesni_key_t *dkey;
/**
* Encryption method
*/
aesni_ecb_fn_t encrypt;
/**
* Decryption method
*/
aesni_ecb_fn_t decrypt;
};
/**
* AES-128 ECB encryption
*/
static void encrypt_ecb128(aesni_key_t *key, u_int blocks, u_char *in,
u_char *out)
{
__m128i *ks, *bi, *bo;
__m128i t1, t2, t3, t4;
u_int i, pblocks;
ks = key->schedule;
bi = (__m128i*)in;
bo = (__m128i*)out;
pblocks = blocks - (blocks % ECB_PARALLELISM);
for (i = 0; i < pblocks; i += ECB_PARALLELISM)
{
t1 = _mm_loadu_si128(bi + i + 0);
t2 = _mm_loadu_si128(bi + i + 1);
t3 = _mm_loadu_si128(bi + i + 2);
t4 = _mm_loadu_si128(bi + i + 3);
t1 = _mm_xor_si128(t1, ks[0]);
t2 = _mm_xor_si128(t2, ks[0]);
t3 = _mm_xor_si128(t3, ks[0]);
t4 = _mm_xor_si128(t4, ks[0]);
t1 = _mm_aesenc_si128(t1, ks[1]);
t2 = _mm_aesenc_si128(t2, ks[1]);
t3 = _mm_aesenc_si128(t3, ks[1]);
t4 = _mm_aesenc_si128(t4, ks[1]);
t1 = _mm_aesenc_si128(t1, ks[2]);
t2 = _mm_aesenc_si128(t2, ks[2]);
t3 = _mm_aesenc_si128(t3, ks[2]);
t4 = _mm_aesenc_si128(t4, ks[2]);
t1 = _mm_aesenc_si128(t1, ks[3]);
t2 = _mm_aesenc_si128(t2, ks[3]);
t3 = _mm_aesenc_si128(t3, ks[3]);
t4 = _mm_aesenc_si128(t4, ks[3]);
t1 = _mm_aesenc_si128(t1, ks[4]);
t2 = _mm_aesenc_si128(t2, ks[4]);
t3 = _mm_aesenc_si128(t3, ks[4]);
t4 = _mm_aesenc_si128(t4, ks[4]);
t1 = _mm_aesenc_si128(t1, ks[5]);
t2 = _mm_aesenc_si128(t2, ks[5]);
t3 = _mm_aesenc_si128(t3, ks[5]);
t4 = _mm_aesenc_si128(t4, ks[5]);
t1 = _mm_aesenc_si128(t1, ks[6]);
t2 = _mm_aesenc_si128(t2, ks[6]);
t3 = _mm_aesenc_si128(t3, ks[6]);
t4 = _mm_aesenc_si128(t4, ks[6]);
t1 = _mm_aesenc_si128(t1, ks[7]);
t2 = _mm_aesenc_si128(t2, ks[7]);
t3 = _mm_aesenc_si128(t3, ks[7]);
t4 = _mm_aesenc_si128(t4, ks[7]);
t1 = _mm_aesenc_si128(t1, ks[8]);
t2 = _mm_aesenc_si128(t2, ks[8]);
t3 = _mm_aesenc_si128(t3, ks[8]);
t4 = _mm_aesenc_si128(t4, ks[8]);
t1 = _mm_aesenc_si128(t1, ks[9]);
t2 = _mm_aesenc_si128(t2, ks[9]);
t3 = _mm_aesenc_si128(t3, ks[9]);
t4 = _mm_aesenc_si128(t4, ks[9]);
t1 = _mm_aesenclast_si128(t1, ks[10]);
t2 = _mm_aesenclast_si128(t2, ks[10]);
t3 = _mm_aesenclast_si128(t3, ks[10]);
t4 = _mm_aesenclast_si128(t4, ks[10]);
_mm_storeu_si128(bo + i + 0, t1);
_mm_storeu_si128(bo + i + 1, t2);
_mm_storeu_si128(bo + i + 2, t3);
_mm_storeu_si128(bo + i + 3, t4);
}
for (i = pblocks; i < blocks; i++)
{
t1 = _mm_loadu_si128(bi + i);
t1 = _mm_xor_si128(t1, ks[0]);
t1 = _mm_aesenc_si128(t1, ks[1]);
t1 = _mm_aesenc_si128(t1, ks[2]);
t1 = _mm_aesenc_si128(t1, ks[3]);
t1 = _mm_aesenc_si128(t1, ks[4]);
t1 = _mm_aesenc_si128(t1, ks[5]);
t1 = _mm_aesenc_si128(t1, ks[6]);
t1 = _mm_aesenc_si128(t1, ks[7]);
t1 = _mm_aesenc_si128(t1, ks[8]);
t1 = _mm_aesenc_si128(t1, ks[9]);
t1 = _mm_aesenclast_si128(t1, ks[10]);
_mm_storeu_si128(bo + i, t1);
}
}
/**
* AES-128 ECB decryption
*/
static void decrypt_ecb128(aesni_key_t *key, u_int blocks, u_char *in,
u_char *out)
{
__m128i *ks, *bi, *bo;
__m128i t1, t2, t3, t4;
u_int i, pblocks;
ks = key->schedule;
bi = (__m128i*)in;
bo = (__m128i*)out;
pblocks = blocks - (blocks % ECB_PARALLELISM);
for (i = 0; i < pblocks; i += ECB_PARALLELISM)
{
t1 = _mm_loadu_si128(bi + i + 0);
t2 = _mm_loadu_si128(bi + i + 1);
t3 = _mm_loadu_si128(bi + i + 2);
t4 = _mm_loadu_si128(bi + i + 3);
t1 = _mm_xor_si128(t1, ks[0]);
t2 = _mm_xor_si128(t2, ks[0]);
t3 = _mm_xor_si128(t3, ks[0]);
t4 = _mm_xor_si128(t4, ks[0]);
t1 = _mm_aesdec_si128(t1, ks[1]);
t2 = _mm_aesdec_si128(t2, ks[1]);
t3 = _mm_aesdec_si128(t3, ks[1]);
t4 = _mm_aesdec_si128(t4, ks[1]);
t1 = _mm_aesdec_si128(t1, ks[2]);
t2 = _mm_aesdec_si128(t2, ks[2]);
t3 = _mm_aesdec_si128(t3, ks[2]);
t4 = _mm_aesdec_si128(t4, ks[2]);
t1 = _mm_aesdec_si128(t1, ks[3]);
t2 = _mm_aesdec_si128(t2, ks[3]);
t3 = _mm_aesdec_si128(t3, ks[3]);
t4 = _mm_aesdec_si128(t4, ks[3]);
t1 = _mm_aesdec_si128(t1, ks[4]);
t2 = _mm_aesdec_si128(t2, ks[4]);
t3 = _mm_aesdec_si128(t3, ks[4]);
t4 = _mm_aesdec_si128(t4, ks[4]);
t1 = _mm_aesdec_si128(t1, ks[5]);
t2 = _mm_aesdec_si128(t2, ks[5]);
t3 = _mm_aesdec_si128(t3, ks[5]);
t4 = _mm_aesdec_si128(t4, ks[5]);
t1 = _mm_aesdec_si128(t1, ks[6]);
t2 = _mm_aesdec_si128(t2, ks[6]);
t3 = _mm_aesdec_si128(t3, ks[6]);
t4 = _mm_aesdec_si128(t4, ks[6]);
t1 = _mm_aesdec_si128(t1, ks[7]);
t2 = _mm_aesdec_si128(t2, ks[7]);
t3 = _mm_aesdec_si128(t3, ks[7]);
t4 = _mm_aesdec_si128(t4, ks[7]);
t1 = _mm_aesdec_si128(t1, ks[8]);
t2 = _mm_aesdec_si128(t2, ks[8]);
t3 = _mm_aesdec_si128(t3, ks[8]);
t4 = _mm_aesdec_si128(t4, ks[8]);
t1 = _mm_aesdec_si128(t1, ks[9]);
t2 = _mm_aesdec_si128(t2, ks[9]);
t3 = _mm_aesdec_si128(t3, ks[9]);
t4 = _mm_aesdec_si128(t4, ks[9]);
t1 = _mm_aesdeclast_si128(t1, ks[10]);
t2 = _mm_aesdeclast_si128(t2, ks[10]);
t3 = _mm_aesdeclast_si128(t3, ks[10]);
t4 = _mm_aesdeclast_si128(t4, ks[10]);
_mm_storeu_si128(bo + i + 0, t1);
_mm_storeu_si128(bo + i + 1, t2);
_mm_storeu_si128(bo + i + 2, t3);
_mm_storeu_si128(bo + i + 3, t4);
}
for (i = pblocks; i < blocks; i++)
{
t1 = _mm_loadu_si128(bi + i);
t1 = _mm_xor_si128(t1, ks[0]);
t1 = _mm_aesdec_si128(t1, ks[1]);
t1 = _mm_aesdec_si128(t1, ks[2]);
t1 = _mm_aesdec_si128(t1, ks[3]);
t1 = _mm_aesdec_si128(t1, ks[4]);
t1 = _mm_aesdec_si128(t1, ks[5]);
t1 = _mm_aesdec_si128(t1, ks[6]);
t1 = _mm_aesdec_si128(t1, ks[7]);
t1 = _mm_aesdec_si128(t1, ks[8]);
t1 = _mm_aesdec_si128(t1, ks[9]);
t1 = _mm_aesdeclast_si128(t1, ks[10]);
_mm_storeu_si128(bo + i, t1);
}
}
/**
* AES-192 ECB encryption
*/
static void encrypt_ecb192(aesni_key_t *key, u_int blocks, u_char *in,
u_char *out)
{
__m128i *ks, *bi, *bo;
__m128i t1, t2, t3, t4;
u_int i, pblocks;
ks = key->schedule;
bi = (__m128i*)in;
bo = (__m128i*)out;
pblocks = blocks - (blocks % ECB_PARALLELISM);
for (i = 0; i < pblocks; i += ECB_PARALLELISM)
{
t1 = _mm_loadu_si128(bi + i + 0);
t2 = _mm_loadu_si128(bi + i + 1);
t3 = _mm_loadu_si128(bi + i + 2);
t4 = _mm_loadu_si128(bi + i + 3);
t1 = _mm_xor_si128(t1, ks[0]);
t2 = _mm_xor_si128(t2, ks[0]);
t3 = _mm_xor_si128(t3, ks[0]);
t4 = _mm_xor_si128(t4, ks[0]);
t1 = _mm_aesenc_si128(t1, ks[1]);
t2 = _mm_aesenc_si128(t2, ks[1]);
t3 = _mm_aesenc_si128(t3, ks[1]);
t4 = _mm_aesenc_si128(t4, ks[1]);
t1 = _mm_aesenc_si128(t1, ks[2]);
t2 = _mm_aesenc_si128(t2, ks[2]);
t3 = _mm_aesenc_si128(t3, ks[2]);
t4 = _mm_aesenc_si128(t4, ks[2]);
t1 = _mm_aesenc_si128(t1, ks[3]);
t2 = _mm_aesenc_si128(t2, ks[3]);
t3 = _mm_aesenc_si128(t3, ks[3]);
t4 = _mm_aesenc_si128(t4, ks[3]);
t1 = _mm_aesenc_si128(t1, ks[4]);
t2 = _mm_aesenc_si128(t2, ks[4]);
t3 = _mm_aesenc_si128(t3, ks[4]);
t4 = _mm_aesenc_si128(t4, ks[4]);
t1 = _mm_aesenc_si128(t1, ks[5]);
t2 = _mm_aesenc_si128(t2, ks[5]);
t3 = _mm_aesenc_si128(t3, ks[5]);
t4 = _mm_aesenc_si128(t4, ks[5]);
t1 = _mm_aesenc_si128(t1, ks[6]);
t2 = _mm_aesenc_si128(t2, ks[6]);
t3 = _mm_aesenc_si128(t3, ks[6]);
t4 = _mm_aesenc_si128(t4, ks[6]);
t1 = _mm_aesenc_si128(t1, ks[7]);
t2 = _mm_aesenc_si128(t2, ks[7]);
t3 = _mm_aesenc_si128(t3, ks[7]);
t4 = _mm_aesenc_si128(t4, ks[7]);
t1 = _mm_aesenc_si128(t1, ks[8]);
t2 = _mm_aesenc_si128(t2, ks[8]);
t3 = _mm_aesenc_si128(t3, ks[8]);
t4 = _mm_aesenc_si128(t4, ks[8]);
t1 = _mm_aesenc_si128(t1, ks[9]);
t2 = _mm_aesenc_si128(t2, ks[9]);
t3 = _mm_aesenc_si128(t3, ks[9]);
t4 = _mm_aesenc_si128(t4, ks[9]);
t1 = _mm_aesenc_si128(t1, ks[10]);
t2 = _mm_aesenc_si128(t2, ks[10]);
t3 = _mm_aesenc_si128(t3, ks[10]);
t4 = _mm_aesenc_si128(t4, ks[10]);
t1 = _mm_aesenc_si128(t1, ks[11]);
t2 = _mm_aesenc_si128(t2, ks[11]);
t3 = _mm_aesenc_si128(t3, ks[11]);
t4 = _mm_aesenc_si128(t4, ks[11]);
t1 = _mm_aesenclast_si128(t1, ks[12]);
t2 = _mm_aesenclast_si128(t2, ks[12]);
t3 = _mm_aesenclast_si128(t3, ks[12]);
t4 = _mm_aesenclast_si128(t4, ks[12]);
_mm_storeu_si128(bo + i + 0, t1);
_mm_storeu_si128(bo + i + 1, t2);
_mm_storeu_si128(bo + i + 2, t3);
_mm_storeu_si128(bo + i + 3, t4);
}
for (i = pblocks; i < blocks; i++)
{
t1 = _mm_loadu_si128(bi + i);
t1 = _mm_xor_si128(t1, ks[0]);
t1 = _mm_aesenc_si128(t1, ks[1]);
t1 = _mm_aesenc_si128(t1, ks[2]);
t1 = _mm_aesenc_si128(t1, ks[3]);
t1 = _mm_aesenc_si128(t1, ks[4]);
t1 = _mm_aesenc_si128(t1, ks[5]);
t1 = _mm_aesenc_si128(t1, ks[6]);
t1 = _mm_aesenc_si128(t1, ks[7]);
t1 = _mm_aesenc_si128(t1, ks[8]);
t1 = _mm_aesenc_si128(t1, ks[9]);
t1 = _mm_aesenc_si128(t1, ks[10]);
t1 = _mm_aesenc_si128(t1, ks[11]);
t1 = _mm_aesenclast_si128(t1, ks[12]);
_mm_storeu_si128(bo + i, t1);
}
}
/**
* AES-192 ECB decryption
*/
static void decrypt_ecb192(aesni_key_t *key, u_int blocks, u_char *in,
u_char *out)
{
__m128i *ks, *bi, *bo;
__m128i t1, t2, t3, t4;
u_int i, pblocks;
ks = key->schedule;
bi = (__m128i*)in;
bo = (__m128i*)out;
pblocks = blocks - (blocks % ECB_PARALLELISM);
for (i = 0; i < pblocks; i += ECB_PARALLELISM)
{
t1 = _mm_loadu_si128(bi + i + 0);
t2 = _mm_loadu_si128(bi + i + 1);
t3 = _mm_loadu_si128(bi + i + 2);
t4 = _mm_loadu_si128(bi + i + 3);
t1 = _mm_xor_si128(t1, ks[0]);
t2 = _mm_xor_si128(t2, ks[0]);
t3 = _mm_xor_si128(t3, ks[0]);
t4 = _mm_xor_si128(t4, ks[0]);
t1 = _mm_aesdec_si128(t1, ks[1]);
t2 = _mm_aesdec_si128(t2, ks[1]);
t3 = _mm_aesdec_si128(t3, ks[1]);
t4 = _mm_aesdec_si128(t4, ks[1]);
t1 = _mm_aesdec_si128(t1, ks[2]);
t2 = _mm_aesdec_si128(t2, ks[2]);
t3 = _mm_aesdec_si128(t3, ks[2]);
t4 = _mm_aesdec_si128(t4, ks[2]);
t1 = _mm_aesdec_si128(t1, ks[3]);
t2 = _mm_aesdec_si128(t2, ks[3]);
t3 = _mm_aesdec_si128(t3, ks[3]);
t4 = _mm_aesdec_si128(t4, ks[3]);
t1 = _mm_aesdec_si128(t1, ks[4]);
t2 = _mm_aesdec_si128(t2, ks[4]);
t3 = _mm_aesdec_si128(t3, ks[4]);
t4 = _mm_aesdec_si128(t4, ks[4]);
t1 = _mm_aesdec_si128(t1, ks[5]);
t2 = _mm_aesdec_si128(t2, ks[5]);
t3 = _mm_aesdec_si128(t3, ks[5]);
t4 = _mm_aesdec_si128(t4, ks[5]);
t1 = _mm_aesdec_si128(t1, ks[6]);
t2 = _mm_aesdec_si128(t2, ks[6]);
t3 = _mm_aesdec_si128(t3, ks[6]);
t4 = _mm_aesdec_si128(t4, ks[6]);
t1 = _mm_aesdec_si128(t1, ks[7]);
t2 = _mm_aesdec_si128(t2, ks[7]);
t3 = _mm_aesdec_si128(t3, ks[7]);
t4 = _mm_aesdec_si128(t4, ks[7]);
t1 = _mm_aesdec_si128(t1, ks[8]);
t2 = _mm_aesdec_si128(t2, ks[8]);
t3 = _mm_aesdec_si128(t3, ks[8]);
t4 = _mm_aesdec_si128(t4, ks[8]);
t1 = _mm_aesdec_si128(t1, ks[9]);
t2 = _mm_aesdec_si128(t2, ks[9]);
t3 = _mm_aesdec_si128(t3, ks[9]);
t4 = _mm_aesdec_si128(t4, ks[9]);
t1 = _mm_aesdec_si128(t1, ks[10]);
t2 = _mm_aesdec_si128(t2, ks[10]);
t3 = _mm_aesdec_si128(t3, ks[10]);
t4 = _mm_aesdec_si128(t4, ks[10]);
t1 = _mm_aesdec_si128(t1, ks[11]);
t2 = _mm_aesdec_si128(t2, ks[11]);
t3 = _mm_aesdec_si128(t3, ks[11]);
t4 = _mm_aesdec_si128(t4, ks[11]);
t1 = _mm_aesdeclast_si128(t1, ks[12]);
t2 = _mm_aesdeclast_si128(t2, ks[12]);
t3 = _mm_aesdeclast_si128(t3, ks[12]);
t4 = _mm_aesdeclast_si128(t4, ks[12]);
_mm_storeu_si128(bo + i + 0, t1);
_mm_storeu_si128(bo + i + 1, t2);
_mm_storeu_si128(bo + i + 2, t3);
_mm_storeu_si128(bo + i + 3, t4);
}
for (i = pblocks; i < blocks; i++)
{
t1 = _mm_loadu_si128(bi + i);
t1 = _mm_xor_si128(t1, ks[0]);
t1 = _mm_aesdec_si128(t1, ks[1]);
t1 = _mm_aesdec_si128(t1, ks[2]);
t1 = _mm_aesdec_si128(t1, ks[3]);
t1 = _mm_aesdec_si128(t1, ks[4]);
t1 = _mm_aesdec_si128(t1, ks[5]);
t1 = _mm_aesdec_si128(t1, ks[6]);
t1 = _mm_aesdec_si128(t1, ks[7]);
t1 = _mm_aesdec_si128(t1, ks[8]);
t1 = _mm_aesdec_si128(t1, ks[9]);
t1 = _mm_aesdec_si128(t1, ks[10]);
t1 = _mm_aesdec_si128(t1, ks[11]);
t1 = _mm_aesdeclast_si128(t1, ks[12]);
_mm_storeu_si128(bo + i, t1);
}
}
/**
* AES-256 ECB encryption
*/
static void encrypt_ecb256(aesni_key_t *key, u_int blocks, u_char *in,
u_char *out)
{
__m128i *ks, *bi, *bo;
__m128i t1, t2, t3, t4;
u_int i, pblocks;
ks = key->schedule;
bi = (__m128i*)in;
bo = (__m128i*)out;
pblocks = blocks - (blocks % ECB_PARALLELISM);
for (i = 0; i < pblocks; i += ECB_PARALLELISM)
{
t1 = _mm_loadu_si128(bi + i + 0);
t2 = _mm_loadu_si128(bi + i + 1);
t3 = _mm_loadu_si128(bi + i + 2);
t4 = _mm_loadu_si128(bi + i + 3);
t1 = _mm_xor_si128(t1, ks[0]);
t2 = _mm_xor_si128(t2, ks[0]);
t3 = _mm_xor_si128(t3, ks[0]);
t4 = _mm_xor_si128(t4, ks[0]);
t1 = _mm_aesenc_si128(t1, ks[1]);
t2 = _mm_aesenc_si128(t2, ks[1]);
t3 = _mm_aesenc_si128(t3, ks[1]);
t4 = _mm_aesenc_si128(t4, ks[1]);
t1 = _mm_aesenc_si128(t1, ks[2]);
t2 = _mm_aesenc_si128(t2, ks[2]);
t3 = _mm_aesenc_si128(t3, ks[2]);
t4 = _mm_aesenc_si128(t4, ks[2]);
t1 = _mm_aesenc_si128(t1, ks[3]);
t2 = _mm_aesenc_si128(t2, ks[3]);
t3 = _mm_aesenc_si128(t3, ks[3]);
t4 = _mm_aesenc_si128(t4, ks[3]);
t1 = _mm_aesenc_si128(t1, ks[4]);
t2 = _mm_aesenc_si128(t2, ks[4]);
t3 = _mm_aesenc_si128(t3, ks[4]);
t4 = _mm_aesenc_si128(t4, ks[4]);
t1 = _mm_aesenc_si128(t1, ks[5]);
t2 = _mm_aesenc_si128(t2, ks[5]);
t3 = _mm_aesenc_si128(t3, ks[5]);
t4 = _mm_aesenc_si128(t4, ks[5]);
t1 = _mm_aesenc_si128(t1, ks[6]);
t2 = _mm_aesenc_si128(t2, ks[6]);
t3 = _mm_aesenc_si128(t3, ks[6]);
t4 = _mm_aesenc_si128(t4, ks[6]);
t1 = _mm_aesenc_si128(t1, ks[7]);
t2 = _mm_aesenc_si128(t2, ks[7]);
t3 = _mm_aesenc_si128(t3, ks[7]);
t4 = _mm_aesenc_si128(t4, ks[7]);
t1 = _mm_aesenc_si128(t1, ks[8]);
t2 = _mm_aesenc_si128(t2, ks[8]);
t3 = _mm_aesenc_si128(t3, ks[8]);
t4 = _mm_aesenc_si128(t4, ks[8]);
t1 = _mm_aesenc_si128(t1, ks[9]);
t2 = _mm_aesenc_si128(t2, ks[9]);
t3 = _mm_aesenc_si128(t3, ks[9]);
t4 = _mm_aesenc_si128(t4, ks[9]);
t1 = _mm_aesenc_si128(t1, ks[10]);
t2 = _mm_aesenc_si128(t2, ks[10]);
t3 = _mm_aesenc_si128(t3, ks[10]);
t4 = _mm_aesenc_si128(t4, ks[10]);
t1 = _mm_aesenc_si128(t1, ks[11]);
t2 = _mm_aesenc_si128(t2, ks[11]);
t3 = _mm_aesenc_si128(t3, ks[11]);
t4 = _mm_aesenc_si128(t4, ks[11]);
t1 = _mm_aesenc_si128(t1, ks[12]);
t2 = _mm_aesenc_si128(t2, ks[12]);
t3 = _mm_aesenc_si128(t3, ks[12]);
t4 = _mm_aesenc_si128(t4, ks[12]);
t1 = _mm_aesenc_si128(t1, ks[13]);
t2 = _mm_aesenc_si128(t2, ks[13]);
t3 = _mm_aesenc_si128(t3, ks[13]);
t4 = _mm_aesenc_si128(t4, ks[13]);
t1 = _mm_aesenclast_si128(t1, ks[14]);
t2 = _mm_aesenclast_si128(t2, ks[14]);
t3 = _mm_aesenclast_si128(t3, ks[14]);
t4 = _mm_aesenclast_si128(t4, ks[14]);
_mm_storeu_si128(bo + i + 0, t1);
_mm_storeu_si128(bo + i + 1, t2);
_mm_storeu_si128(bo + i + 2, t3);
_mm_storeu_si128(bo + i + 3, t4);
}
for (i = pblocks; i < blocks; i++)
{
t1 = _mm_loadu_si128(bi + i);
t1 = _mm_xor_si128(t1, ks[0]);
t1 = _mm_aesenc_si128(t1, ks[1]);
t1 = _mm_aesenc_si128(t1, ks[2]);
t1 = _mm_aesenc_si128(t1, ks[3]);
t1 = _mm_aesenc_si128(t1, ks[4]);
t1 = _mm_aesenc_si128(t1, ks[5]);
t1 = _mm_aesenc_si128(t1, ks[6]);
t1 = _mm_aesenc_si128(t1, ks[7]);
t1 = _mm_aesenc_si128(t1, ks[8]);
t1 = _mm_aesenc_si128(t1, ks[9]);
t1 = _mm_aesenc_si128(t1, ks[10]);
t1 = _mm_aesenc_si128(t1, ks[11]);
t1 = _mm_aesenc_si128(t1, ks[12]);
t1 = _mm_aesenc_si128(t1, ks[13]);
t1 = _mm_aesenclast_si128(t1, ks[14]);
_mm_storeu_si128(bo + i, t1);
}
}
/**
* AES-256 ECB decryption
*/
static void decrypt_ecb256(aesni_key_t *key, u_int blocks, u_char *in,
u_char *out)
{
__m128i *ks, *bi, *bo;
__m128i t1, t2, t3, t4;
u_int i, pblocks;
ks = key->schedule;
bi = (__m128i*)in;
bo = (__m128i*)out;
pblocks = blocks - (blocks % ECB_PARALLELISM);
for (i = 0; i < pblocks; i += ECB_PARALLELISM)
{
t1 = _mm_loadu_si128(bi + i + 0);
t2 = _mm_loadu_si128(bi + i + 1);
t3 = _mm_loadu_si128(bi + i + 2);
t4 = _mm_loadu_si128(bi + i + 3);
t1 = _mm_xor_si128(t1, ks[0]);
t2 = _mm_xor_si128(t2, ks[0]);
t3 = _mm_xor_si128(t3, ks[0]);
t4 = _mm_xor_si128(t4, ks[0]);
t1 = _mm_aesdec_si128(t1, ks[1]);
t2 = _mm_aesdec_si128(t2, ks[1]);
t3 = _mm_aesdec_si128(t3, ks[1]);
t4 = _mm_aesdec_si128(t4, ks[1]);
t1 = _mm_aesdec_si128(t1, ks[2]);
t2 = _mm_aesdec_si128(t2, ks[2]);
t3 = _mm_aesdec_si128(t3, ks[2]);
t4 = _mm_aesdec_si128(t4, ks[2]);
t1 = _mm_aesdec_si128(t1, ks[3]);
t2 = _mm_aesdec_si128(t2, ks[3]);
t3 = _mm_aesdec_si128(t3, ks[3]);
t4 = _mm_aesdec_si128(t4, ks[3]);
t1 = _mm_aesdec_si128(t1, ks[4]);
t2 = _mm_aesdec_si128(t2, ks[4]);
t3 = _mm_aesdec_si128(t3, ks[4]);
t4 = _mm_aesdec_si128(t4, ks[4]);
t1 = _mm_aesdec_si128(t1, ks[5]);
t2 = _mm_aesdec_si128(t2, ks[5]);
t3 = _mm_aesdec_si128(t3, ks[5]);
t4 = _mm_aesdec_si128(t4, ks[5]);
t1 = _mm_aesdec_si128(t1, ks[6]);
t2 = _mm_aesdec_si128(t2, ks[6]);
t3 = _mm_aesdec_si128(t3, ks[6]);
t4 = _mm_aesdec_si128(t4, ks[6]);
t1 = _mm_aesdec_si128(t1, ks[7]);
t2 = _mm_aesdec_si128(t2, ks[7]);
t3 = _mm_aesdec_si128(t3, ks[7]);
t4 = _mm_aesdec_si128(t4, ks[7]);
t1 = _mm_aesdec_si128(t1, ks[8]);
t2 = _mm_aesdec_si128(t2, ks[8]);
t3 = _mm_aesdec_si128(t3, ks[8]);
t4 = _mm_aesdec_si128(t4, ks[8]);
t1 = _mm_aesdec_si128(t1, ks[9]);
t2 = _mm_aesdec_si128(t2, ks[9]);
t3 = _mm_aesdec_si128(t3, ks[9]);
t4 = _mm_aesdec_si128(t4, ks[9]);
t1 = _mm_aesdec_si128(t1, ks[10]);
t2 = _mm_aesdec_si128(t2, ks[10]);
t3 = _mm_aesdec_si128(t3, ks[10]);
t4 = _mm_aesdec_si128(t4, ks[10]);
t1 = _mm_aesdec_si128(t1, ks[11]);
t2 = _mm_aesdec_si128(t2, ks[11]);
t3 = _mm_aesdec_si128(t3, ks[11]);
t4 = _mm_aesdec_si128(t4, ks[11]);
t1 = _mm_aesdec_si128(t1, ks[12]);
t2 = _mm_aesdec_si128(t2, ks[12]);
t3 = _mm_aesdec_si128(t3, ks[12]);
t4 = _mm_aesdec_si128(t4, ks[12]);
t1 = _mm_aesdec_si128(t1, ks[13]);
t2 = _mm_aesdec_si128(t2, ks[13]);
t3 = _mm_aesdec_si128(t3, ks[13]);
t4 = _mm_aesdec_si128(t4, ks[13]);
t1 = _mm_aesdeclast_si128(t1, ks[14]);
t2 = _mm_aesdeclast_si128(t2, ks[14]);
t3 = _mm_aesdeclast_si128(t3, ks[14]);
t4 = _mm_aesdeclast_si128(t4, ks[14]);
_mm_storeu_si128(bo + i + 0, t1);
_mm_storeu_si128(bo + i + 1, t2);
_mm_storeu_si128(bo + i + 2, t3);
_mm_storeu_si128(bo + i + 3, t4);
}
for (i = pblocks; i < blocks; i++)
{
t1 = _mm_loadu_si128(bi + i);
t1 = _mm_xor_si128(t1, ks[0]);
t1 = _mm_aesdec_si128(t1, ks[1]);
t1 = _mm_aesdec_si128(t1, ks[2]);
t1 = _mm_aesdec_si128(t1, ks[3]);
t1 = _mm_aesdec_si128(t1, ks[4]);
t1 = _mm_aesdec_si128(t1, ks[5]);
t1 = _mm_aesdec_si128(t1, ks[6]);
t1 = _mm_aesdec_si128(t1, ks[7]);
t1 = _mm_aesdec_si128(t1, ks[8]);
t1 = _mm_aesdec_si128(t1, ks[9]);
t1 = _mm_aesdec_si128(t1, ks[10]);
t1 = _mm_aesdec_si128(t1, ks[11]);
t1 = _mm_aesdec_si128(t1, ks[12]);
t1 = _mm_aesdec_si128(t1, ks[13]);
t1 = _mm_aesdeclast_si128(t1, ks[14]);
_mm_storeu_si128(bo + i, t1);
}
}
/**
* Do inline or allocated de/encryption using key schedule
*/
static bool crypt(aesni_ecb_fn_t fn, aesni_key_t *key, chunk_t data,
chunk_t *out)
{
u_char *buf;
if (!key || data.len % AES_BLOCK_SIZE)
{
return FALSE;
}
if (out)
{
*out = chunk_alloc(data.len);
buf = out->ptr;
}
else
{
buf = data.ptr;
}
fn(key, data.len / AES_BLOCK_SIZE, data.ptr, buf);
return TRUE;
}
METHOD(crypter_t, encrypt, bool,
private_aesni_ecb_t *this, chunk_t data, chunk_t iv, chunk_t *encrypted)
{
return crypt(this->encrypt, this->ekey, data, encrypted);
}
METHOD(crypter_t, decrypt, bool,
private_aesni_ecb_t *this, chunk_t data, chunk_t iv, chunk_t *decrypted)
{
return crypt(this->decrypt, this->dkey, data, decrypted);
}
METHOD(crypter_t, get_block_size, size_t,
private_aesni_ecb_t *this)
{
return AES_BLOCK_SIZE;
}
METHOD(crypter_t, get_iv_size, size_t,
private_aesni_ecb_t *this)
{
return 0;
}
METHOD(crypter_t, get_key_size, size_t,
private_aesni_ecb_t *this)
{
return this->key_size;
}
METHOD(crypter_t, set_key, bool,
private_aesni_ecb_t *this, chunk_t key)
{
if (key.len != this->key_size)
{
return FALSE;
}
DESTROY_IF(this->ekey);
DESTROY_IF(this->dkey);
this->ekey = aesni_key_create(TRUE, key);
this->dkey = aesni_key_create(FALSE, key);
return this->ekey && this->dkey;
}
METHOD(crypter_t, destroy, void,
private_aesni_ecb_t *this)
{
DESTROY_IF(this->ekey);
DESTROY_IF(this->dkey);
free_align(this);
}
/**
* See header
*/
aesni_ecb_t *aesni_ecb_create(encryption_algorithm_t algo, size_t key_size)
{
private_aesni_ecb_t *this;
if (algo != ENCR_AES_ECB)
{
return NULL;
}
switch (key_size)
{
case 0:
key_size = 16;
break;
case 16:
case 24:
case 32:
break;
default:
return NULL;
}
INIT_ALIGN(this, sizeof(__m128i),
.public = {
.crypter = {
.encrypt = _encrypt,
.decrypt = _decrypt,
.get_block_size = _get_block_size,
.get_iv_size = _get_iv_size,
.get_key_size = _get_key_size,
.set_key = _set_key,
.destroy = _destroy,
},
},
.key_size = key_size,
);
switch (key_size)
{
case 16:
this->encrypt = encrypt_ecb128;
this->decrypt = decrypt_ecb128;
break;
case 24:
this->encrypt = encrypt_ecb192;
this->decrypt = decrypt_ecb192;
break;
case 32:
this->encrypt = encrypt_ecb256;
this->decrypt = decrypt_ecb256;
break;
}
return &this->public;
}
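The unrolled sequences above trade code size for throughput. Per block, AES-128 is just key whitening, nine AESENC rounds, and a final AESENCLAST; a compact but less pipeline-friendly equivalent, assuming the same aesni_key_t schedule layout as in this file, would look like:

```c
#include <wmmintrin.h>

static void encrypt_ecb128_compact(aesni_key_t *key, u_int blocks,
								   u_char *in, u_char *out)
{
	__m128i *ks = key->schedule, t;
	u_int i, round;

	for (i = 0; i < blocks; i++)
	{
		t = _mm_loadu_si128((__m128i*)in + i);
		t = _mm_xor_si128(t, ks[0]);			/* initial AddRoundKey */
		for (round = 1; round < 10; round++)
		{
			t = _mm_aesenc_si128(t, ks[round]);	/* rounds 1..9 */
		}
		t = _mm_aesenclast_si128(t, ks[10]);	/* final round */
		_mm_storeu_si128((__m128i*)out + i, t);
	}
}
```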

View File

@@ -0,0 +1,48 @@
/*
* Copyright (C) 2019 Andreas Steffen
* HSR Hochschule fuer Technik Rapperswil
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version. See <http://www.fsf.org/copyleft/gpl.txt>.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*/
/**
* @defgroup aesni_ecb aesni_ecb
* @{ @ingroup aesni
*/
#ifndef AESNI_ECB_H_
#define AESNI_ECB_H_
#include <library.h>
typedef struct aesni_ecb_t aesni_ecb_t;
/**
* ECB mode crypter using AES-NI
*/
struct aesni_ecb_t {
/**
* Implements crypter interface
*/
crypter_t crypter;
};
/**
* Create a aesni_ecb instance.
*
* @param algo encryption algorithm, ENCR_AES_ECB
* @param key_size AES key size, in bytes
* @return AES-ECB crypter, NULL if not supported
*/
aesni_ecb_t *aesni_ecb_create(encryption_algorithm_t algo, size_t key_size);
#endif /** AESNI_ECB_H_ @}*/

View File

@@ -15,6 +15,7 @@
#include "aesni_plugin.h"
#include "aesni_cbc.h"
#include "aesni_ecb.h"
#include "aesni_ctr.h"
#include "aesni_ccm.h"
#include "aesni_gcm.h"
@@ -55,6 +56,10 @@ METHOD(plugin_t, get_features, int,
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CBC, 16),
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CBC, 24),
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CBC, 32),
+PLUGIN_REGISTER(CRYPTER, aesni_ecb_create),
+PLUGIN_PROVIDE(CRYPTER, ENCR_AES_ECB, 16),
+PLUGIN_PROVIDE(CRYPTER, ENCR_AES_ECB, 24),
+PLUGIN_PROVIDE(CRYPTER, ENCR_AES_ECB, 32),
PLUGIN_REGISTER(CRYPTER, aesni_ctr_create),
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CTR, 16),
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CTR, 24),

View File

@@ -2,6 +2,9 @@
* Copyright (C) 2010 Martin Willi
* Copyright (C) 2010 revosec AG
*
+* Copyright (C) 2019 Andreas Steffen
+* HSR Hochschule fuer Technik Rapperswil
+*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
@@ -68,6 +71,9 @@ static struct {
{ENCR_AES_CBC, "cbc(aes)", 16, 16, 16, 16, },
{ENCR_AES_CBC, "cbc(aes)", 16, 24, 24, 16, },
{ENCR_AES_CBC, "cbc(aes)", 16, 32, 32, 16, },
{ENCR_AES_ECB, "ecb(aes)", 16, 16, 16, 0, },
{ENCR_AES_ECB, "ecb(aes)", 16, 24, 24, 0, },
{ENCR_AES_ECB, "ecb(aes)", 16, 32, 32, 0, },
{ENCR_AES_CTR, "rfc3686(ctr(aes))", 1, 16, 20, 8, },
{ENCR_AES_CTR, "rfc3686(ctr(aes))", 1, 24, 28, 8, },
{ENCR_AES_CTR, "rfc3686(ctr(aes))", 1, 32, 36, 8, },

View File

@@ -27,7 +27,7 @@ typedef struct af_alg_crypter_t af_alg_crypter_t;
#include <crypto/crypters/crypter.h>
/** Number of crypters */
-#define AF_ALG_CRYPTER 25
+#define AF_ALG_CRYPTER 28
/**
* Implementation of signers using AF_ALG.

View File

@@ -90,14 +90,10 @@ METHOD(drbg_t, get_strength, uint32_t,
static bool encrypt_ctr(private_drbg_ctr_t *this, chunk_t out)
{
-chunk_t iv = chunk_alloca(this->value.len);
chunk_t bl = chunk_alloca(this->value.len);
chunk_t block;
size_t delta, pos = 0;
-/* Initialize IV to all zeroes for ECB mode */
-memset(iv.ptr, 0x00, iv.len);
if (!this->crypter->set_key(this->crypter, this->key))
{
return FALSE;
@@ -115,7 +111,7 @@ static bool encrypt_ctr(private_drbg_ctr_t *this, chunk_t out)
memcpy(block.ptr, this->value.ptr, this->value.len);
/* ECB encryption */
-if (!this->crypter->encrypt(this->crypter, block, iv, NULL))
+if (!this->crypter->encrypt(this->crypter, block, chunk_empty, NULL))
{
return FALSE;
}
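From the visible context, the surrounding generate loop encrypts an incrementing counter block by block; reassembled as a sketch (the chunk_increment helper and the exact buffer handling are assumptions, not quoted code):

```c
	while (pos < out.len)
	{
		/* V = (V + 1) mod 2^blocklen */
		chunk_increment(this->value);

		/* encrypt a copy of V in place with AES-ECB, no IV */
		memcpy(block.ptr, this->value.ptr, this->value.len);
		if (!this->crypter->encrypt(this->crypter, block, chunk_empty, NULL))
		{
			return FALSE;
		}

		/* append as many bytes as still needed */
		delta = min(out.len - pos, block.len);
		memcpy(out.ptr + pos, block.ptr, delta);
		pos += delta;
	}
```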
@@ -261,15 +257,15 @@ drbg_ctr_t *drbg_ctr_create(drbg_type_t type, uint32_t strength,
switch (type)
{
case DRBG_CTR_AES128:
-crypter_type = ENCR_AES_CBC;
+crypter_type = ENCR_AES_ECB;
key_len = 16;
break;
case DRBG_CTR_AES192:
-crypter_type = ENCR_AES_CBC;
+crypter_type = ENCR_AES_ECB;
key_len = 24;
break;
case DRBG_CTR_AES256:
-crypter_type = ENCR_AES_CBC;
+crypter_type = ENCR_AES_ECB;
key_len = 32;
break;
default:

View File

@@ -45,11 +45,11 @@ METHOD(plugin_t, get_features, int,
/* NIST CTR DRBG */
PLUGIN_REGISTER(DRBG, drbg_ctr_create),
PLUGIN_PROVIDE(DRBG, DRBG_CTR_AES128),
-PLUGIN_DEPENDS(CRYPTER, ENCR_AES_CBC, 16),
+PLUGIN_DEPENDS(CRYPTER, ENCR_AES_ECB, 16),
PLUGIN_PROVIDE(DRBG, DRBG_CTR_AES192),
-PLUGIN_DEPENDS(CRYPTER, ENCR_AES_CBC, 24),
+PLUGIN_DEPENDS(CRYPTER, ENCR_AES_ECB, 24),
PLUGIN_PROVIDE(DRBG, DRBG_CTR_AES256),
-PLUGIN_DEPENDS(CRYPTER, ENCR_AES_CBC, 32),
+PLUGIN_DEPENDS(CRYPTER, ENCR_AES_ECB, 32),
/* NIST HMAC DRBG */
PLUGIN_REGISTER(DRBG, drbg_hmac_create),
PLUGIN_PROVIDE(DRBG, DRBG_HMAC_SHA1),

View File

@@ -198,7 +198,9 @@ gcrypt_crypter_t *gcrypt_crypter_create(encryption_algorithm_t algo,
gcrypt_alg = GCRY_CIPHER_BLOWFISH;
break;
case ENCR_AES_CTR:
-mode = GCRY_CIPHER_MODE_CTR;
+case ENCR_AES_ECB:
+mode = (algo == ENCR_AES_CTR) ? GCRY_CIPHER_MODE_CTR :
+                                GCRY_CIPHER_MODE_ECB;
/* fall */
case ENCR_AES_CBC:
switch (key_size)
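For reference, outside of the plugin wrapper the selected mode corresponds roughly to this libgcrypt usage (sketch, error checks dropped):

```c
#include <gcrypt.h>

static void gcrypt_aes_ecb_demo(const void *key, void *buf, size_t len)
{
	gcry_cipher_hd_t hd;

	gcry_cipher_open(&hd, GCRY_CIPHER_AES128, GCRY_CIPHER_MODE_ECB, 0);
	gcry_cipher_setkey(hd, key, 16);
	/* in-place encryption of len bytes (multiple of 16), no IV set */
	gcry_cipher_encrypt(hd, buf, len, NULL, 0);
	gcry_cipher_close(hd);
}
```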

View File

@@ -70,6 +70,9 @@ METHOD(plugin_t, get_features, int,
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CBC, 16),
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CBC, 24),
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CBC, 32),
+PLUGIN_PROVIDE(CRYPTER, ENCR_AES_ECB, 16),
+PLUGIN_PROVIDE(CRYPTER, ENCR_AES_ECB, 24),
+PLUGIN_PROVIDE(CRYPTER, ENCR_AES_ECB, 32),
/* gcrypt only supports 128 bit blowfish */
PLUGIN_PROVIDE(CRYPTER, ENCR_BLOWFISH, 16),
#ifdef HAVE_GCRY_CIPHER_CAMELLIA

View File

@@ -211,6 +211,26 @@ openssl_crypter_t *openssl_crypter_create(encryption_algorithm_t algo,
return NULL;
}
break;
+case ENCR_AES_ECB:
+	switch (key_size)
+	{
+		case 0:
+			key_size = 16;
+			/* FALL */
+		case 16: /* AES 128 */
+			this->cipher = EVP_get_cipherbyname("aes-128-ecb");
+			break;
+		case 24: /* AES-192 */
+			this->cipher = EVP_get_cipherbyname("aes-192-ecb");
+			break;
+		case 32: /* AES-256 */
+			this->cipher = EVP_get_cipherbyname("aes-256-ecb");
+			break;
+		default:
+			free(this);
+			return NULL;
+	}
+	break;
case ENCR_CAMELLIA_CBC:
switch (key_size)
{
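EVP_get_cipherbyname("aes-128-ecb") resolves to the same cipher as EVP_aes_128_ecb(). Driven stand-alone, the mode looks like this (a sketch; strongSwan wraps it in its own crypter_t, and ECB uses no IV and, for whole blocks, no padding):

```c
#include <openssl/evp.h>

static int evp_aes_ecb_demo(const unsigned char *key,
							const unsigned char *in, unsigned char *out,
							int len)	/* len must be a multiple of 16 */
{
	EVP_CIPHER_CTX *ctx = EVP_CIPHER_CTX_new();
	int outlen = 0;

	EVP_EncryptInit_ex(ctx, EVP_aes_128_ecb(), NULL, key, NULL);
	EVP_CIPHER_CTX_set_padding(ctx, 0);
	EVP_EncryptUpdate(ctx, out, &outlen, in, len);
	EVP_CIPHER_CTX_free(ctx);
	return outlen;
}
```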

View File

@@ -497,6 +497,9 @@ METHOD(plugin_t, get_features, int,
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CBC, 16),
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CBC, 24),
PLUGIN_PROVIDE(CRYPTER, ENCR_AES_CBC, 32),
+PLUGIN_PROVIDE(CRYPTER, ENCR_AES_ECB, 16),
+PLUGIN_PROVIDE(CRYPTER, ENCR_AES_ECB, 24),
+PLUGIN_PROVIDE(CRYPTER, ENCR_AES_ECB, 32),
#endif
#ifndef OPENSSL_NO_CAMELLIA
PLUGIN_PROVIDE(CRYPTER, ENCR_CAMELLIA_CBC, 16),

View File

@@ -14,6 +14,7 @@ libstrongswan_test_vectors_la_SOURCES = \
test_vectors_plugin.h test_vectors_plugin.c test_vectors.h \
test_vectors/3des_cbc.c \
test_vectors/aes_cbc.c \
+test_vectors/aes_ecb.c \
test_vectors/aes_ctr.c \
test_vectors/aes_xcbc.c \
test_vectors/aes_cmac.c \

View File

@@ -20,6 +20,9 @@ TEST_VECTOR_CRYPTER(aes_cbc3)
TEST_VECTOR_CRYPTER(aes_cbc4)
TEST_VECTOR_CRYPTER(aes_cbc5)
TEST_VECTOR_CRYPTER(aes_cbc6)
+TEST_VECTOR_CRYPTER(aes_ecb1)
+TEST_VECTOR_CRYPTER(aes_ecb2)
+TEST_VECTOR_CRYPTER(aes_ecb3)
TEST_VECTOR_CRYPTER(aes_ctr1)
TEST_VECTOR_CRYPTER(aes_ctr2)
TEST_VECTOR_CRYPTER(aes_ctr3)

View File

@@ -0,0 +1,70 @@
/*
* Copyright (C) 2019 Andreas Steffen
* HSR Hochschule fuer Technik Rapperswil
*
* This program is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License as published by the
* Free Software Foundation; either version 2 of the License, or (at your
* option) any later version. See <http://www.fsf.org/copyleft/gpl.txt>.
*
* This program is distributed in the hope that it will be useful, but
* WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
* or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* for more details.
*/
#include <crypto/crypto_tester.h>
/**
* Test F.1.1 of NIST SP 800-38A 2001
*/
crypter_test_vector_t aes_ecb1 = {
.alg = ENCR_AES_ECB, .key_size = 16, .len = 64,
.key = "\x2b\x7e\x15\x16\x28\xae\xd2\xa6\xab\xf7\x15\x88\x09\xcf\x4f\x3c",
.iv = "",
.plain = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
"\xae\x2d\x8a\x57\x1e\x03\xac\x9c\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
"\x30\xc8\x1c\x46\xa3\x5c\xe4\x11\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
"\xf6\x9f\x24\x45\xdf\x4f\x9b\x17\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
.cipher = "\x3a\xd7\x7b\xb4\x0d\x7a\x36\x60\xa8\x9e\xca\xf3\x24\x66\xef\x97"
"\xf5\xd3\xd5\x85\x03\xb9\x69\x9d\xe7\x85\x89\x5a\x96\xfd\xba\xaf"
"\x43\xb1\xcd\x7f\x59\x8e\xce\x23\x88\x1b\x00\xe3\xed\x03\x06\x88"
"\x7b\x0c\x78\x5e\x27\xe8\xad\x3f\x82\x23\x20\x71\x04\x72\x5d\xd4"
};
/**
* Test F.1.3 of NIST SP 800-38A 2001
*/
crypter_test_vector_t aes_ecb2 = {
.alg = ENCR_AES_ECB, .key_size = 24, .len = 64,
.key = "\x8e\x73\xb0\xf7\xda\x0e\x64\x52\xc8\x10\xf3\x2b\x80\x90\x79\xe5"
"\x62\xf8\xea\xd2\x52\x2c\x6b\x7b",
.iv = "",
.plain = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
"\xae\x2d\x8a\x57\x1e\x03\xac\x9c\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
"\x30\xc8\x1c\x46\xa3\x5c\xe4\x11\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
"\xf6\x9f\x24\x45\xdf\x4f\x9b\x17\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
.cipher = "\xbd\x33\x4f\x1d\x6e\x45\xf2\x5f\xf7\x12\xa2\x14\x57\x1f\xa5\xcc"
"\x97\x41\x04\x84\x6d\x0a\xd3\xad\x77\x34\xec\xb3\xec\xee\x4e\xef"
"\xef\x7a\xfd\x22\x70\xe2\xe6\x0a\xdc\xe0\xba\x2f\xac\xe6\x44\x4e"
"\x9a\x4b\x41\xba\x73\x8d\x6c\x72\xfb\x16\x69\x16\x03\xc1\x8e\x0e"
};
/**
* Test F.1.5 of NIST SP 800-38A 2001
*/
crypter_test_vector_t aes_ecb3 = {
.alg = ENCR_AES_ECB, .key_size = 32, .len = 64,
.key = "\x60\x3d\xeb\x10\x15\xca\x71\xbe\x2b\x73\xae\xf0\x85\x7d\x77\x81"
"\x1f\x35\x2c\x07\x3b\x61\x08\xd7\x2d\x98\x10\xa3\x09\x14\xdf\xf4",
.iv = "",
.plain = "\x6b\xc1\xbe\xe2\x2e\x40\x9f\x96\xe9\x3d\x7e\x11\x73\x93\x17\x2a"
"\xae\x2d\x8a\x57\x1e\x03\xac\x9c\x9e\xb7\x6f\xac\x45\xaf\x8e\x51"
"\x30\xc8\x1c\x46\xa3\x5c\xe4\x11\xe5\xfb\xc1\x19\x1a\x0a\x52\xef"
"\xf6\x9f\x24\x45\xdf\x4f\x9b\x17\xad\x2b\x41\x7b\xe6\x6c\x37\x10",
.cipher = "\xf3\xee\xd1\xbd\xb5\xd2\xa0\x3c\x06\x4b\x5a\x7e\x3d\xb1\x81\xf8"
"\x59\x1c\xcb\x10\xd4\x10\xed\x26\xdc\x5b\xa7\x4a\x31\x36\x28\x70"
"\xb6\xed\x21\xb9\x9c\xa6\xf4\xf9\xf1\x53\xe7\xb1\xbe\xaf\xed\x1d"
"\x23\x30\x4b\x7a\x39\xf9\xf3\xff\x06\x7d\x8d\x8f\x9e\x24\xec\xc7"
};
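The crypto_tester plugin runs these vectors against every registered ENCR_AES_ECB implementation. A hand-rolled check of the first vector would amount to this hedged sketch, using libstrongswan's chunk helpers:

```c
#include <library.h>

static bool check_aes_ecb1(crypter_t *crypter)
{
	chunk_t data = chunk_clone(chunk_create((u_char*)aes_ecb1.plain, 64));
	bool ok;

	ok = crypter->set_key(crypter,
						  chunk_create((u_char*)aes_ecb1.key, 16)) &&
		 crypter->encrypt(crypter, data, chunk_empty, NULL) &&
		 memeq(data.ptr, aes_ecb1.cipher, 64);
	chunk_free(&data);
	return ok;
}
```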