path: root/include/aesni
author     Egor Tensin <Egor.Tensin@gmail.com>  2015-06-03 22:46:18 +0300
committer  Egor Tensin <Egor.Tensin@gmail.com>  2015-06-03 22:46:18 +0300
commit     91923133903515c98973543c0e17705537449c22 (patch)
tree       3848b63fe8e6c2d2e5ff7f564bacadfd5652a783 /include/aesni
parent     add OFB mode (diff)
download   aes-tools-91923133903515c98973543c0e17705537449c22.tar.gz
           aes-tools-91923133903515c98973543c0e17705537449c22.zip
init vectors are calculated by API functions
Diffstat (limited to 'include/aesni')
-rw-r--r--  include/aesni/api.h  126
1 file changed, 90 insertions(+), 36 deletions(-)
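
The new signatures take an extra AesBlock128* out-parameter through which each call stores the initialization vector for the next block. Below is a minimal caller-side sketch, not part of this commit, of chaining CBC encryption over a buffer with the updated API; it assumes AesBlock128 is an alias for __m128i (as the _mm_* intrinsics in the diff imply), that the key schedule has already been expanded, and that the repository's include/ directory is on the include path.

#include <stddef.h>

#include <aesni/api.h>

/* Encrypts n_blocks blocks in CBC mode, feeding each block's resulting
 * cyphertext back in as the next init vector via the new out-parameter. */
static void encrypt_buffer_cbc128(
    const AesBlock128* plain,
    AesBlock128* cypher,
    size_t n_blocks,
    Aes128KeySchedule* key_schedule,
    AesBlock128 init_vector)
{
    AesBlock128 iv = init_vector;

    for (size_t i = 0; i < n_blocks; ++i)
        cypher[i] = aes128cbc_encrypt(plain[i], key_schedule, iv, &iv);
}
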
diff --git a/include/aesni/api.h b/include/aesni/api.h
index 6d275a5..fff3672 100644
--- a/include/aesni/api.h
+++ b/include/aesni/api.h
@@ -42,49 +42,67 @@ static __inline AesBlock128 __fastcall aes128ecb_decrypt(
static __inline AesBlock128 __fastcall aes128cbc_encrypt(
AesBlock128 plain,
Aes128KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return raw_aes128ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+ AesBlock128 cypher = raw_aes128ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+ _mm_storeu_si128(next_init_vector, cypher);
+ return cypher;
}
static __inline AesBlock128 __fastcall aes128cbc_decrypt(
AesBlock128 cypher,
Aes128KeySchedule* inverted_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes128ecb_decrypt(cypher, inverted_schedule), init_vector);
+ AesBlock128 plain = _mm_xor_si128(raw_aes128ecb_decrypt(cypher, inverted_schedule), init_vector);
+ _mm_storeu_si128(next_init_vector, cypher);
+ return plain;
}
static __inline AesBlock128 __fastcall aes128cfb_encrypt(
AesBlock128 plain,
Aes128KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), plain);
+ AesBlock128 cypher = _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), plain);
+ _mm_storeu_si128(next_init_vector, cypher);
+ return cypher;
}
static __inline AesBlock128 __fastcall aes128cfb_decrypt(
AesBlock128 cypher,
Aes128KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), cypher);
+ AesBlock128 plain = _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), cypher);
+ _mm_storeu_si128(next_init_vector, cypher);
+ return plain;
}
static __inline AesBlock128 __fastcall aes128ofb_encrypt(
AesBlock128 plain,
Aes128KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), plain);
+ AesBlock128 tmp = raw_aes128ecb_encrypt(init_vector, key_schedule);
+ _mm_storeu_si128(next_init_vector, tmp);
+ return _mm_xor_si128(tmp, plain);
}
static __inline AesBlock128 __fastcall aes128ofb_decrypt(
AesBlock128 cypher,
Aes128KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), cypher);
+ AesBlock128 tmp = raw_aes128ecb_encrypt(init_vector, key_schedule);
+ _mm_storeu_si128(next_init_vector, tmp);
+ return _mm_xor_si128(tmp, cypher);
}
static __inline void __fastcall aes192_expand_key_schedule(
@@ -118,49 +136,67 @@ static __inline AesBlock128 __fastcall aes192ecb_decrypt(
static __inline AesBlock128 __fastcall aes192cbc_encrypt(
AesBlock128 plain,
Aes192KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return raw_aes192ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+ AesBlock128 cypher = raw_aes192ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+ _mm_storeu_si128(next_init_vector, cypher);
+ return cypher;
}
static __inline AesBlock128 __fastcall aes192cbc_decrypt(
AesBlock128 cypher,
Aes192KeySchedule* inverted_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes192ecb_decrypt(cypher, inverted_schedule), init_vector);
+ AesBlock128 plain = _mm_xor_si128(raw_aes192ecb_decrypt(cypher, inverted_schedule), init_vector);
+ _mm_storeu_si128(next_init_vector, cypher);
+ return plain;
}
static __inline AesBlock128 __fastcall aes192cfb_encrypt(
AesBlock128 plain,
Aes192KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), plain);
+ AesBlock128 cypher = _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), plain);
+ _mm_storeu_si128(next_init_vector, cypher);
+ return cypher;
}
static __inline AesBlock128 __fastcall aes192cfb_decrypt(
AesBlock128 cypher,
Aes192KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), cypher);
+ AesBlock128 plain = _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), cypher);
+ _mm_storeu_si128(next_init_vector, cypher);
+ return plain;
}
static __inline AesBlock128 __fastcall aes192ofb_encrypt(
AesBlock128 plain,
Aes192KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), plain);
+ AesBlock128 tmp = raw_aes192ecb_encrypt(init_vector, key_schedule);
+ _mm_storeu_si128(next_init_vector, tmp);
+ return _mm_xor_si128(tmp, plain);
}
static __inline AesBlock128 __fastcall aes192ofb_decrypt(
AesBlock128 cypher,
Aes192KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), cypher);
+ AesBlock128 tmp = raw_aes192ecb_encrypt(init_vector, key_schedule);
+ _mm_storeu_si128(next_init_vector, tmp);
+ return _mm_xor_si128(tmp, cypher);
}
static __inline void __fastcall aes256_expand_key_schedule(
@@ -194,47 +230,65 @@ static __inline AesBlock128 __fastcall aes256ecb_decrypt(
static __inline AesBlock128 __fastcall aes256cbc_encrypt(
AesBlock128 plain,
Aes256KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return raw_aes256ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+ AesBlock128 cypher = raw_aes256ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+ _mm_storeu_si128(next_init_vector, cypher);
+ return cypher;
}
static __inline AesBlock128 __fastcall aes256cbc_decrypt(
AesBlock128 cypher,
Aes256KeySchedule* inverted_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes256ecb_decrypt(cypher, inverted_schedule), init_vector);
+ AesBlock128 plain = _mm_xor_si128(raw_aes256ecb_decrypt(cypher, inverted_schedule), init_vector);
+ _mm_storeu_si128(next_init_vector, cypher);
+ return plain;
}
static __inline AesBlock128 __fastcall aes256cfb_encrypt(
AesBlock128 plain,
Aes256KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), plain);
+ AesBlock128 cypher = _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), plain);
+ _mm_storeu_si128(next_init_vector, cypher);
+ return cypher;
}
static __inline AesBlock128 __fastcall aes256cfb_decrypt(
AesBlock128 cypher,
Aes256KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), cypher);
+ AesBlock128 plain = _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), cypher);
+ _mm_storeu_si128(next_init_vector, cypher);
+ return plain;
}
static __inline AesBlock128 __fastcall aes256ofb_encrypt(
AesBlock128 plain,
Aes256KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), plain);
+ AesBlock128 tmp = raw_aes256ecb_encrypt(init_vector, key_schedule);
+ _mm_storeu_si128(next_init_vector, tmp);
+ return _mm_xor_si128(tmp, plain);
}
static __inline AesBlock128 __fastcall aes256ofb_decrypt(
AesBlock128 cypher,
Aes256KeySchedule* key_schedule,
- AesBlock128 init_vector)
+ AesBlock128 init_vector,
+ AesBlock128* next_init_vector)
{
- return _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), cypher);
+ AesBlock128 tmp = raw_aes256ecb_encrypt(init_vector, key_schedule);
+ _mm_storeu_si128(next_init_vector, tmp);
+ return _mm_xor_si128(tmp, cypher);
}
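
For the feedback modes the value stored through next_init_vector differs: CFB stores the cyphertext block, while OFB stores the raw keystream block (tmp above), so an OFB decryptor advances the IV exactly like the encryptor. A sketch under the same assumptions as the CBC example above:

/* OFB is symmetric: both directions consume the same keystream chain,
 * so one routine handles both encryption and decryption of a buffer. */
static void xcrypt_buffer_ofb128(
    const AesBlock128* in,
    AesBlock128* out,
    size_t n_blocks,
    Aes128KeySchedule* key_schedule,
    AesBlock128 init_vector)
{
    AesBlock128 iv = init_vector;

    for (size_t i = 0; i < n_blocks; ++i)
        out[i] = aes128ofb_encrypt(in[i], key_schedule, iv, &iv);
}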