author     Egor Tensin <Egor.Tensin@gmail.com>  2015-06-10 03:56:20 +0300
committer  Egor Tensin <Egor.Tensin@gmail.com>  2015-06-10 03:56:20 +0300
commit     7468cbc7c4e69bbd2593c71c8a22953f41536963 (patch)
tree       bbc0b0ebb62ceb7385a418701897cb5577f035f9 /include/aesni
parent     utils: usage message update (diff)
download   aes-tools-7468cbc7c4e69bbd2593c71c8a22953f41536963.tar.gz
           aes-tools-7468cbc7c4e69bbd2593c71c8a22953f41536963.zip
refactoring
Diffstat (limited to 'include/aesni')
-rw-r--r--  include/aesni/api.h  | 120
-rw-r--r--  include/aesni/file.h |   4
-rw-r--r--  include/aesni/raw.h  |  12
3 files changed, 68 insertions(+), 68 deletions(-)
diff --git a/include/aesni/api.h b/include/aesni/api.h
index e021fed..ea11817 100644
--- a/include/aesni/api.h
+++ b/include/aesni/api.h
@@ -25,87 +25,87 @@ static __inline void __fastcall aes128_invert_key_schedule(
raw_aes128_invert_key_schedule(key_schedule, inverted_schedule);
}
-static __inline AesBlock128 __fastcall aes128ecb_encrypt(
+static __inline AesBlock128 __fastcall aes128ecb_encrypt_block(
AesBlock128 plain,
Aes128KeySchedule* key_schedule)
{
- return raw_aes128_encrypt(plain, key_schedule);
+ return raw_aes128_encrypt_block(plain, key_schedule);
}
-static __inline AesBlock128 __fastcall aes128ecb_decrypt(
+static __inline AesBlock128 __fastcall aes128ecb_decrypt_block(
AesBlock128 cipher,
Aes128KeySchedule* inverted_schedule)
{
- return raw_aes128_decrypt(cipher, inverted_schedule);
+ return raw_aes128_decrypt_block(cipher, inverted_schedule);
}
-static __inline AesBlock128 __fastcall aes128cbc_encrypt(
+static __inline AesBlock128 __fastcall aes128cbc_encrypt_block(
AesBlock128 plain,
Aes128KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 cipher = raw_aes128_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+ AesBlock128 cipher = raw_aes128_encrypt_block(_mm_xor_si128(plain, init_vector), key_schedule);
*next_init_vector = cipher;
return cipher;
}
-static __inline AesBlock128 __fastcall aes128cbc_decrypt(
+static __inline AesBlock128 __fastcall aes128cbc_decrypt_block(
AesBlock128 cipher,
Aes128KeySchedule* inverted_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 plain = _mm_xor_si128(raw_aes128_decrypt(cipher, inverted_schedule), init_vector);
+ AesBlock128 plain = _mm_xor_si128(raw_aes128_decrypt_block(cipher, inverted_schedule), init_vector);
*next_init_vector = cipher;
return plain;
}
-static __inline AesBlock128 __fastcall aes128cfb_encrypt(
+static __inline AesBlock128 __fastcall aes128cfb_encrypt_block(
AesBlock128 plain,
Aes128KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 cipher = _mm_xor_si128(raw_aes128_encrypt(init_vector, key_schedule), plain);
+ AesBlock128 cipher = _mm_xor_si128(raw_aes128_encrypt_block(init_vector, key_schedule), plain);
*next_init_vector = cipher;
return cipher;
}
-static __inline AesBlock128 __fastcall aes128cfb_decrypt(
+static __inline AesBlock128 __fastcall aes128cfb_decrypt_block(
AesBlock128 cipher,
Aes128KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 plain = _mm_xor_si128(raw_aes128_encrypt(init_vector, key_schedule), cipher);
+ AesBlock128 plain = _mm_xor_si128(raw_aes128_encrypt_block(init_vector, key_schedule), cipher);
*next_init_vector = cipher;
return plain;
}
-static __inline AesBlock128 __fastcall aes128ofb_encrypt(
+static __inline AesBlock128 __fastcall aes128ofb_encrypt_block(
AesBlock128 plain,
Aes128KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 tmp = raw_aes128_encrypt(init_vector, key_schedule);
+ AesBlock128 tmp = raw_aes128_encrypt_block(init_vector, key_schedule);
*next_init_vector = tmp;
return _mm_xor_si128(tmp, plain);
}
-static __inline AesBlock128 __fastcall aes128ofb_decrypt(
+static __inline AesBlock128 __fastcall aes128ofb_decrypt_block(
AesBlock128 cipher,
Aes128KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 tmp = raw_aes128_encrypt(init_vector, key_schedule);
+ AesBlock128 tmp = raw_aes128_encrypt_block(init_vector, key_schedule);
*next_init_vector = tmp;
return _mm_xor_si128(tmp, cipher);
}
-static __inline AesBlock128 __fastcall aes128ctr_encrypt(
+static __inline AesBlock128 __fastcall aes128ctr_encrypt_block(
AesBlock128 plain,
Aes128KeySchedule* key_schedule,
AesBlock128 init_vector,
@@ -114,10 +114,10 @@ static __inline AesBlock128 __fastcall aes128ctr_encrypt(
init_vector = aes128_le2be(init_vector);
init_vector = _mm_add_epi32(init_vector, make_aes_block128(0, 0, 0, counter));
init_vector = aes128_be2le(init_vector);
- return _mm_xor_si128(plain, raw_aes128_encrypt(init_vector, key_schedule));
+ return _mm_xor_si128(plain, raw_aes128_encrypt_block(init_vector, key_schedule));
}
-static __inline AesBlock128 __fastcall aes128ctr_decrypt(
+static __inline AesBlock128 __fastcall aes128ctr_decrypt_block(
AesBlock128 cipher,
Aes128KeySchedule* key_schedule,
AesBlock128 init_vector,
@@ -126,7 +126,7 @@ static __inline AesBlock128 __fastcall aes128ctr_decrypt(
init_vector = aes128_le2be(init_vector);
init_vector = _mm_add_epi32(init_vector, make_aes_block128(0, 0, 0, counter));
init_vector = aes128_be2le(init_vector);
- return _mm_xor_si128(cipher, raw_aes128_encrypt(init_vector, key_schedule));
+ return _mm_xor_si128(cipher, raw_aes128_encrypt_block(init_vector, key_schedule));
}
static __inline void __fastcall aes192_expand_key_schedule(
@@ -143,87 +143,87 @@ static __inline void __fastcall aes192_invert_key_schedule(
raw_aes192_invert_key_schedule(key_schedule, inverted_schedule);
}
-static __inline AesBlock128 __fastcall aes192ecb_encrypt(
+static __inline AesBlock128 __fastcall aes192ecb_encrypt_block(
AesBlock128 plain,
Aes192KeySchedule* key_schedule)
{
- return raw_aes192_encrypt(plain, key_schedule);
+ return raw_aes192_encrypt_block(plain, key_schedule);
}
-static __inline AesBlock128 __fastcall aes192ecb_decrypt(
+static __inline AesBlock128 __fastcall aes192ecb_decrypt_block(
AesBlock128 cipher,
Aes192KeySchedule* inverted_schedule)
{
- return raw_aes192_decrypt(cipher, inverted_schedule);
+ return raw_aes192_decrypt_block(cipher, inverted_schedule);
}
-static __inline AesBlock128 __fastcall aes192cbc_encrypt(
+static __inline AesBlock128 __fastcall aes192cbc_encrypt_block(
AesBlock128 plain,
Aes192KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 cipher = raw_aes192_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+ AesBlock128 cipher = raw_aes192_encrypt_block(_mm_xor_si128(plain, init_vector), key_schedule);
*next_init_vector = cipher;
return cipher;
}
-static __inline AesBlock128 __fastcall aes192cbc_decrypt(
+static __inline AesBlock128 __fastcall aes192cbc_decrypt_block(
AesBlock128 cipher,
Aes192KeySchedule* inverted_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 plain = _mm_xor_si128(raw_aes192_decrypt(cipher, inverted_schedule), init_vector);
+ AesBlock128 plain = _mm_xor_si128(raw_aes192_decrypt_block(cipher, inverted_schedule), init_vector);
*next_init_vector = cipher;
return plain;
}
-static __inline AesBlock128 __fastcall aes192cfb_encrypt(
+static __inline AesBlock128 __fastcall aes192cfb_encrypt_block(
AesBlock128 plain,
Aes192KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 cipher = _mm_xor_si128(raw_aes192_encrypt(init_vector, key_schedule), plain);
+ AesBlock128 cipher = _mm_xor_si128(raw_aes192_encrypt_block(init_vector, key_schedule), plain);
*next_init_vector = cipher;
return cipher;
}
-static __inline AesBlock128 __fastcall aes192cfb_decrypt(
+static __inline AesBlock128 __fastcall aes192cfb_decrypt_block(
AesBlock128 cipher,
Aes192KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 plain = _mm_xor_si128(raw_aes192_encrypt(init_vector, key_schedule), cipher);
+ AesBlock128 plain = _mm_xor_si128(raw_aes192_encrypt_block(init_vector, key_schedule), cipher);
*next_init_vector = cipher;
return plain;
}
-static __inline AesBlock128 __fastcall aes192ofb_encrypt(
+static __inline AesBlock128 __fastcall aes192ofb_encrypt_block(
AesBlock128 plain,
Aes192KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 tmp = raw_aes192_encrypt(init_vector, key_schedule);
+ AesBlock128 tmp = raw_aes192_encrypt_block(init_vector, key_schedule);
*next_init_vector = tmp;
return _mm_xor_si128(tmp, plain);
}
-static __inline AesBlock128 __fastcall aes192ofb_decrypt(
+static __inline AesBlock128 __fastcall aes192ofb_decrypt_block(
AesBlock128 cipher,
Aes192KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 tmp = raw_aes192_encrypt(init_vector, key_schedule);
+ AesBlock128 tmp = raw_aes192_encrypt_block(init_vector, key_schedule);
*next_init_vector = tmp;
return _mm_xor_si128(tmp, cipher);
}
-static __inline AesBlock128 __fastcall aes192ctr_encrypt(
+static __inline AesBlock128 __fastcall aes192ctr_encrypt_block(
AesBlock128 plain,
Aes192KeySchedule* key_schedule,
AesBlock128 init_vector,
@@ -232,10 +232,10 @@ static __inline AesBlock128 __fastcall aes192ctr_encrypt(
init_vector = aes128_le2be(init_vector);
init_vector = _mm_add_epi32(init_vector, make_aes_block128(0, 0, 0, counter));
init_vector = aes128_be2le(init_vector);
- return _mm_xor_si128(plain, raw_aes192_encrypt(init_vector, key_schedule));
+ return _mm_xor_si128(plain, raw_aes192_encrypt_block(init_vector, key_schedule));
}
-static __inline AesBlock128 __fastcall aes192ctr_decrypt(
+static __inline AesBlock128 __fastcall aes192ctr_decrypt_block(
AesBlock128 cipher,
Aes192KeySchedule* key_schedule,
AesBlock128 init_vector,
@@ -244,7 +244,7 @@ static __inline AesBlock128 __fastcall aes192ctr_decrypt(
init_vector = aes128_le2be(init_vector);
init_vector = _mm_add_epi32(init_vector, make_aes_block128(0, 0, 0, counter));
init_vector = aes128_be2le(init_vector);
- return _mm_xor_si128(cipher, raw_aes192_encrypt(init_vector, key_schedule));
+ return _mm_xor_si128(cipher, raw_aes192_encrypt_block(init_vector, key_schedule));
}
static __inline void __fastcall aes256_expand_key_schedule(
@@ -261,87 +261,87 @@ static __inline void __fastcall aes256_invert_key_schedule(
raw_aes256_invert_key_schedule(key_schedule, inverted_schedule);
}
-static __inline AesBlock128 __fastcall aes256ecb_encrypt(
+static __inline AesBlock128 __fastcall aes256ecb_encrypt_block(
AesBlock128 plain,
Aes256KeySchedule* key_schedule)
{
- return raw_aes256_encrypt(plain, key_schedule);
+ return raw_aes256_encrypt_block(plain, key_schedule);
}
-static __inline AesBlock128 __fastcall aes256ecb_decrypt(
+static __inline AesBlock128 __fastcall aes256ecb_decrypt_block(
AesBlock128 cipher,
Aes256KeySchedule* inverted_schedule)
{
- return raw_aes256_decrypt(cipher, inverted_schedule);
+ return raw_aes256_decrypt_block(cipher, inverted_schedule);
}
-static __inline AesBlock128 __fastcall aes256cbc_encrypt(
+static __inline AesBlock128 __fastcall aes256cbc_encrypt_block(
AesBlock128 plain,
Aes256KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 cipher = raw_aes256_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+ AesBlock128 cipher = raw_aes256_encrypt_block(_mm_xor_si128(plain, init_vector), key_schedule);
*next_init_vector = cipher;
return cipher;
}
-static __inline AesBlock128 __fastcall aes256cbc_decrypt(
+static __inline AesBlock128 __fastcall aes256cbc_decrypt_block(
AesBlock128 cipher,
Aes256KeySchedule* inverted_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 plain = _mm_xor_si128(raw_aes256_decrypt(cipher, inverted_schedule), init_vector);
+ AesBlock128 plain = _mm_xor_si128(raw_aes256_decrypt_block(cipher, inverted_schedule), init_vector);
*next_init_vector = cipher;
return plain;
}
-static __inline AesBlock128 __fastcall aes256cfb_encrypt(
+static __inline AesBlock128 __fastcall aes256cfb_encrypt_block(
AesBlock128 plain,
Aes256KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 cipher = _mm_xor_si128(raw_aes256_encrypt(init_vector, key_schedule), plain);
+ AesBlock128 cipher = _mm_xor_si128(raw_aes256_encrypt_block(init_vector, key_schedule), plain);
*next_init_vector = cipher;
return cipher;
}
-static __inline AesBlock128 __fastcall aes256cfb_decrypt(
+static __inline AesBlock128 __fastcall aes256cfb_decrypt_block(
AesBlock128 cipher,
Aes256KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 plain = _mm_xor_si128(raw_aes256_encrypt(init_vector, key_schedule), cipher);
+ AesBlock128 plain = _mm_xor_si128(raw_aes256_encrypt_block(init_vector, key_schedule), cipher);
*next_init_vector = cipher;
return plain;
}
-static __inline AesBlock128 __fastcall aes256ofb_encrypt(
+static __inline AesBlock128 __fastcall aes256ofb_encrypt_block(
AesBlock128 plain,
Aes256KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 tmp = raw_aes256_encrypt(init_vector, key_schedule);
+ AesBlock128 tmp = raw_aes256_encrypt_block(init_vector, key_schedule);
*next_init_vector = tmp;
return _mm_xor_si128(tmp, plain);
}
-static __inline AesBlock128 __fastcall aes256ofb_decrypt(
+static __inline AesBlock128 __fastcall aes256ofb_decrypt_block(
AesBlock128 cipher,
Aes256KeySchedule* key_schedule,
AesBlock128 init_vector,
AesBlock128* next_init_vector)
{
- AesBlock128 tmp = raw_aes256_encrypt(init_vector, key_schedule);
+ AesBlock128 tmp = raw_aes256_encrypt_block(init_vector, key_schedule);
*next_init_vector = tmp;
return _mm_xor_si128(tmp, cipher);
}
-static __inline AesBlock128 __fastcall aes256ctr_encrypt(
+static __inline AesBlock128 __fastcall aes256ctr_encrypt_block(
AesBlock128 plain,
Aes256KeySchedule* key_schedule,
AesBlock128 init_vector,
@@ -350,10 +350,10 @@ static __inline AesBlock128 __fastcall aes256ctr_encrypt(
init_vector = aes128_le2be(init_vector);
init_vector = _mm_add_epi32(init_vector, make_aes_block128(0, 0, 0, counter));
init_vector = aes128_be2le(init_vector);
- return _mm_xor_si128(plain, raw_aes256_encrypt(init_vector, key_schedule));
+ return _mm_xor_si128(plain, raw_aes256_encrypt_block(init_vector, key_schedule));
}
-static __inline AesBlock128 __fastcall aes256ctr_decrypt(
+static __inline AesBlock128 __fastcall aes256ctr_decrypt_block(
AesBlock128 cipher,
Aes256KeySchedule* key_schedule,
AesBlock128 init_vector,
@@ -362,5 +362,5 @@ static __inline AesBlock128 __fastcall aes256ctr_decrypt(
init_vector = aes128_le2be(init_vector);
init_vector = _mm_add_epi32(init_vector, make_aes_block128(0, 0, 0, counter));
init_vector = aes128_be2le(init_vector);
- return _mm_xor_si128(cipher, raw_aes256_encrypt(init_vector, key_schedule));
+ return _mm_xor_si128(cipher, raw_aes256_encrypt_block(init_vector, key_schedule));
}
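The api.h hunks above only rename the per-block wrappers (aes128*_encrypt/_decrypt become aes128*_encrypt_block/_decrypt_block); the signatures and the IV-chaining convention via the next_init_vector out-parameter are unchanged. Below is a minimal caller-side sketch of the renamed CBC helpers. The exact signature of aes128_expand_key_schedule() is not shown in this diff, so it is an assumption.

/* Sketch only: encrypts two consecutive blocks in CBC mode using the
 * renamed *_block wrappers from api.h. */
#include <aesni/api.h>

static void encrypt_two_blocks_cbc(AesBlock128 plain0, AesBlock128 plain1,
                                   AesBlock128 key, AesBlock128 iv,
                                   AesBlock128* cipher0, AesBlock128* cipher1)
{
    Aes128KeySchedule key_schedule;
    aes128_expand_key_schedule(key, &key_schedule);  /* assumed signature */

    AesBlock128 next_iv;
    /* Each call returns the ciphertext block and writes the IV for the
     * next block through the next_init_vector out-parameter. */
    *cipher0 = aes128cbc_encrypt_block(plain0, &key_schedule, iv, &next_iv);
    *cipher1 = aes128cbc_encrypt_block(plain1, &key_schedule, next_iv, &next_iv);
}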
diff --git a/include/aesni/file.h b/include/aesni/file.h
index eb84d09..f3c845d 100644
--- a/include/aesni/file.h
+++ b/include/aesni/file.h
@@ -10,11 +10,11 @@
#include <stdio.h>
-size_t aes128ecb_encrypt_file(const unsigned char* src,
+size_t aes128ecb_encrypt_buffer(const unsigned char* src,
size_t src_size,
unsigned char* dest,
Aes128KeySchedule* key_schedule);
-size_t aes128ecb_decrypt_file(const unsigned char* src,
+size_t aes128ecb_decrypt_buffer(const unsigned char* src,
size_t src_size,
unsigned char* dest,
Aes128KeySchedule* inverted_schedule);
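file.h renames the whole-buffer ECB routines from *_file to *_buffer; only the declarations appear in this diff. The sketch below calls the renamed encryption routine; the output-size convention (at most one extra 16-byte block) and the meaning of the return value (bytes written to dest) are assumptions, not something this diff shows.

/* Sketch only: ECB-encrypts a byte buffer with the renamed
 * aes128ecb_encrypt_buffer(). Padding and return-value conventions
 * below are assumed. */
#include <aesni/api.h>
#include <aesni/file.h>
#include <stdlib.h>

static unsigned char* encrypt_buffer(const unsigned char* src, size_t src_size,
                                     Aes128KeySchedule* key_schedule,
                                     size_t* dest_size)
{
    /* Assumption: ciphertext needs at most src_size rounded up by one block. */
    unsigned char* dest = malloc(src_size + 16);
    if (dest == NULL)
        return NULL;
    *dest_size = aes128ecb_encrypt_buffer(src, src_size, dest, key_schedule);
    return dest;
}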
diff --git a/include/aesni/raw.h b/include/aesni/raw.h
index d570ecc..6ea0c8a 100644
--- a/include/aesni/raw.h
+++ b/include/aesni/raw.h
@@ -17,10 +17,10 @@ void __fastcall raw_aes128_invert_key_schedule(
Aes128KeySchedule* key_schedule,
Aes128KeySchedule* inverted_schedule);
-AesBlock128 __fastcall raw_aes128_encrypt(
+AesBlock128 __fastcall raw_aes128_encrypt_block(
AesBlock128 plain,
Aes128KeySchedule* key_schedule);
-AesBlock128 __fastcall raw_aes128_decrypt(
+AesBlock128 __fastcall raw_aes128_decrypt_block(
AesBlock128 cipher,
Aes128KeySchedule* inverted_schedule);
@@ -32,10 +32,10 @@ void __fastcall raw_aes192_invert_key_schedule(
Aes192KeySchedule* key_schedule,
Aes192KeySchedule* inverted_schedule);
-AesBlock128 __fastcall raw_aes192_encrypt(
+AesBlock128 __fastcall raw_aes192_encrypt_block(
AesBlock128 plain,
Aes192KeySchedule* key_schedule);
-AesBlock128 __fastcall raw_aes192_decrypt(
+AesBlock128 __fastcall raw_aes192_decrypt_block(
AesBlock128 cipher,
Aes192KeySchedule* inverted_schedule);
@@ -47,9 +47,9 @@ void __fastcall raw_aes256_invert_key_schedule(
Aes256KeySchedule* key_schedule,
Aes256KeySchedule* inverted_schedule);
-AesBlock128 __fastcall raw_aes256_encrypt(
+AesBlock128 __fastcall raw_aes256_encrypt_block(
AesBlock128 plain,
Aes256KeySchedule* key_schedule);
-AesBlock128 __fastcall raw_aes256_decrypt(
+AesBlock128 __fastcall raw_aes256_decrypt_block(
AesBlock128 cipher,
Aes256KeySchedule* inverted_schedule);
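raw.h applies the same rename to the single-block primitives. The sketch below does a one-block ECB round trip through the renamed raw_* functions; raw_aes128_expand_key_schedule() lies outside the hunks shown here, so its name and signature are assumed.

/* Sketch only: encrypt one block and decrypt it back using the renamed
 * raw primitives declared in raw.h. */
#include <aesni/raw.h>

static AesBlock128 roundtrip_one_block(AesBlock128 plain, AesBlock128 key)
{
    Aes128KeySchedule key_schedule, inverted_schedule;
    raw_aes128_expand_key_schedule(key, &key_schedule);  /* assumed */
    raw_aes128_invert_key_schedule(&key_schedule, &inverted_schedule);

    AesBlock128 cipher = raw_aes128_encrypt_block(plain, &key_schedule);
    return raw_aes128_decrypt_block(cipher, &inverted_schedule);  /* == plain */
}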