author     Egor Tensin <Egor.Tensin@gmail.com>  2015-06-11 03:24:02 +0300
committer  Egor Tensin <Egor.Tensin@gmail.com>  2015-06-11 03:24:02 +0300
commit     d899adaa20c3242200867b4a8b41af08c60f2abe (patch)
tree       acf0d71272349f7ef2d29d9aa6957c94f852136c /include
parent     code style (diff)
download   aes-tools-d899adaa20c3242200867b4a8b41af08c60f2abe.tar.gz
           aes-tools-d899adaa20c3242200867b4a8b41af08c60f2abe.zip
add `assert`s
Diffstat (limited to 'include')
-rw-r--r--  include/aesni/block.h  97
1 file changed, 97 insertions(+), 0 deletions(-)
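The change is uniform: every inline wrapper in block.h now asserts that its pointer parameters are non-null before delegating to the corresponding aesni_raw_* routine. By-value arguments (the 128-bit blocks themselves) need no check, which is why the ECB and CTR wrappers assert a single pointer, the CBC/CFB/OFB wrappers assert two, and the 192/256-bit expand wrappers also assert the key, since those larger keys arrive by pointer. A minimal sketch of the pattern, with stand-in types and a stand-in raw routine in place of the real ones from the header:

    #include <assert.h>

    typedef struct { unsigned char bytes[16]; } Block;    /* stand-in for AesNI_Block128 */
    typedef struct { Block keys[11]; } KeySchedule;       /* stand-in for AesNI_KeySchedule128 */

    static void raw_expand(Block key, KeySchedule* ks)    /* stand-in for aesni_raw_expand_key_schedule128 */
    {
        (void) key; (void) ks;                            /* real work elided */
    }

    static __inline void expand(Block key, KeySchedule* ks)
    {
        assert(ks);                  /* trap NULL in debug builds; a no-op under NDEBUG */
        raw_expand(key, ks);
    }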
diff --git a/include/aesni/block.h b/include/aesni/block.h
index 7f8be7b..0909744 100644
--- a/include/aesni/block.h
+++ b/include/aesni/block.h
@@ -11,6 +11,8 @@
#include "data.h"
#include "raw.h"
+#include <assert.h>
+
#ifdef __cplusplus
extern "C"
{
@@ -20,6 +22,8 @@ static __inline void __fastcall aesni_expand_key_schedule128(
AesNI_Block128 key,
AesNI_KeySchedule128* key_schedule)
{
+ assert(key_schedule);
+
aesni_raw_expand_key_schedule128(key, key_schedule);
}
@@ -27,6 +31,9 @@ static __inline void __fastcall aesni_invert_key_schedule128(
AesNI_KeySchedule128* key_schedule,
AesNI_KeySchedule128* inverted_schedule)
{
+ assert(key_schedule);
+ assert(inverted_schedule);
+
aesni_raw_invert_key_schedule128(key_schedule, inverted_schedule);
}
@@ -34,6 +41,8 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_ecb128(
AesNI_Block128 plain,
AesNI_KeySchedule128* key_schedule)
{
+ assert(key_schedule);
+
return aesni_raw_encrypt_block128(plain, key_schedule);
}
@@ -41,6 +50,8 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_ecb128(
AesNI_Block128 cipher,
AesNI_KeySchedule128* inverted_schedule)
{
+ assert(inverted_schedule);
+
return aesni_raw_decrypt_block128(cipher, inverted_schedule);
}
@@ -50,6 +61,9 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_cbc128(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 cipher = aesni_raw_encrypt_block128(_mm_xor_si128(plain, init_vector), key_schedule);
*next_init_vector = cipher;
return cipher;
@@ -61,6 +75,9 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_cbc128(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(inverted_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 plain = _mm_xor_si128(aesni_raw_decrypt_block128(cipher, inverted_schedule), init_vector);
*next_init_vector = cipher;
return plain;
@@ -72,6 +89,9 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_cfb128(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 cipher = _mm_xor_si128(aesni_raw_encrypt_block128(init_vector, key_schedule), plain);
*next_init_vector = cipher;
return cipher;
@@ -83,6 +103,9 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_cfb128(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 plain = _mm_xor_si128(aesni_raw_encrypt_block128(init_vector, key_schedule), cipher);
*next_init_vector = cipher;
return plain;
@@ -94,6 +117,9 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_ofb128(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 tmp = aesni_raw_encrypt_block128(init_vector, key_schedule);
*next_init_vector = tmp;
return _mm_xor_si128(tmp, plain);
@@ -105,6 +131,9 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_ofb128(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 tmp = aesni_raw_encrypt_block128(init_vector, key_schedule);
*next_init_vector = tmp;
return _mm_xor_si128(tmp, cipher);
@@ -116,6 +145,8 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_ctr128(
AesNI_Block128 init_vector,
int counter)
{
+ assert(key_schedule);
+
init_vector = aesni_le2be128(init_vector);
init_vector = _mm_add_epi32(init_vector, aesni_make_block128(0, 0, 0, counter));
init_vector = aesni_be2le128(init_vector);
@@ -128,6 +159,8 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_ctr128(
AesNI_Block128 init_vector,
int counter)
{
+ assert(key_schedule);
+
init_vector = aesni_le2be128(init_vector);
init_vector = _mm_add_epi32(init_vector, aesni_make_block128(0, 0, 0, counter));
init_vector = aesni_be2le128(init_vector);
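Aside from the new asserts, the CTR hunks show the counter arithmetic this header relies on: the IV is byte-reversed into big-endian lane order, the counter is added into the lowest 32-bit lane via _mm_add_epi32, and the block is reversed back. Since _mm_add_epi32 adds each 32-bit lane independently, a carry out of the low lane is silently dropped; a minimal standalone illustration (not part of the header):

    #include <emmintrin.h>  /* SSE2: _mm_set_epi32, _mm_add_epi32 */

    static void ctr_lane_demo(void)
    {
        __m128i iv  = _mm_set_epi32(0, 0, 0, -1);  /* low 32-bit lane all ones */
        __m128i one = _mm_set_epi32(0, 0, 0, 1);
        __m128i sum = _mm_add_epi32(iv, one);      /* low lane wraps to 0; lane 1 is unchanged */
        (void) sum;
    }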
@@ -138,6 +171,9 @@ static __inline void __fastcall aesni_expand_key_schedule192(
AesNI_Block192* key,
AesNI_KeySchedule192* key_schedule)
{
+ assert(key);
+ assert(key_schedule);
+
aesni_raw_expand_key_schedule192(key->lo, key->hi, key_schedule);
}
@@ -145,6 +181,9 @@ static __inline void __fastcall aesni_invert_key_schedule192(
AesNI_KeySchedule192* key_schedule,
AesNI_KeySchedule192* inverted_schedule)
{
+ assert(key_schedule);
+ assert(inverted_schedule);
+
aesni_raw_invert_key_schedule192(key_schedule, inverted_schedule);
}
@@ -152,6 +191,8 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_ecb192(
AesNI_Block128 plain,
AesNI_KeySchedule192* key_schedule)
{
+ assert(key_schedule);
+
return aesni_raw_encrypt_block192(plain, key_schedule);
}
@@ -159,6 +200,8 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_ecb192(
AesNI_Block128 cipher,
AesNI_KeySchedule192* inverted_schedule)
{
+ assert(inverted_schedule);
+
return aesni_raw_decrypt_block192(cipher, inverted_schedule);
}
@@ -168,6 +211,9 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_cbc192(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 cipher = aesni_raw_encrypt_block192(_mm_xor_si128(plain, init_vector), key_schedule);
*next_init_vector = cipher;
return cipher;
@@ -179,6 +225,9 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_cbc192(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(inverted_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 plain = _mm_xor_si128(aesni_raw_decrypt_block192(cipher, inverted_schedule), init_vector);
*next_init_vector = cipher;
return plain;
@@ -190,6 +239,9 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_cfb192(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 cipher = _mm_xor_si128(aesni_raw_encrypt_block192(init_vector, key_schedule), plain);
*next_init_vector = cipher;
return cipher;
@@ -201,6 +253,9 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_cfb192(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 plain = _mm_xor_si128(aesni_raw_encrypt_block192(init_vector, key_schedule), cipher);
*next_init_vector = cipher;
return plain;
@@ -212,6 +267,9 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_ofb192(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 tmp = aesni_raw_encrypt_block192(init_vector, key_schedule);
*next_init_vector = tmp;
return _mm_xor_si128(tmp, plain);
@@ -223,6 +281,9 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_ofb192(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 tmp = aesni_raw_encrypt_block192(init_vector, key_schedule);
*next_init_vector = tmp;
return _mm_xor_si128(tmp, cipher);
@@ -234,6 +295,8 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_ctr192(
AesNI_Block128 init_vector,
int counter)
{
+ assert(key_schedule);
+
init_vector = aesni_le2be128(init_vector);
init_vector = _mm_add_epi32(init_vector, aesni_make_block128(0, 0, 0, counter));
init_vector = aesni_be2le128(init_vector);
@@ -246,6 +309,8 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_ctr192(
AesNI_Block128 init_vector,
int counter)
{
+ assert(key_schedule);
+
init_vector = aesni_le2be128(init_vector);
init_vector = _mm_add_epi32(init_vector, aesni_make_block128(0, 0, 0, counter));
init_vector = aesni_be2le128(init_vector);
@@ -256,6 +321,9 @@ static __inline void __fastcall aesni_expand_key_schedule256(
AesNI_Block256* key,
AesNI_KeySchedule256* key_schedule)
{
+ assert(key);
+ assert(key_schedule);
+
aesni_raw_expand_key_schedule256(key->lo, key->hi, key_schedule);
}
@@ -263,6 +331,9 @@ static __inline void __fastcall aesni_invert_key_schedule256(
AesNI_KeySchedule256* key_schedule,
AesNI_KeySchedule256* inverted_schedule)
{
+ assert(key_schedule);
+ assert(inverted_schedule);
+
aesni_raw_invert_key_schedule256(key_schedule, inverted_schedule);
}
@@ -270,6 +341,8 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_ecb256(
AesNI_Block128 plain,
AesNI_KeySchedule256* key_schedule)
{
+ assert(key_schedule);
+
return aesni_raw_encrypt_block256(plain, key_schedule);
}
@@ -277,6 +350,8 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_ecb256(
AesNI_Block128 cipher,
AesNI_KeySchedule256* inverted_schedule)
{
+ assert(inverted_schedule);
+
return aesni_raw_decrypt_block256(cipher, inverted_schedule);
}
@@ -286,6 +361,9 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_cbc256(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 cipher = aesni_raw_encrypt_block256(_mm_xor_si128(plain, init_vector), key_schedule);
*next_init_vector = cipher;
return cipher;
@@ -297,6 +375,9 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_cbc256(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(inverted_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 plain = _mm_xor_si128(aesni_raw_decrypt_block256(cipher, inverted_schedule), init_vector);
*next_init_vector = cipher;
return plain;
@@ -308,6 +389,9 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_cfb256(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 cipher = _mm_xor_si128(aesni_raw_encrypt_block256(init_vector, key_schedule), plain);
*next_init_vector = cipher;
return cipher;
@@ -319,6 +403,9 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_cfb256(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 plain = _mm_xor_si128(aesni_raw_encrypt_block256(init_vector, key_schedule), cipher);
*next_init_vector = cipher;
return plain;
@@ -330,6 +417,9 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_ofb256(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 tmp = aesni_raw_encrypt_block256(init_vector, key_schedule);
*next_init_vector = tmp;
return _mm_xor_si128(tmp, plain);
@@ -341,6 +431,9 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_ofb256(
AesNI_Block128 init_vector,
AesNI_Block128* next_init_vector)
{
+ assert(key_schedule);
+ assert(next_init_vector);
+
AesNI_Block128 tmp = aesni_raw_encrypt_block256(init_vector, key_schedule);
*next_init_vector = tmp;
return _mm_xor_si128(tmp, cipher);
@@ -352,6 +445,8 @@ static __inline AesNI_Block128 __fastcall aesni_encrypt_block_ctr256(
AesNI_Block128 init_vector,
int counter)
{
+ assert(key_schedule);
+
init_vector = aesni_le2be128(init_vector);
init_vector = _mm_add_epi32(init_vector, aesni_make_block128(0, 0, 0, counter));
init_vector = aesni_be2le128(init_vector);
@@ -364,6 +459,8 @@ static __inline AesNI_Block128 __fastcall aesni_decrypt_block_ctr256(
AesNI_Block128 init_vector,
int counter)
{
+ assert(key_schedule);
+
init_vector = aesni_le2be128(init_vector);
init_vector = _mm_add_epi32(init_vector, aesni_make_block128(0, 0, 0, counter));
init_vector = aesni_be2le128(init_vector);
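None of these checks survive a release build: when NDEBUG is defined before <assert.h> is included, every assert compiles to nothing, so the wrappers stay zero-cost. A hypothetical caller, using only functions that appear in this diff (place inside a function body; the block values are arbitrary placeholders):

    AesNI_Block128 plain = aesni_make_block128(0, 0, 0, 42);
    AesNI_Block128 key   = aesni_make_block128(0, 0, 0, 0);
    AesNI_KeySchedule128 key_schedule;

    aesni_expand_key_schedule128(key, &key_schedule);  /* asserts the schedule pointer */
    AesNI_Block128 cipher = aesni_encrypt_block_ecb128(plain, &key_schedule);
    (void) cipher;

    /* aesni_expand_key_schedule128(key, NULL);  -- would abort a debug build */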