Diffstat (limited to 'include/aesni/api.h')
-rw-r--r-- | include/aesni/api.h | 126
1 file changed, 90 insertions, 36 deletions
diff --git a/include/aesni/api.h b/include/aesni/api.h
index 6d275a5..fff3672 100644
--- a/include/aesni/api.h
+++ b/include/aesni/api.h
@@ -42,49 +42,67 @@ static __inline AesBlock128 __fastcall aes128ecb_decrypt(
 static __inline AesBlock128 __fastcall aes128cbc_encrypt(
     AesBlock128 plain,
     Aes128KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return raw_aes128ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+    AesBlock128 cypher = raw_aes128ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+    _mm_storeu_si128(next_init_vector, cypher);
+    return cypher;
 }
 
 static __inline AesBlock128 __fastcall aes128cbc_decrypt(
     AesBlock128 cypher,
     Aes128KeySchedule* inverted_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes128ecb_decrypt(cypher, inverted_schedule), init_vector);
+    AesBlock128 plain = _mm_xor_si128(raw_aes128ecb_decrypt(cypher, inverted_schedule), init_vector);
+    _mm_storeu_si128(next_init_vector, cypher);
+    return plain;
 }
 
 static __inline AesBlock128 __fastcall aes128cfb_encrypt(
     AesBlock128 plain,
     Aes128KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), plain);
+    AesBlock128 cypher = _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), plain);
+    _mm_storeu_si128(next_init_vector, cypher);
+    return cypher;
 }
 
 static __inline AesBlock128 __fastcall aes128cfb_decrypt(
     AesBlock128 cypher,
     Aes128KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), cypher);
+    AesBlock128 plain = _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), cypher);
+    _mm_storeu_si128(next_init_vector, cypher);
+    return plain;
 }
 
 static __inline AesBlock128 __fastcall aes128ofb_encrypt(
     AesBlock128 plain,
     Aes128KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), plain);
+    AesBlock128 tmp = raw_aes128ecb_encrypt(init_vector, key_schedule);
+    _mm_storeu_si128(next_init_vector, tmp);
+    return _mm_xor_si128(tmp, plain);
 }
 
 static __inline AesBlock128 __fastcall aes128ofb_decrypt(
     AesBlock128 cypher,
     Aes128KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), cypher);
+    AesBlock128 tmp = raw_aes128ecb_encrypt(init_vector, key_schedule);
+    _mm_storeu_si128(next_init_vector, tmp);
+    return _mm_xor_si128(tmp, cypher);
 }
 
 static __inline void __fastcall aes192_expand_key_schedule(
@@ -118,49 +136,67 @@ static __inline AesBlock128 __fastcall aes192ecb_decrypt(
 static __inline AesBlock128 __fastcall aes192cbc_encrypt(
     AesBlock128 plain,
     Aes192KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return raw_aes192ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+    AesBlock128 cypher = raw_aes192ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+    _mm_storeu_si128(next_init_vector, cypher);
+    return cypher;
 }
 
 static __inline AesBlock128 __fastcall aes192cbc_decrypt(
     AesBlock128 cypher,
     Aes192KeySchedule* inverted_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes192ecb_decrypt(cypher, inverted_schedule), init_vector);
+    AesBlock128 plain = _mm_xor_si128(raw_aes192ecb_decrypt(cypher, inverted_schedule), init_vector);
+    _mm_storeu_si128(next_init_vector, cypher);
+    return plain;
 }
 
 static __inline AesBlock128 __fastcall aes192cfb_encrypt(
     AesBlock128 plain,
     Aes192KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), plain);
+    AesBlock128 cypher = _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), plain);
+    _mm_storeu_si128(next_init_vector, cypher);
+    return cypher;
 }
 
 static __inline AesBlock128 __fastcall aes192cfb_decrypt(
     AesBlock128 cypher,
     Aes192KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), cypher);
+    AesBlock128 plain = _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), cypher);
+    _mm_storeu_si128(next_init_vector, cypher);
+    return plain;
 }
 
 static __inline AesBlock128 __fastcall aes192ofb_encrypt(
     AesBlock128 plain,
     Aes192KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), plain);
+    AesBlock128 tmp = raw_aes192ecb_encrypt(init_vector, key_schedule);
+    _mm_storeu_si128(next_init_vector, tmp);
+    return _mm_xor_si128(tmp, plain);
 }
 
 static __inline AesBlock128 __fastcall aes192ofb_decrypt(
     AesBlock128 cypher,
     Aes192KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), cypher);
+    AesBlock128 tmp = raw_aes192ecb_encrypt(init_vector, key_schedule);
+    _mm_storeu_si128(next_init_vector, tmp);
+    return _mm_xor_si128(tmp, cypher);
 }
 
 static __inline void __fastcall aes256_expand_key_schedule(
@@ -194,47 +230,65 @@ static __inline AesBlock128 __fastcall aes256ecb_decrypt(
 static __inline AesBlock128 __fastcall aes256cbc_encrypt(
     AesBlock128 plain,
     Aes256KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return raw_aes256ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+    AesBlock128 cypher = raw_aes256ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+    _mm_storeu_si128(next_init_vector, cypher);
+    return cypher;
 }
 
 static __inline AesBlock128 __fastcall aes256cbc_decrypt(
     AesBlock128 cypher,
     Aes256KeySchedule* inverted_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes256ecb_decrypt(cypher, inverted_schedule), init_vector);
+    AesBlock128 plain = _mm_xor_si128(raw_aes256ecb_decrypt(cypher, inverted_schedule), init_vector);
+    _mm_storeu_si128(next_init_vector, cypher);
+    return plain;
 }
 
 static __inline AesBlock128 __fastcall aes256cfb_encrypt(
     AesBlock128 plain,
     Aes256KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), plain);
+    AesBlock128 cypher = _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), plain);
+    _mm_storeu_si128(next_init_vector, cypher);
+    return cypher;
 }
 
 static __inline AesBlock128 __fastcall aes256cfb_decrypt(
     AesBlock128 cypher,
     Aes256KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), cypher);
+    AesBlock128 plain = _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), cypher);
+    _mm_storeu_si128(next_init_vector, cypher);
+    return plain;
 }
 
 static __inline AesBlock128 __fastcall aes256ofb_encrypt(
     AesBlock128 plain,
     Aes256KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), plain);
+    AesBlock128 tmp = raw_aes256ecb_encrypt(init_vector, key_schedule);
+    _mm_storeu_si128(next_init_vector, tmp);
+    return _mm_xor_si128(tmp, plain);
 }
 
 static __inline AesBlock128 __fastcall aes256ofb_decrypt(
     AesBlock128 cypher,
     Aes256KeySchedule* key_schedule,
-    AesBlock128 init_vector)
+    AesBlock128 init_vector,
+    AesBlock128* next_init_vector)
 {
-    return _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), cypher);
+    AesBlock128 tmp = raw_aes256ecb_encrypt(init_vector, key_schedule);
+    _mm_storeu_si128(next_init_vector, tmp);
+    return _mm_xor_si128(tmp, cypher);
 }
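The change above turns each CBC/CFB/OFB helper from a pure function of a caller-maintained IV into one that also hands back the IV for the next block through the new next_init_vector out-parameter. The sketch below shows how a caller might chain whole blocks with the AES-128 CBC encrypt variant. It is a minimal illustration, not part of the header: it assumes AesBlock128 is a typedef for __m128i (consistent with the _mm_storeu_si128 calls in the diff), that the header is included as <aesni/api.h>, and that an aes128_expand_key_schedule counterpart to the aes192/aes256 variants visible in the diff context has already filled key_schedule.

#include <stddef.h>
#include <emmintrin.h>

#include <aesni/api.h>   /* assumed include path for the header shown in this diff */

/* Encrypt n_blocks contiguous 16-byte blocks in CBC mode, chaining the IV
 * through the new next_init_vector out-parameter (&iv below). */
static void cbc128_encrypt_buffer(
    const unsigned char* plain,
    unsigned char* cypher,
    size_t n_blocks,
    Aes128KeySchedule* key_schedule,   /* already expanded from the key */
    AesBlock128 init_vector)
{
    AesBlock128 iv = init_vector;
    size_t i;

    for (i = 0; i < n_blocks; ++i)
    {
        AesBlock128 block = _mm_loadu_si128((const AesBlock128*) (plain + 16 * i));
        /* aes128cbc_encrypt() now both returns the cypher block and stores it
         * into &iv, so the next iteration picks up the updated IV. */
        AesBlock128 out = aes128cbc_encrypt(block, key_schedule, iv, &iv);
        _mm_storeu_si128((AesBlock128*) (cypher + 16 * i), out);
    }
}

Decryption would chain the same way; for CBC the next IV is the current cypher block, which the updated aes128cbc_decrypt already stores through next_init_vector for the caller.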