| author    | Egor Tensin <Egor.Tensin@gmail.com> | 2015-06-04 03:25:57 +0300 |
| committer | Egor Tensin <Egor.Tensin@gmail.com> | 2015-06-04 03:25:57 +0300 |
| commit    | f89d5f29a90d6c71068a759e5891e925131a5e65 (patch) | |
| tree      | 0eef5f382f350542d9fa5a4288a285422ae61296 /include/aesni | |
| parent    | add CTR mode (diff) | |
grammar tweaks
Diffstat (limited to 'include/aesni')
-rw-r--r-- | include/aesni/api.h | 108
-rw-r--r-- | include/aesni/raw.h | 6

2 files changed, 57 insertions, 57 deletions
diff --git a/include/aesni/api.h b/include/aesni/api.h
index 3318aef..0d45fd8 100644
--- a/include/aesni/api.h
+++ b/include/aesni/api.h
@@ -33,10 +33,10 @@ static __inline AesBlock128 __fastcall aes128ecb_encrypt(
 }

 static __inline AesBlock128 __fastcall aes128ecb_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes128KeySchedule* inverted_schedule)
 {
-    return raw_aes128ecb_decrypt(cypher, inverted_schedule);
+    return raw_aes128ecb_decrypt(cipher, inverted_schedule);
 }

 static __inline AesBlock128 __fastcall aes128cbc_encrypt(
@@ -45,19 +45,19 @@ static __inline AesBlock128 __fastcall aes128cbc_encrypt(
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
-    AesBlock128 cypher = raw_aes128ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
-    _mm_storeu_si128(next_init_vector, cypher);
-    return cypher;
+    AesBlock128 cipher = raw_aes128ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+    _mm_storeu_si128(next_init_vector, cipher);
+    return cipher;
 }

 static __inline AesBlock128 __fastcall aes128cbc_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes128KeySchedule* inverted_schedule,
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
-    AesBlock128 plain = _mm_xor_si128(raw_aes128ecb_decrypt(cypher, inverted_schedule), init_vector);
-    _mm_storeu_si128(next_init_vector, cypher);
+    AesBlock128 plain = _mm_xor_si128(raw_aes128ecb_decrypt(cipher, inverted_schedule), init_vector);
+    _mm_storeu_si128(next_init_vector, cipher);
     return plain;
 }

@@ -67,19 +67,19 @@ static __inline AesBlock128 __fastcall aes128cfb_encrypt(
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
-    AesBlock128 cypher = _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), plain);
-    _mm_storeu_si128(next_init_vector, cypher);
-    return cypher;
+    AesBlock128 cipher = _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), plain);
+    _mm_storeu_si128(next_init_vector, cipher);
+    return cipher;
 }

 static __inline AesBlock128 __fastcall aes128cfb_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes128KeySchedule* key_schedule,
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
-    AesBlock128 plain = _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), cypher);
-    _mm_storeu_si128(next_init_vector, cypher);
+    AesBlock128 plain = _mm_xor_si128(raw_aes128ecb_encrypt(init_vector, key_schedule), cipher);
+    _mm_storeu_si128(next_init_vector, cipher);
     return plain;
 }

@@ -95,14 +95,14 @@ static __inline AesBlock128 __fastcall aes128ofb_encrypt(
 }

 static __inline AesBlock128 __fastcall aes128ofb_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes128KeySchedule* key_schedule,
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
     AesBlock128 tmp = raw_aes128ecb_encrypt(init_vector, key_schedule);
     _mm_storeu_si128(next_init_vector, tmp);
-    return _mm_xor_si128(tmp, cypher);
+    return _mm_xor_si128(tmp, cipher);
 }

 static __inline AesBlock128 __fastcall aes128ctr_encrypt(
@@ -118,7 +118,7 @@ static __inline AesBlock128 __fastcall aes128ctr_encrypt(
 }

 static __inline AesBlock128 __fastcall aes128ctr_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes128KeySchedule* key_schedule,
     AesBlock128 init_vector,
     int counter)
@@ -126,7 +126,7 @@ static __inline AesBlock128 __fastcall aes128ctr_decrypt(
     init_vector = aes128_le2be(init_vector);
     init_vector = _mm_add_epi32(init_vector, make_aes_block128(0, 0, 0, counter));
     init_vector = aes128_be2le(init_vector);
-    return _mm_xor_si128(cypher, raw_aes128ecb_encrypt(init_vector, key_schedule));
+    return _mm_xor_si128(cipher, raw_aes128ecb_encrypt(init_vector, key_schedule));
 }

 static __inline void __fastcall aes192_expand_key_schedule(
@@ -151,10 +151,10 @@ static __inline AesBlock128 __fastcall aes192ecb_encrypt(
 }

 static __inline AesBlock128 __fastcall aes192ecb_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes192KeySchedule* inverted_schedule)
 {
-    return raw_aes192ecb_decrypt(cypher, inverted_schedule);
+    return raw_aes192ecb_decrypt(cipher, inverted_schedule);
 }

 static __inline AesBlock128 __fastcall aes192cbc_encrypt(
@@ -163,19 +163,19 @@ static __inline AesBlock128 __fastcall aes192cbc_encrypt(
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
-    AesBlock128 cypher = raw_aes192ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
-    _mm_storeu_si128(next_init_vector, cypher);
-    return cypher;
+    AesBlock128 cipher = raw_aes192ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+    _mm_storeu_si128(next_init_vector, cipher);
+    return cipher;
 }

 static __inline AesBlock128 __fastcall aes192cbc_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes192KeySchedule* inverted_schedule,
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
-    AesBlock128 plain = _mm_xor_si128(raw_aes192ecb_decrypt(cypher, inverted_schedule), init_vector);
-    _mm_storeu_si128(next_init_vector, cypher);
+    AesBlock128 plain = _mm_xor_si128(raw_aes192ecb_decrypt(cipher, inverted_schedule), init_vector);
+    _mm_storeu_si128(next_init_vector, cipher);
     return plain;
 }

@@ -185,19 +185,19 @@ static __inline AesBlock128 __fastcall aes192cfb_encrypt(
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
-    AesBlock128 cypher = _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), plain);
-    _mm_storeu_si128(next_init_vector, cypher);
-    return cypher;
+    AesBlock128 cipher = _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), plain);
+    _mm_storeu_si128(next_init_vector, cipher);
+    return cipher;
 }

 static __inline AesBlock128 __fastcall aes192cfb_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes192KeySchedule* key_schedule,
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
-    AesBlock128 plain = _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), cypher);
-    _mm_storeu_si128(next_init_vector, cypher);
+    AesBlock128 plain = _mm_xor_si128(raw_aes192ecb_encrypt(init_vector, key_schedule), cipher);
+    _mm_storeu_si128(next_init_vector, cipher);
     return plain;
 }

@@ -213,14 +213,14 @@ static __inline AesBlock128 __fastcall aes192ofb_encrypt(
 }

 static __inline AesBlock128 __fastcall aes192ofb_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes192KeySchedule* key_schedule,
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
     AesBlock128 tmp = raw_aes192ecb_encrypt(init_vector, key_schedule);
     _mm_storeu_si128(next_init_vector, tmp);
-    return _mm_xor_si128(tmp, cypher);
+    return _mm_xor_si128(tmp, cipher);
 }

 static __inline AesBlock128 __fastcall aes192ctr_encrypt(
@@ -236,7 +236,7 @@ static __inline AesBlock128 __fastcall aes192ctr_encrypt(
 }

 static __inline AesBlock128 __fastcall aes192ctr_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes192KeySchedule* key_schedule,
     AesBlock128 init_vector,
     int counter)
@@ -244,7 +244,7 @@ static __inline AesBlock128 __fastcall aes192ctr_decrypt(
     init_vector = aes128_le2be(init_vector);
     init_vector = _mm_add_epi32(init_vector, make_aes_block128(0, 0, 0, counter));
     init_vector = aes128_be2le(init_vector);
-    return _mm_xor_si128(cypher, raw_aes192ecb_encrypt(init_vector, key_schedule));
+    return _mm_xor_si128(cipher, raw_aes192ecb_encrypt(init_vector, key_schedule));
 }

 static __inline void __fastcall aes256_expand_key_schedule(
@@ -269,10 +269,10 @@ static __inline AesBlock128 __fastcall aes256ecb_encrypt(
 }

 static __inline AesBlock128 __fastcall aes256ecb_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes256KeySchedule* inverted_schedule)
 {
-    return raw_aes256ecb_decrypt(cypher, inverted_schedule);
+    return raw_aes256ecb_decrypt(cipher, inverted_schedule);
 }

 static __inline AesBlock128 __fastcall aes256cbc_encrypt(
@@ -281,19 +281,19 @@ static __inline AesBlock128 __fastcall aes256cbc_encrypt(
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
-    AesBlock128 cypher = raw_aes256ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
-    _mm_storeu_si128(next_init_vector, cypher);
-    return cypher;
+    AesBlock128 cipher = raw_aes256ecb_encrypt(_mm_xor_si128(plain, init_vector), key_schedule);
+    _mm_storeu_si128(next_init_vector, cipher);
+    return cipher;
 }

 static __inline AesBlock128 __fastcall aes256cbc_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes256KeySchedule* inverted_schedule,
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
-    AesBlock128 plain = _mm_xor_si128(raw_aes256ecb_decrypt(cypher, inverted_schedule), init_vector);
-    _mm_storeu_si128(next_init_vector, cypher);
+    AesBlock128 plain = _mm_xor_si128(raw_aes256ecb_decrypt(cipher, inverted_schedule), init_vector);
+    _mm_storeu_si128(next_init_vector, cipher);
     return plain;
 }

@@ -303,19 +303,19 @@ static __inline AesBlock128 __fastcall aes256cfb_encrypt(
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
-    AesBlock128 cypher = _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), plain);
-    _mm_storeu_si128(next_init_vector, cypher);
-    return cypher;
+    AesBlock128 cipher = _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), plain);
+    _mm_storeu_si128(next_init_vector, cipher);
+    return cipher;
 }

 static __inline AesBlock128 __fastcall aes256cfb_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes256KeySchedule* key_schedule,
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
-    AesBlock128 plain = _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), cypher);
-    _mm_storeu_si128(next_init_vector, cypher);
+    AesBlock128 plain = _mm_xor_si128(raw_aes256ecb_encrypt(init_vector, key_schedule), cipher);
+    _mm_storeu_si128(next_init_vector, cipher);
     return plain;
 }

@@ -331,14 +331,14 @@ static __inline AesBlock128 __fastcall aes256ofb_encrypt(
 }

 static __inline AesBlock128 __fastcall aes256ofb_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes256KeySchedule* key_schedule,
     AesBlock128 init_vector,
     AesBlock128* next_init_vector)
 {
     AesBlock128 tmp = raw_aes256ecb_encrypt(init_vector, key_schedule);
     _mm_storeu_si128(next_init_vector, tmp);
-    return _mm_xor_si128(tmp, cypher);
+    return _mm_xor_si128(tmp, cipher);
 }

 static __inline AesBlock128 __fastcall aes256ctr_encrypt(
@@ -354,7 +354,7 @@ static __inline AesBlock128 __fastcall aes256ctr_encrypt(
 }

 static __inline AesBlock128 __fastcall aes256ctr_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes256KeySchedule* key_schedule,
     AesBlock128 init_vector,
     int counter)
@@ -362,5 +362,5 @@ static __inline AesBlock128 __fastcall aes256ctr_decrypt(
     init_vector = aes128_le2be(init_vector);
     init_vector = _mm_add_epi32(init_vector, make_aes_block128(0, 0, 0, counter));
     init_vector = aes128_be2le(init_vector);
-    return _mm_xor_si128(cypher, raw_aes256ecb_encrypt(init_vector, key_schedule));
+    return _mm_xor_si128(cipher, raw_aes256ecb_encrypt(init_vector, key_schedule));
 }
diff --git a/include/aesni/raw.h b/include/aesni/raw.h
index d6b9518..62ce639 100644
--- a/include/aesni/raw.h
+++ b/include/aesni/raw.h
@@ -21,7 +21,7 @@ AesBlock128 __fastcall raw_aes128ecb_encrypt(
     AesBlock128 plain,
     Aes128KeySchedule* key_schedule);
 AesBlock128 __fastcall raw_aes128ecb_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes128KeySchedule* inverted_schedule);

 void __fastcall raw_aes192_expand_key_schedule(
@@ -36,7 +36,7 @@ AesBlock128 __fastcall raw_aes192ecb_encrypt(
     AesBlock128 plain,
     Aes192KeySchedule* key_schedule);
 AesBlock128 __fastcall raw_aes192ecb_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes192KeySchedule* inverted_schedule);

 void __fastcall raw_aes256_expand_key_schedule(
@@ -51,5 +51,5 @@ AesBlock128 __fastcall raw_aes256ecb_encrypt(
     AesBlock128 plain,
     Aes256KeySchedule* key_schedule);
 AesBlock128 __fastcall raw_aes256ecb_decrypt(
-    AesBlock128 cypher,
+    AesBlock128 cipher,
     Aes256KeySchedule* inverted_schedule);
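For orientation only (not part of this commit), a minimal sketch of how the renamed single-block API could be driven after this patch: one AES-128-CBC encrypt/decrypt round trip. The aes128cbc_encrypt, aes128cbc_decrypt and make_aes_block128 signatures are taken from the diff above; aes128_expand_key_schedule, aes128_invert_key_schedule and the <aesni/api.h> include path are assumptions inferred from the aes192/aes256 counterparts visible in the hunk context and may not match the real headers.

/* Hedged usage sketch -- not part of this commit. Calls marked "assumed"
 * are inferred from the aes192/aes256 counterparts in the hunk context
 * and may be named differently in the actual headers. */
#include <aesni/api.h> /* assumed include path */

int main(void)
{
    AesBlock128 plain, key, iv, next_iv, cipher, decrypted;
    Aes128KeySchedule key_schedule, inverted_schedule;

    plain = make_aes_block128(0x11111111, 0x22222222, 0x33333333, 0x44444444);
    key   = make_aes_block128(0x0c0d0e0f, 0x08090a0b, 0x04050607, 0x00010203);
    iv    = make_aes_block128(0, 0, 0, 0);

    aes128_expand_key_schedule(key, &key_schedule);                /* assumed */
    aes128_invert_key_schedule(&key_schedule, &inverted_schedule); /* assumed */

    /* Signatures below are as they appear in the patched api.h. */
    cipher = aes128cbc_encrypt(plain, &key_schedule, iv, &next_iv);
    decrypted = aes128cbc_decrypt(cipher, &inverted_schedule, iv, &next_iv);

    (void) decrypted; /* should equal plain */
    return 0;
}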