diff options
author | Egor Tensin <Egor.Tensin@gmail.com> | 2015-06-10 20:48:53 +0300 |
---|---|---|
committer | Egor Tensin <Egor.Tensin@gmail.com> | 2015-06-10 20:48:53 +0300 |
commit | ab42aa88992409eb5db5714df18390387c6e041c (patch) | |
tree | 8f5d8adf9e644f192d6d43d79c8db9656e57c90e /src/asm/aes128.asm | |
parent | bugfix (diff) | |
download | aes-tools-ab42aa88992409eb5db5714df18390387c6e041c.tar.gz aes-tools-ab42aa88992409eb5db5714df18390387c6e041c.zip |
fix register usage in the asm implementation
Apparently, the previously used xmm6/xmm7 registers are exactly the
wrong ones to use for storing temp values.
The xmm4/xmm5 registers, on the other hand, don't need to be preserved,
so I switched to those.
Diffstat (limited to 'src/asm/aes128.asm')
-rw-r--r-- | src/asm/aes128.asm | 96 |
1 files changed, 48 insertions, 48 deletions
diff --git a/src/asm/aes128.asm b/src/asm/aes128.asm index b49de0e..874f876 100644 --- a/src/asm/aes128.asm +++ b/src/asm/aes128.asm @@ -88,25 +88,25 @@ movdqa [ecx], xmm0 ; sets w[0], w[1], w[2], w[3] add ecx, 10h ; ecx = &w[4] - aeskeygenassist xmm7, xmm0, 01h ; xmm7[127:96] = RotWord(SubWord(w[3]))^Rcon + aeskeygenassist xmm5, xmm0, 01h ; xmm5[127:96] = RotWord(SubWord(w[3]))^Rcon call aes128_keygen_assist ; sets w[4], w[5], w[6], w[7] - aeskeygenassist xmm7, xmm0, 02h ; xmm7[127:96] = RotWord(SubWord(w[7]))^Rcon + aeskeygenassist xmm5, xmm0, 02h ; xmm5[127:96] = RotWord(SubWord(w[7]))^Rcon call aes128_keygen_assist ; sets w[8], w[9], w[10], w[11] - aeskeygenassist xmm7, xmm0, 04h ; xmm7[127:96] = RotWord(SubWord(w[11]))^Rcon + aeskeygenassist xmm5, xmm0, 04h ; xmm5[127:96] = RotWord(SubWord(w[11]))^Rcon call aes128_keygen_assist ; sets w[12], w[13], w[14], w[15] - aeskeygenassist xmm7, xmm0, 08h ; xmm7[127:96] = RotWord(SubWord(w[15]))^Rcon + aeskeygenassist xmm5, xmm0, 08h ; xmm5[127:96] = RotWord(SubWord(w[15]))^Rcon call aes128_keygen_assist ; sets w[16], w[17], w[18], w[19] - aeskeygenassist xmm7, xmm0, 10h ; xmm7[127:96] = RotWord(SubWord(w[19]))^Rcon + aeskeygenassist xmm5, xmm0, 10h ; xmm5[127:96] = RotWord(SubWord(w[19]))^Rcon call aes128_keygen_assist ; sets w[20], w[21], w[22], w[23] - aeskeygenassist xmm7, xmm0, 20h ; xmm7[127:96] = RotWord(SubWord(w[23]))^Rcon + aeskeygenassist xmm5, xmm0, 20h ; xmm5[127:96] = RotWord(SubWord(w[23]))^Rcon call aes128_keygen_assist ; sets w[24], w[25], w[26], w[27] - aeskeygenassist xmm7, xmm0, 40h ; xmm7[127:96] = RotWord(SubWord(w[27]))^Rcon + aeskeygenassist xmm5, xmm0, 40h ; xmm5[127:96] = RotWord(SubWord(w[27]))^Rcon call aes128_keygen_assist ; sets w[28], w[29], w[30], w[31] - aeskeygenassist xmm7, xmm0, 80h ; xmm7[127:96] = RotWord(SubWord(w[31]))^Rcon + aeskeygenassist xmm5, xmm0, 80h ; xmm5[127:96] = RotWord(SubWord(w[31]))^Rcon call aes128_keygen_assist ; sets w[32], w[33], w[34], w[35] - aeskeygenassist 
xmm7, xmm0, 1Bh ; xmm7[127:96] = RotWord(SubWord(w[35]))^Rcon + aeskeygenassist xmm5, xmm0, 1Bh ; xmm5[127:96] = RotWord(SubWord(w[35]))^Rcon call aes128_keygen_assist ; sets w[36], w[37], w[38], w[39] - aeskeygenassist xmm7, xmm0, 36h ; xmm7[127:96] = RotWord(SubWord(w[39]))^Rcon + aeskeygenassist xmm5, xmm0, 36h ; xmm5[127:96] = RotWord(SubWord(w[39]))^Rcon call aes128_keygen_assist ; sets w[40], w[41], w[42], w[43] ret @@ -117,7 +117,7 @@ aes128_keygen_assist: ; * xmm0[95:64] == w[i+2], ; * xmm0[63:32] == w[i+1], ; * xmm0[31:0] == w[i], - ; * xmm7[127:96] == RotWord(SubWord(w[i+3]))^Rcon, + ; * xmm5[127:96] == RotWord(SubWord(w[i+3]))^Rcon, ; * ecx == &w[i+4]. ; ; Postconditions: @@ -126,20 +126,20 @@ aes128_keygen_assist: ; * xmm0[63:32] == w[i+5] == RotWord(SubWord(w[i+3]))^Rcon^w[i+1]^w[i], ; * xmm0[31:0] == w[i+4] == RotWord(SubWord(w[i+3]))^Rcon^w[i], ; * ecx == &w[i+8], - ; * the value in xmm6 is also modified. + ; * the value in xmm4 is also modified. ; Calculate ; w[i+3]^w[i+2]^w[i+1]^w[i], ; w[i+2]^w[i+1]^w[i], ; w[i+1]^w[i] and ; w[i]. - movdqa xmm6, xmm0 ; xmm6 = xmm0 - pslldq xmm6, 4 ; xmm6 <<= 32 - pxor xmm0, xmm6 ; xmm0 ^= xmm6 - pslldq xmm6, 4 ; xmm6 <<= 32 - pxor xmm0, xmm6 ; xmm0 ^= xmm6 - pslldq xmm6, 4 ; xmm6 <<= 32 - pxor xmm0, xmm6 ; xmm0 ^= xmm6 + movdqa xmm4, xmm0 ; xmm4 = xmm0 + pslldq xmm4, 4 ; xmm4 <<= 32 + pxor xmm0, xmm4 ; xmm0 ^= xmm4 + pslldq xmm4, 4 ; xmm4 <<= 32 + pxor xmm0, xmm4 ; xmm0 ^= xmm4 + pslldq xmm4, 4 ; xmm4 <<= 32 + pxor xmm0, xmm4 ; xmm0 ^= xmm4 ; xmm0[127:96] == w[i+3]^w[i+2]^w[i+1]^w[i] ; xmm0[95:64] == w[i+2]^w[i+1]^w[i] ; xmm0[63:32] == w[i+1]^w[i] @@ -150,8 +150,8 @@ aes128_keygen_assist: ; w[i+6] == RotWord(SubWord(w[i+3]))^Rcon^w[i+2]^w[i+1]^w[i], ; w[i+5] == RotWord(SubWord(w[i+3]))^Rcon^w[i+1]^w[i] and ; w[i+4] == RotWord(SubWord(w[i+3]))^Rcon^w[i]. 
- pshufd xmm6, xmm7, 0FFh ; xmm6[127:96] = xmm6[95:64] = xmm6[63:32] = xmm6[31:0] = xmm7[127:96] - pxor xmm0, xmm6 ; xmm0 ^= xmm6 + pshufd xmm4, xmm5, 0FFh ; xmm4[127:96] = xmm4[95:64] = xmm4[63:32] = xmm4[31:0] = xmm5[127:96] + pxor xmm0, xmm4 ; xmm0 ^= xmm4 ; xmm0[127:96] == w[i+7] == RotWord(SubWord(w[i+3]))^Rcon^w[i+3]^w[i+2]^w[i+1]^w[i] ; xmm0[95:64] == w[i+6] == RotWord(SubWord(w[i+3]))^Rcon^w[i+2]^w[i+1]^w[i] ; xmm0[63:32] == w[i+5] == RotWord(SubWord(w[i+3]))^Rcon^w[i+1]^w[i] @@ -168,33 +168,33 @@ aes128_keygen_assist: @raw_aes128_expand_key_schedule@20 endp @raw_aes128_invert_key_schedule@8 proc - movdqa xmm7, [ecx] - movdqa xmm6, [ecx + 0A0h] - movdqa [edx], xmm6 - movdqa [edx + 0A0h], xmm7 - - aesimc xmm7, [ecx + 10h] - aesimc xmm6, [ecx + 90h] - movdqa [edx + 10h], xmm6 - movdqa [edx + 90h], xmm7 - - aesimc xmm7, [ecx + 20h] - aesimc xmm6, [ecx + 80h] - movdqa [edx + 20h], xmm6 - movdqa [edx + 80h], xmm7 - - aesimc xmm7, [ecx + 30h] - aesimc xmm6, [ecx + 70h] - movdqa [edx + 30h], xmm6 - movdqa [edx + 70h], xmm7 - - aesimc xmm7, [ecx + 40h] - aesimc xmm6, [ecx + 60h] - movdqa [edx + 40h], xmm6 - movdqa [edx + 60h], xmm7 - - aesimc xmm7, [ecx + 50h] - movdqa [edx + 50h], xmm7 + movdqa xmm5, [ecx] + movdqa xmm4, [ecx + 0A0h] + movdqa [edx], xmm4 + movdqa [edx + 0A0h], xmm5 + + aesimc xmm5, [ecx + 10h] + aesimc xmm4, [ecx + 90h] + movdqa [edx + 10h], xmm4 + movdqa [edx + 90h], xmm5 + + aesimc xmm5, [ecx + 20h] + aesimc xmm4, [ecx + 80h] + movdqa [edx + 20h], xmm4 + movdqa [edx + 80h], xmm5 + + aesimc xmm5, [ecx + 30h] + aesimc xmm4, [ecx + 70h] + movdqa [edx + 30h], xmm4 + movdqa [edx + 70h], xmm5 + + aesimc xmm5, [ecx + 40h] + aesimc xmm4, [ecx + 60h] + movdqa [edx + 40h], xmm4 + movdqa [edx + 60h], xmm5 + + aesimc xmm5, [ecx + 50h] + movdqa [edx + 50h], xmm5 ret @raw_aes128_invert_key_schedule@8 endp |