Diffstat (limited to 'secure/lib/libcrypto/amd64/ghash-x86_64.S')
-rw-r--r--  secure/lib/libcrypto/amd64/ghash-x86_64.S  48
1 file changed, 24 insertions(+), 24 deletions(-)
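
The change throughout is purely notational: binary immediates such as $0b01001110 are rewritten as their decimal equivalents (here $78) because the 0b integer-literal prefix is not accepted by all assemblers; the encoded instructions are identical. As a quick sanity check, a minimal Python sketch (not part of the diff; the pshufd() helper is hypothetical and models only the dword selection the immediate performs):

    # Emulate PSHUFD's dword shuffle: dest dword i = src dword ((imm8 >> 2*i) & 3).
    def pshufd(src, imm8):
        return [src[(imm8 >> (2 * i)) & 3] for i in range(4)]

    assert 0b01001110 == 78           # same immediate, different notation
    assert 0b11111111 == 255

    print(pshufd([0, 1, 2, 3], 78))   # [2, 3, 0, 1]: swaps the two 64-bit halves
    print(pshufd([0, 1, 2, 3], 255))  # [3, 3, 3, 3]: broadcasts the top dword
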
diff --git a/secure/lib/libcrypto/amd64/ghash-x86_64.S b/secure/lib/libcrypto/amd64/ghash-x86_64.S
index ef024bf..950a4bb 100644
--- a/secure/lib/libcrypto/amd64/ghash-x86_64.S
+++ b/secure/lib/libcrypto/amd64/ghash-x86_64.S
@@ -662,10 +662,10 @@ gcm_ghash_4bit:
gcm_init_clmul:
.L_init_clmul:
movdqu (%rsi),%xmm2
- pshufd $0b01001110,%xmm2,%xmm2
- pshufd $0b11111111,%xmm2,%xmm4
+ pshufd $78,%xmm2,%xmm2
+ pshufd $255,%xmm2,%xmm4
movdqa %xmm2,%xmm3
psllq $1,%xmm2
pxor %xmm5,%xmm5
@@ -679,11 +679,11 @@ gcm_init_clmul:
pxor %xmm5,%xmm2
- pshufd $0b01001110,%xmm2,%xmm6
+ pshufd $78,%xmm2,%xmm6
movdqa %xmm2,%xmm0
pxor %xmm2,%xmm6
movdqa %xmm0,%xmm1
- pshufd $0b01001110,%xmm0,%xmm3
+ pshufd $78,%xmm0,%xmm3
pxor %xmm0,%xmm3
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
@@ -719,8 +719,8 @@ gcm_init_clmul:
pxor %xmm4,%xmm0
psrlq $1,%xmm0
pxor %xmm1,%xmm0
- pshufd $0b01001110,%xmm2,%xmm3
- pshufd $0b01001110,%xmm0,%xmm4
+ pshufd $78,%xmm2,%xmm3
+ pshufd $78,%xmm0,%xmm4
pxor %xmm2,%xmm3
movdqu %xmm2,0(%rdi)
pxor %xmm0,%xmm4
@@ -728,7 +728,7 @@ gcm_init_clmul:
.byte 102,15,58,15,227,8
movdqu %xmm4,32(%rdi)
movdqa %xmm0,%xmm1
- pshufd $0b01001110,%xmm0,%xmm3
+ pshufd $78,%xmm0,%xmm3
pxor %xmm0,%xmm3
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
@@ -766,7 +766,7 @@ gcm_init_clmul:
pxor %xmm1,%xmm0
movdqa %xmm0,%xmm5
movdqa %xmm0,%xmm1
- pshufd $0b01001110,%xmm0,%xmm3
+ pshufd $78,%xmm0,%xmm3
pxor %xmm0,%xmm3
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
@@ -802,8 +802,8 @@ gcm_init_clmul:
pxor %xmm4,%xmm0
psrlq $1,%xmm0
pxor %xmm1,%xmm0
- pshufd $0b01001110,%xmm5,%xmm3
- pshufd $0b01001110,%xmm0,%xmm4
+ pshufd $78,%xmm5,%xmm3
+ pshufd $78,%xmm0,%xmm4
pxor %xmm5,%xmm3
movdqu %xmm5,48(%rdi)
pxor %xmm0,%xmm4
@@ -823,7 +823,7 @@ gcm_gmult_clmul:
movdqu 32(%rsi),%xmm4
.byte 102,15,56,0,197
movdqa %xmm0,%xmm1
- pshufd $0b01001110,%xmm0,%xmm3
+ pshufd $78,%xmm0,%xmm3
pxor %xmm0,%xmm3
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
@@ -900,14 +900,14 @@ gcm_ghash_clmul:
.byte 102,65,15,56,0,218
.byte 102,69,15,56,0,218
movdqa %xmm3,%xmm5
- pshufd $0b01001110,%xmm3,%xmm4
+ pshufd $78,%xmm3,%xmm4
pxor %xmm3,%xmm4
.byte 102,15,58,68,218,0
.byte 102,15,58,68,234,17
.byte 102,15,58,68,231,0
movdqa %xmm11,%xmm13
- pshufd $0b01001110,%xmm11,%xmm12
+ pshufd $78,%xmm11,%xmm12
pxor %xmm11,%xmm12
.byte 102,68,15,58,68,222,0
.byte 102,68,15,58,68,238,17
@@ -922,12 +922,12 @@ gcm_ghash_clmul:
.byte 102,69,15,56,0,218
.byte 102,69,15,56,0,194
movdqa %xmm11,%xmm13
- pshufd $0b01001110,%xmm11,%xmm12
+ pshufd $78,%xmm11,%xmm12
pxor %xmm8,%xmm0
pxor %xmm11,%xmm12
.byte 102,69,15,58,68,222,0
movdqa %xmm0,%xmm1
- pshufd $0b01001110,%xmm0,%xmm8
+ pshufd $78,%xmm0,%xmm8
pxor %xmm0,%xmm8
.byte 102,69,15,58,68,238,17
.byte 102,68,15,58,68,231,0
@@ -950,14 +950,14 @@ gcm_ghash_clmul:
movdqu 32(%rdx),%xmm3
movdqa %xmm11,%xmm13
.byte 102,68,15,58,68,199,16
- pshufd $0b01001110,%xmm11,%xmm12
+ pshufd $78,%xmm11,%xmm12
xorps %xmm5,%xmm1
pxor %xmm11,%xmm12
.byte 102,65,15,56,0,218
movups 32(%rsi),%xmm7
xorps %xmm4,%xmm8
.byte 102,68,15,58,68,218,0
- pshufd $0b01001110,%xmm3,%xmm4
+ pshufd $78,%xmm3,%xmm4
pxor %xmm0,%xmm8
movdqa %xmm3,%xmm5
@@ -1001,7 +1001,7 @@ gcm_ghash_clmul:
movdqa %xmm11,%xmm13
pxor %xmm12,%xmm4
- pshufd $0b01001110,%xmm11,%xmm12
+ pshufd $78,%xmm11,%xmm12
pxor %xmm9,%xmm0
pxor %xmm8,%xmm1
pxor %xmm11,%xmm12
@@ -1011,7 +1011,7 @@ gcm_ghash_clmul:
movdqa %xmm0,%xmm1
.byte 102,69,15,58,68,238,17
xorps %xmm11,%xmm3
- pshufd $0b01001110,%xmm0,%xmm8
+ pshufd $78,%xmm0,%xmm8
pxor %xmm0,%xmm8
.byte 102,68,15,58,68,231,0
@@ -1080,7 +1080,7 @@ gcm_ghash_clmul:
pxor %xmm8,%xmm0
movdqa %xmm3,%xmm5
- pshufd $0b01001110,%xmm3,%xmm4
+ pshufd $78,%xmm3,%xmm4
pxor %xmm3,%xmm4
.byte 102,15,58,68,218,0
.byte 102,15,58,68,234,17
@@ -1097,7 +1097,7 @@ gcm_ghash_clmul:
.Lmod_loop:
movdqa %xmm0,%xmm1
movdqa %xmm4,%xmm8
- pshufd $0b01001110,%xmm0,%xmm4
+ pshufd $78,%xmm0,%xmm4
pxor %xmm0,%xmm4
.byte 102,15,58,68,198,0
@@ -1135,7 +1135,7 @@ gcm_ghash_clmul:
pslldq $8,%xmm0
psrldq $8,%xmm8
pxor %xmm9,%xmm0
- pshufd $0b01001110,%xmm5,%xmm4
+ pshufd $78,%xmm5,%xmm4
pxor %xmm8,%xmm1
pxor %xmm5,%xmm4
@@ -1157,7 +1157,7 @@ gcm_ghash_clmul:
.Leven_tail:
movdqa %xmm0,%xmm1
movdqa %xmm4,%xmm8
- pshufd $0b01001110,%xmm0,%xmm4
+ pshufd $78,%xmm0,%xmm4
pxor %xmm0,%xmm4
.byte 102,15,58,68,198,0
@@ -1205,7 +1205,7 @@ gcm_ghash_clmul:
.byte 102,69,15,56,0,194
pxor %xmm8,%xmm0
movdqa %xmm0,%xmm1
- pshufd $0b01001110,%xmm0,%xmm3
+ pshufd $78,%xmm0,%xmm3
pxor %xmm0,%xmm3
.byte 102,15,58,68,194,0
.byte 102,15,58,68,202,17
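
A note on the recurring idioms above, for readers new to this file: the pair pshufd $78,%xmmN,%xmmM followed by pxor %xmmN,%xmmM swaps the two 64-bit halves of the source and XORs them together, leaving the hi^lo value that the Karatsuba-style carry-less multiplication in this GHASH code consumes (78 = 0x4E selects dwords 2,3,0,1; 255 = 0xFF broadcasts dword 3). The .byte 102,15,58,68,... and 102,15,56,0,... sequences are hand-encoded pclmulqdq and pshufb instructions (opcodes 66 0F 3A 44 and 66 0F 38 00), emitted as raw bytes presumably so the file assembles with toolchains that predate those mnemonics.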