# This file is generated from a similarly-named Perl script in the BoringSSL
# source tree. Do not edit by hand.
#if defined(__i386__)
#if defined(BORINGSSL_PREFIX)
#include <boringssl_prefix_symbols_asm.h>
#endif
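/* Vector permutation AES ("vpaes"): a constant-time AES that replaces
 * data-dependent table loads with SSSE3 pshufb lookups, following Mike
 * Hamburg's design (see the credit string embedded at the end of the
 * constant pool below). The SSSE3 instructions are emitted as raw .byte
 * sequences for old-assembler compatibility: 102,15,56,0 is 66 0F 38 00
 * (pshufb) and 102,15,58,15 is 66 0F 3A 0F (palignr). */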
.text
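/* Constant pool: 16-byte vectors encoded as .long quadruples. Per the
 * upstream vpaes-x86.pl layout these are the GF(2^4) inversion tables,
 * the 0x0F nibble mask, the input transform (k_ipt), the S-box and
 * MixColumns rotation tables, the ShiftRows permutations (k_sr), the
 * round-constant schedule (k_rcon), the 0x5B bias (k_s63), and the
 * output transform (k_opt), followed by the credit string. All code
 * below addresses this pool PC-relatively through %ebp, which points
 * 0x30 bytes past L_vpaes_consts. */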
.align 6,0x90
L_vpaes_consts:
.long 218628480,235210255,168496130,67568393
.long 252381056,17041926,33884169,51187212
.long 252645135,252645135,252645135,252645135
.long 1512730624,3266504856,1377990664,3401244816
.long 830229760,1275146365,2969422977,3447763452
.long 3411033600,2979783055,338359620,2782886510
.long 4209124096,907596821,221174255,1006095553
.long 191964160,3799684038,3164090317,1589111125
.long 182528256,1777043520,2877432650,3265356744
.long 1874708224,3503451415,3305285752,363511674
.long 1606117888,3487855781,1093350906,2384367825
.long 197121,67569157,134941193,202313229
.long 67569157,134941193,202313229,197121
.long 134941193,202313229,197121,67569157
.long 202313229,197121,67569157,134941193
.long 33619971,100992007,168364043,235736079
.long 235736079,33619971,100992007,168364043
.long 168364043,235736079,33619971,100992007
.long 100992007,168364043,235736079,33619971
.long 50462976,117835012,185207048,252579084
.long 252314880,51251460,117574920,184942860
.long 184682752,252054788,50987272,118359308
.long 118099200,185467140,251790600,50727180
.long 2946363062,528716217,1300004225,1881839624
.long 1532713819,1532713819,1532713819,1532713819
.long 3602276352,4288629033,3737020424,4153884961
.long 1354558464,32357713,2958822624,3775749553
.long 1201988352,132424512,1572796698,503232858
.long 2213177600,1597421020,4103937655,675398315
.byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105
.byte 111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83
.byte 83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117
.byte 114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105
.byte 118,101,114,115,105,116,121,41,0
.align 6,0x90
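/* __vpaes_preheat: makes %ebp an absolute pointer into the constant pool
 * by adding the caller's return address (position-independent code), then
 * preloads the constants the encryption core keeps live across rounds:
 * %xmm7 <- inversion table, %xmm6 <- 0x0F nibble mask. */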
.private_extern __vpaes_preheat
.align 4
__vpaes_preheat:
addl (%esp),%ebp
movdqa -48(%ebp),%xmm7
movdqa -16(%ebp),%xmm6
ret
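/* __vpaes_encrypt_core: encrypts the block in %xmm0 with the expanded key
 * at (%edx), whose round count is at 240(%edx); the result is left in
 * %xmm0. It applies the input transform and first round key, then each
 * loop iteration splits the state into low/high nibbles, pushes them
 * through the pshufb S-box tables, and folds in MixColumns via the
 * rotating mc_forward/mc_backward tables indexed by %ecx; the final
 * round uses the output S-box and a ShiftRows permutation instead.
 * Expects __vpaes_preheat's constants in %xmm6/%xmm7. */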
.private_extern __vpaes_encrypt_core
.align 4
__vpaes_encrypt_core:
movl $16,%ecx
movl 240(%edx),%eax
movdqa %xmm6,%xmm1
movdqa (%ebp),%xmm2
pandn %xmm0,%xmm1
pand %xmm6,%xmm0
movdqu (%edx),%xmm5
.byte 102,15,56,0,208
movdqa 16(%ebp),%xmm0
pxor %xmm5,%xmm2
psrld $4,%xmm1
addl $16,%edx
.byte 102,15,56,0,193
leal 192(%ebp),%ebx
pxor %xmm2,%xmm0
jmp L000enc_entry
.align 4,0x90
L001enc_loop:
movdqa 32(%ebp),%xmm4
movdqa 48(%ebp),%xmm0
.byte 102,15,56,0,226
.byte 102,15,56,0,195
pxor %xmm5,%xmm4
movdqa 64(%ebp),%xmm5
pxor %xmm4,%xmm0
movdqa -64(%ebx,%ecx,1),%xmm1
.byte 102,15,56,0,234
movdqa 80(%ebp),%xmm2
movdqa (%ebx,%ecx,1),%xmm4
.byte 102,15,56,0,211
movdqa %xmm0,%xmm3
pxor %xmm5,%xmm2
.byte 102,15,56,0,193
addl $16,%edx
pxor %xmm2,%xmm0
.byte 102,15,56,0,220
addl $16,%ecx
pxor %xmm0,%xmm3
.byte 102,15,56,0,193
andl $48,%ecx
subl $1,%eax
pxor %xmm3,%xmm0
L000enc_entry:
movdqa %xmm6,%xmm1
movdqa -32(%ebp),%xmm5
pandn %xmm0,%xmm1
psrld $4,%xmm1
pand %xmm6,%xmm0
.byte 102,15,56,0,232
movdqa %xmm7,%xmm3
pxor %xmm1,%xmm0
.byte 102,15,56,0,217
movdqa %xmm7,%xmm4
pxor %xmm5,%xmm3
.byte 102,15,56,0,224
movdqa %xmm7,%xmm2
pxor %xmm5,%xmm4
.byte 102,15,56,0,211
movdqa %xmm7,%xmm3
pxor %xmm0,%xmm2
.byte 102,15,56,0,220
movdqu (%edx),%xmm5
pxor %xmm1,%xmm3
jnz L001enc_loop
movdqa 96(%ebp),%xmm4
movdqa 112(%ebp),%xmm0
.byte 102,15,56,0,226
pxor %xmm5,%xmm4
.byte 102,15,56,0,195
movdqa 64(%ebx,%ecx,1),%xmm1
pxor %xmm4,%xmm0
.byte 102,15,56,0,193
ret
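/* __vpaes_schedule_core: key-schedule driver, shared between key sizes.
 * On entry %esi points at the user key, %edx at the round-key area,
 * %eax holds the key size in bits, %ecx the initial ShiftRows index, and
 * %edi selects decryption scheduling (never taken in this build: the only
 * caller, _GFp_vpaes_set_encrypt_key, clears it). Like __vpaes_preheat it
 * first makes %ebp absolute. Only the 128- and 256-bit schedules are
 * present; the 192-bit path appears to have been removed from this build
 * along with the decryption entry points. The xmm registers are zeroed
 * on exit to avoid leaking key material. */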
.private_extern __vpaes_schedule_core
.align 4
__vpaes_schedule_core:
addl (%esp),%ebp
movdqu (%esi),%xmm0
movdqa 320(%ebp),%xmm2
movdqa %xmm0,%xmm3
leal (%ebp),%ebx
movdqa %xmm2,4(%esp)
call __vpaes_schedule_transform
movdqa %xmm0,%xmm7
testl %edi,%edi
jnz L002schedule_am_decrypting
movdqu %xmm0,(%edx)
jmp L003schedule_go
L002schedule_am_decrypting:
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,217
movdqu %xmm3,(%edx)
xorl $48,%ecx
L003schedule_go:
cmpl $192,%eax
ja L004schedule_256
L005schedule_128:
movl $10,%eax
L006loop_schedule_128:
call __vpaes_schedule_round
decl %eax
jz L007schedule_mangle_last
call __vpaes_schedule_mangle
jmp L006loop_schedule_128
.align 4,0x90
L004schedule_256:
movdqu 16(%esi),%xmm0
call __vpaes_schedule_transform
movl $7,%eax
L008loop_schedule_256:
call __vpaes_schedule_mangle
movdqa %xmm0,%xmm6
call __vpaes_schedule_round
decl %eax
jz L007schedule_mangle_last
call __vpaes_schedule_mangle
pshufd $255,%xmm0,%xmm0
movdqa %xmm7,20(%esp)
movdqa %xmm6,%xmm7
call L_vpaes_schedule_low_round
movdqa 20(%esp),%xmm7
jmp L008loop_schedule_256
.align 4,0x90
L007schedule_mangle_last:
leal 384(%ebp),%ebx
testl %edi,%edi
jnz L009schedule_mangle_last_dec
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,193
leal 352(%ebp),%ebx
addl $32,%edx
L009schedule_mangle_last_dec:
addl $-16,%edx
pxor 336(%ebp),%xmm0
call __vpaes_schedule_transform
movdqu %xmm0,(%edx)
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
ret
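/* __vpaes_schedule_round: one full round of the key schedule - rotate the
 * rcon vector kept at 8(%esp) and xor a round constant into %xmm7, then
 * broadcast and byte-rotate the high word of %xmm0 (RotWord). Falling
 * into L_vpaes_schedule_low_round then smears the previous round key
 * across %xmm7, adds the k_s63 bias, and runs the word through the
 * pshufb-based S-box (SubWord). The 256-bit schedule calls the low-round
 * entry directly for its odd half-rounds. The new round key is returned
 * in %xmm0 and accumulated in %xmm7. */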
.private_extern __vpaes_schedule_round
.align 4
__vpaes_schedule_round:
movdqa 8(%esp),%xmm2
pxor %xmm1,%xmm1
.byte 102,15,58,15,202,15
.byte 102,15,58,15,210,15
pxor %xmm1,%xmm7
pshufd $255,%xmm0,%xmm0
.byte 102,15,58,15,192,1
movdqa %xmm2,8(%esp)
L_vpaes_schedule_low_round:
movdqa %xmm7,%xmm1
pslldq $4,%xmm7
pxor %xmm1,%xmm7
movdqa %xmm7,%xmm1
pslldq $8,%xmm7
pxor %xmm1,%xmm7
pxor 336(%ebp),%xmm7
movdqa -16(%ebp),%xmm4
movdqa -48(%ebp),%xmm5
movdqa %xmm4,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1
pand %xmm4,%xmm0
movdqa -32(%ebp),%xmm2
.byte 102,15,56,0,208
pxor %xmm1,%xmm0
movdqa %xmm5,%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
movdqa %xmm5,%xmm4
.byte 102,15,56,0,224
pxor %xmm2,%xmm4
movdqa %xmm5,%xmm2
.byte 102,15,56,0,211
pxor %xmm0,%xmm2
movdqa %xmm5,%xmm3
.byte 102,15,56,0,220
pxor %xmm1,%xmm3
movdqa 32(%ebp),%xmm4
.byte 102,15,56,0,226
movdqa 48(%ebp),%xmm0
.byte 102,15,56,0,195
pxor %xmm4,%xmm0
pxor %xmm7,%xmm0
movdqa %xmm0,%xmm7
ret
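/* __vpaes_schedule_transform: applies a linear basis-change transform to
 * %xmm0, splitting it into nibbles and combining pshufb lookups in the
 * low/high tables at (%ebx) and 16(%ebx); used for the input transform
 * (k_ipt) and the final output transform (k_opt). Clobbers %xmm1/%xmm2. */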
.private_extern __vpaes_schedule_transform
.align 4
__vpaes_schedule_transform:
movdqa -16(%ebp),%xmm2
movdqa %xmm2,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1
pand %xmm2,%xmm0
movdqa (%ebx),%xmm2
.byte 102,15,56,0,208
movdqa 16(%ebx),%xmm0
.byte 102,15,56,0,193
pxor %xmm2,%xmm0
ret
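/* __vpaes_schedule_mangle: writes the round key in %xmm0 out to (%edx) in
 * the form the encryption core expects. The forward path adds the k_s63
 * bias and smears it with three pshufb passes; the decrypting path below
 * it is unreachable in this build (%edi is always zero). Both paths end
 * by applying the ShiftRows permutation selected by %ecx, which rotates
 * through its four variants. */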
.private_extern __vpaes_schedule_mangle
.align 4
__vpaes_schedule_mangle:
movdqa %xmm0,%xmm4
movdqa 128(%ebp),%xmm5
testl %edi,%edi
jnz L010schedule_mangle_dec
addl $16,%edx
pxor 336(%ebp),%xmm4
.byte 102,15,56,0,229
movdqa %xmm4,%xmm3
.byte 102,15,56,0,229
pxor %xmm4,%xmm3
.byte 102,15,56,0,229
pxor %xmm4,%xmm3
jmp L011schedule_mangle_both
.align 4,0x90
L010schedule_mangle_dec:
movdqa -16(%ebp),%xmm2
leal (%ebp),%esi
movdqa %xmm2,%xmm1
pandn %xmm4,%xmm1
psrld $4,%xmm1
pand %xmm2,%xmm4
movdqa (%esi),%xmm2
.byte 102,15,56,0,212
movdqa 16(%esi),%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
.byte 102,15,56,0,221
movdqa 32(%esi),%xmm2
.byte 102,15,56,0,212
pxor %xmm3,%xmm2
movdqa 48(%esi),%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
.byte 102,15,56,0,221
movdqa 64(%esi),%xmm2
.byte 102,15,56,0,212
pxor %xmm3,%xmm2
movdqa 80(%esi),%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
.byte 102,15,56,0,221
movdqa 96(%esi),%xmm2
.byte 102,15,56,0,212
pxor %xmm3,%xmm2
movdqa 112(%esi),%xmm3
.byte 102,15,56,0,217
pxor %xmm2,%xmm3
addl $-16,%edx
L011schedule_mangle_both:
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,217
addl $-16,%ecx
andl $48,%ecx
movdqu %xmm3,(%edx)
ret
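/* _GFp_vpaes_set_encrypt_key: C-callable key-expansion entry point. The
 * prototype is not stated in this file; judging from the argument use, a
 * reasonable assumption (mirroring upstream vpaes_set_encrypt_key) is:
 *
 *   int GFp_vpaes_set_encrypt_key(const uint8_t *user_key, unsigned bits,
 *                                 AES_KEY *key);
 *
 * It switches to a 16-byte-aligned scratch stack frame, stores
 * bits/32 + 5 as the round count at 240(%edx), clears %edi to request
 * encryption scheduling, and runs __vpaes_schedule_core. Returns 0. */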
.globl _GFp_vpaes_set_encrypt_key
.private_extern _GFp_vpaes_set_encrypt_key
.align 4
_GFp_vpaes_set_encrypt_key:
L_GFp_vpaes_set_encrypt_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi
leal -56(%esp),%ebx
movl 24(%esp),%eax
andl $-16,%ebx
movl 28(%esp),%edx
xchgl %esp,%ebx
movl %ebx,48(%esp)
movl %eax,%ebx
shrl $5,%ebx
addl $5,%ebx
movl %ebx,240(%edx)
movl $48,%ecx
movl $0,%edi
leal L_vpaes_consts+0x30-L012pic_point,%ebp
call __vpaes_schedule_core
L012pic_point:
movl 48(%esp),%esp
xorl %eax,%eax
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
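/* _GFp_vpaes_encrypt: encrypts one 16-byte block. Again the prototype is
 * an assumption based on the calling convention and upstream
 * vpaes_encrypt:
 *
 *   void GFp_vpaes_encrypt(const uint8_t *in, uint8_t *out,
 *                          const AES_KEY *key);
 *
 * so a hypothetical C-level use would look like:
 *
 *   AES_KEY key;
 *   GFp_vpaes_set_encrypt_key(user_key, 128, &key);
 *   GFp_vpaes_encrypt(plaintext, ciphertext, &key);
 *
 * It preheats the constants, aligns a scratch stack frame, and runs
 * __vpaes_encrypt_core on the loaded block. */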
.globl _GFp_vpaes_encrypt
.private_extern _GFp_vpaes_encrypt
.align 4
_GFp_vpaes_encrypt:
L_GFp_vpaes_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
leal L_vpaes_consts+0x30-L013pic_point,%ebp
call __vpaes_preheat
L013pic_point:
movl 20(%esp),%esi
leal -56(%esp),%ebx
movl 24(%esp),%edi
andl $-16,%ebx
movl 28(%esp),%edx
xchgl %esp,%ebx
movl %ebx,48(%esp)
movdqu (%esi),%xmm0
call __vpaes_encrypt_core
movdqu %xmm0,(%edi)
movl 48(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
#endif