#if defined(__i386__)
# -----------------------------------------------------------------------------
# Vector Permutation AES ("vpaes") for 32-bit x86 / SSSE3.
# Machine-generated (perl-asm style) constant-time AES using pshufb-based
# table lookups instead of memory S-box tables.  AT&T syntax, ELF, PIC.
#
# Conventions visible in this file:
#   - All constants live in .L_vpaes_consts.  Public entry points compute
#     %ebp = &.L_vpaes_consts + 0x30 via a call/pop-style PIC sequence
#     (leal const-pic_point,%ebp before a call whose return address is the
#     pic_point label; the callee does "addl (%esp),%ebp").
#   - SSSE3 instructions not known to old assemblers (pshufb, palignr) are
#     emitted as raw ".byte" sequences; the decoded form is noted beside each.
#   - %edx points into the AES_KEY-style schedule; 240(%edx) holds the round
#     count (matches OpenSSL's AES_KEY.rounds offset — NOTE(review): layout
#     assumed from the 240 offset, confirm against the C header).
# -----------------------------------------------------------------------------
.text
.align 64
.L_vpaes_consts:
# Lookup tables for the vector-permute AES construction (low/high nibble
# split S-box factors, MixColumns rotations, key-schedule transforms).
# Presumably these correspond to the k_inv/k_sb1/k_sb2/k_sbo/k_mc/k_sr/
# k_rcon/k_opt/k_deskew/k_dks*/k_dsb* tables of OpenSSL's vpaes-x86.pl —
# TODO confirm names against the generator script.
.long 218628480,235210255,168496130,67568393
.long 252381056,17041926,33884169,51187212
.long 252645135,252645135,252645135,252645135
.long 1512730624,3266504856,1377990664,3401244816
.long 830229760,1275146365,2969422977,3447763452
.long 3411033600,2979783055,338359620,2782886510
.long 4209124096,907596821,221174255,1006095553
.long 191964160,3799684038,3164090317,1589111125
.long 182528256,1777043520,2877432650,3265356744
.long 1874708224,3503451415,3305285752,363511674
.long 1606117888,3487855781,1093350906,2384367825
.long 197121,67569157,134941193,202313229
.long 67569157,134941193,202313229,197121
.long 134941193,202313229,197121,67569157
.long 202313229,197121,67569157,134941193
.long 33619971,100992007,168364043,235736079
.long 235736079,33619971,100992007,168364043
.long 168364043,235736079,33619971,100992007
.long 100992007,168364043,235736079,33619971
.long 50462976,117835012,185207048,252579084
.long 252314880,51251460,117574920,184942860
.long 184682752,252054788,50987272,118359308
.long 118099200,185467140,251790600,50727180
.long 2946363062,528716217,1300004225,1881839624
.long 1532713819,1532713819,1532713819,1532713819
.long 3602276352,4288629033,3737020424,4153884961
.long 1354558464,32357713,2958822624,3775749553
.long 1201988352,132424512,1572796698,503232858
.long 2213177600,1597421020,4103937655,675398315
.long 2749646592,4273543773,1511898873,121693092
.long 3040248576,1103263732,2871565598,1608280554
.long 2236667136,2588920351,482954393,64377734
.long 3069987328,291237287,2117370568,3650299247
.long 533321216,3573750986,2572112006,1401264716
.long 1339849704,2721158661,548607111,3445553514
.long 2128193280,3054596040,2183486460,1257083700
.long 655635200,1165381986,3923443150,2344132524
.long 190078720,256924420,290342170,357187870
.long 1610966272,2263057382,4103205268,309794674
.long 2592527872,2233205587,1335446729,3402964816
.long 3973531904,3225098121,3002836325,1918774430
.long 3870401024,2102906079,2284471353,4117666579
.long 617007872,1021508343,366931923,691083277
.long 2528395776,3491914898,2968704004,1613121270
.long 3445188352,3247741094,844474987,4093578302
.long 651481088,1190302358,1689581232,574775300
.long 4289380608,206939853,2555985458,2489840491
.long 2130264064,327674451,3566485037,3349835193
.long 2470714624,316102159,3636825756,3393945945
# ASCII: "Vector Permutation AES for x86/SSSE3, Mike Hamburg (Stanford University)\0"
.byte 86,101,99,116,111,114,32,80,101,114,109,117,116,97,116,105
.byte 111,110,32,65,69,83,32,102,111,114,32,120,56,54,47,83
.byte 83,83,69,51,44,32,77,105,107,101,32,72,97,109,98,117
.byte 114,103,32,40,83,116,97,110,102,111,114,100,32,85,110,105
.byte 118,101,114,115,105,116,121,41,0
.align 64

# _vpaes_preheat: resolve the PIC constant base and preload the two
# inverse-S-box helper tables used by the encrypt/decrypt cores.
#   In:   %ebp = .L_vpaes_consts+0x30 minus the caller's pic_point label;
#         (%esp) = return address == that pic_point, so after the add
#         %ebp = absolute &.L_vpaes_consts+0x30.
#   Out:  %xmm7 = table at -48(%ebp), %xmm6 = table at -16(%ebp)
#         (xmm6 is used throughout as the low-nibble mask table).
.hidden _vpaes_preheat
.type _vpaes_preheat,@function
.align 16
_vpaes_preheat:
addl (%esp),%ebp              # %ebp += return address -> absolute const base
movdqa -48(%ebp),%xmm7
movdqa -16(%ebp),%xmm6
ret
.size _vpaes_preheat,.-_vpaes_preheat

# _vpaes_encrypt_core: encrypt one 16-byte block.
#   In:   %xmm0 = plaintext block, %edx = key schedule, %ebp = const base,
#         %xmm6/%xmm7 preloaded by _vpaes_preheat.
#   Out:  %xmm0 = ciphertext block.
#   Uses: %eax (round counter from 240(%edx)), %ebx, %ecx (rotating 0/16/32/48
#         index selecting the per-round ShiftRows permutation at 192(%ebp)),
#         %xmm1-%xmm5.  %edx advances 16 bytes per round key consumed.
.hidden _vpaes_encrypt_core
.type _vpaes_encrypt_core,@function
.align 16
_vpaes_encrypt_core:
movl $16,%ecx
movl 240(%edx),%eax           # %eax = number of rounds
movdqa %xmm6,%xmm1
movdqa (%ebp),%xmm2
pandn %xmm0,%xmm1             # %xmm1 = high nibbles (inverted low-nibble mask)
pand %xmm6,%xmm0              # %xmm0 = low nibbles
movdqu (%edx),%xmm5           # first round key
.byte 102,15,56,0,208         # pshufb %xmm0,%xmm2
movdqa 16(%ebp),%xmm0
pxor %xmm5,%xmm2              # AddRoundKey (low half of input transform)
psrld $4,%xmm1                # shift high nibbles into low positions
addl $16,%edx
.byte 102,15,56,0,193         # pshufb %xmm1,%xmm0
leal 192(%ebp),%ebx           # %ebx -> ShiftRows permutation tables
pxor %xmm2,%xmm0
jmp .L000enc_entry
.align 16
.L001enc_loop:
# Middle round: SubBytes results (in %xmm2/%xmm3 from enc_entry) are combined
# through the sb1/sb2 tables, then MixColumns is done with pshufb rotations.
movdqa 32(%ebp),%xmm4
movdqa 48(%ebp),%xmm0
.byte 102,15,56,0,226         # pshufb %xmm2,%xmm4
.byte 102,15,56,0,195         # pshufb %xmm3,%xmm0
pxor %xmm5,%xmm4              # fold in round key
movdqa 64(%ebp),%xmm5
pxor %xmm4,%xmm0
movdqa -64(%ebx,%ecx,1),%xmm1 # per-round ShiftRows permutation
.byte 102,15,56,0,234         # pshufb %xmm2,%xmm5
movdqa 80(%ebp),%xmm2
movdqa (%ebx,%ecx,1),%xmm4
.byte 102,15,56,0,211         # pshufb %xmm3,%xmm2
movdqa %xmm0,%xmm3
pxor %xmm5,%xmm2
.byte 102,15,56,0,193         # pshufb %xmm1,%xmm0
addl $16,%edx                 # next round key
pxor %xmm2,%xmm0
.byte 102,15,56,0,220         # pshufb %xmm4,%xmm3
addl $16,%ecx
pxor %xmm0,%xmm3
.byte 102,15,56,0,193         # pshufb %xmm1,%xmm0
andl $48,%ecx                 # keep %ecx in {0,16,32,48}
subl $1,%eax                  # one round done; sets ZF for jnz below
pxor %xmm3,%xmm0
.L000enc_entry:
# Split state into nibbles and run the GF(2^4) inversion network
# (tables at -32(%ebp) and in %xmm7), leaving the two SubBytes factors
# in %xmm2/%xmm3 for the next loop iteration.
movdqa %xmm6,%xmm1
movdqa -32(%ebp),%xmm5
pandn %xmm0,%xmm1
psrld $4,%xmm1                # %xmm1 = high nibbles
pand %xmm6,%xmm0              # %xmm0 = low nibbles
.byte 102,15,56,0,232         # pshufb %xmm0,%xmm5
movdqa %xmm7,%xmm3
pxor %xmm1,%xmm0
.byte 102,15,56,0,217         # pshufb %xmm1,%xmm3
movdqa %xmm7,%xmm4
pxor %xmm5,%xmm3
.byte 102,15,56,0,224         # pshufb %xmm0,%xmm4
movdqa %xmm7,%xmm2
pxor %xmm5,%xmm4
.byte 102,15,56,0,211         # pshufb %xmm3,%xmm2
movdqa %xmm7,%xmm3
pxor %xmm0,%xmm2
.byte 102,15,56,0,220         # pshufb %xmm4,%xmm3
movdqu (%edx),%xmm5           # load next round key
pxor %xmm1,%xmm3
jnz .L001enc_loop             # ZF from "subl $1,%eax" above
# Last round: sbo tables (96/112(%ebp)), final AddRoundKey, final ShiftRows.
movdqa 96(%ebp),%xmm4
movdqa 112(%ebp),%xmm0
.byte 102,15,56,0,226         # pshufb %xmm2,%xmm4
pxor %xmm5,%xmm4
.byte 102,15,56,0,195         # pshufb %xmm3,%xmm0
movdqa 64(%ebx,%ecx,1),%xmm1
pxor %xmm4,%xmm0
.byte 102,15,56,0,193         # pshufb %xmm1,%xmm0
ret
.size _vpaes_encrypt_core,.-_vpaes_encrypt_core

# _vpaes_decrypt_core: decrypt one 16-byte block.
#   In:   %xmm0 = ciphertext block, %edx = key schedule, %ebp = const base,
#         %xmm6/%xmm7 preloaded by _vpaes_preheat.
#   Out:  %xmm0 = plaintext block.
#   Uses: %eax (rounds), %ebx -> decrypt tables at 608(%ebp), %ecx holds a
#         precomputed pointer to the final output permutation; the inverse
#         MixColumns is folded in by rotating %xmm5 (palignr) each round.
.hidden _vpaes_decrypt_core
.type _vpaes_decrypt_core,@function
.align 16
_vpaes_decrypt_core:
leal 608(%ebp),%ebx           # %ebx -> decryption table block
movl 240(%edx),%eax           # %eax = number of rounds
movdqa %xmm6,%xmm1
movdqa -64(%ebx),%xmm2
pandn %xmm0,%xmm1
movl %eax,%ecx
psrld $4,%xmm1                # high nibbles
movdqu (%edx),%xmm5           # first round key
shll $4,%ecx
pand %xmm6,%xmm0              # low nibbles
.byte 102,15,56,0,208         # pshufb %xmm0,%xmm2
movdqa -48(%ebx),%xmm0
xorl $48,%ecx
.byte 102,15,56,0,193         # pshufb %xmm1,%xmm0
andl $48,%ecx                 # %ecx = f(rounds) in {0,16,32,48}
pxor %xmm5,%xmm2              # AddRoundKey
movdqa 176(%ebp),%xmm5        # rotation constant for inverse MixColumns
pxor %xmm2,%xmm0
addl $16,%edx
leal -352(%ebx,%ecx,1),%ecx   # %ecx -> final-permutation table entry
jmp .L002dec_entry
.align 16
.L003dec_loop:
# Inverse round: four table-pair lookups (dsb9/dsbd/dsbb/dsbe at
# -32..80(%ebx)) accumulate into %xmm0, interleaved with pshufb-rotations
# of the accumulator by %xmm5 to realize inverse MixColumns.
movdqa -32(%ebx),%xmm4
movdqa -16(%ebx),%xmm1
.byte 102,15,56,0,226         # pshufb %xmm2,%xmm4
.byte 102,15,56,0,203         # pshufb %xmm3,%xmm1
pxor %xmm4,%xmm0
movdqa (%ebx),%xmm4
pxor %xmm1,%xmm0
movdqa 16(%ebx),%xmm1
.byte 102,15,56,0,226         # pshufb %xmm2,%xmm4
.byte 102,15,56,0,197         # pshufb %xmm5,%xmm0  (rotate accumulator)
.byte 102,15,56,0,203         # pshufb %xmm3,%xmm1
pxor %xmm4,%xmm0
movdqa 32(%ebx),%xmm4
pxor %xmm1,%xmm0
movdqa 48(%ebx),%xmm1
.byte 102,15,56,0,226         # pshufb %xmm2,%xmm4
.byte 102,15,56,0,197         # pshufb %xmm5,%xmm0
.byte 102,15,56,0,203         # pshufb %xmm3,%xmm1
pxor %xmm4,%xmm0
movdqa 64(%ebx),%xmm4
pxor %xmm1,%xmm0
movdqa 80(%ebx),%xmm1
.byte 102,15,56,0,226         # pshufb %xmm2,%xmm4
.byte 102,15,56,0,197         # pshufb %xmm5,%xmm0
.byte 102,15,56,0,203         # pshufb %xmm3,%xmm1
pxor %xmm4,%xmm0
addl $16,%edx                 # next round key
.byte 102,15,58,15,237,12     # palignr $12,%xmm5,%xmm5 (advance rotation)
pxor %xmm1,%xmm0
subl $1,%eax                  # sets ZF for jnz below
.L002dec_entry:
# Shared nibble-split + GF(2^4) inversion network (same structure as
# .L000enc_entry), producing SubBytes factors in %xmm2/%xmm3.
movdqa %xmm6,%xmm1
movdqa -32(%ebp),%xmm2
pandn %xmm0,%xmm1
pand %xmm6,%xmm0              # low nibbles
psrld $4,%xmm1                # high nibbles
.byte 102,15,56,0,208         # pshufb %xmm0,%xmm2
movdqa %xmm7,%xmm3
pxor %xmm1,%xmm0
.byte 102,15,56,0,217         # pshufb %xmm1,%xmm3
movdqa %xmm7,%xmm4
pxor %xmm2,%xmm3
.byte 102,15,56,0,224         # pshufb %xmm0,%xmm4
pxor %xmm2,%xmm4
movdqa %xmm7,%xmm2
.byte 102,15,56,0,211         # pshufb %xmm3,%xmm2
movdqa %xmm7,%xmm3
pxor %xmm0,%xmm2
.byte 102,15,56,0,220         # pshufb %xmm4,%xmm3
movdqu (%edx),%xmm0           # load next round key
pxor %xmm1,%xmm3
jnz .L003dec_loop
# Last round: dsbo tables (96/112(%ebx)), key add, undo output permutation.
movdqa 96(%ebx),%xmm4
.byte 102,15,56,0,226         # pshufb %xmm2,%xmm4
pxor %xmm0,%xmm4              # AddRoundKey
movdqa 112(%ebx),%xmm0
movdqa (%ecx),%xmm2           # final permutation chosen in prologue
.byte 102,15,56,0,195         # pshufb %xmm3,%xmm0
pxor %xmm4,%xmm0
.byte 102,15,56,0,194         # pshufb %xmm2,%xmm0
ret
.size _vpaes_decrypt_core,.-_vpaes_decrypt_core

# _vpaes_schedule_core: expand a user key into the vpaes key schedule.
#   In:   %esi = user key, %edx = schedule output, %eax = key bits
#         (128/192/256), %edi = 0 for encryption / nonzero for decryption,
#         %ecx = initial mangle index, %ebp = PIC-relative const base
#         (fixed up here, same call/pop trick as _vpaes_preheat).
#   Out:  schedule written via %edx; all xmm registers cleared on exit
#         (avoid leaving key material in registers).
#   Note: expects 16-byte-aligned scratch at 4/8/20(%esp) — the public
#         wrappers arrange the stack accordingly before calling.
.hidden _vpaes_schedule_core
.type _vpaes_schedule_core,@function
.align 16
_vpaes_schedule_core:
addl (%esp),%ebp              # %ebp = absolute &.L_vpaes_consts+0x30
movdqu (%esi),%xmm0           # load first 16 key bytes
movdqa 320(%ebp),%xmm2
movdqa %xmm0,%xmm3            # keep raw key for the decrypt path below
leal (%ebp),%ebx
movdqa %xmm2,4(%esp)          # spill transform table for schedule_round
call _vpaes_schedule_transform
movdqa %xmm0,%xmm7            # %xmm7 = transformed key (schedule state)
testl %edi,%edi
jnz .L004schedule_am_decrypting
movdqu %xmm0,(%edx)           # encrypting: store first round key as-is
jmp .L005schedule_go
.L004schedule_am_decrypting:
# Decrypting: store the raw key permuted by the current mangle permutation.
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,217         # pshufb %xmm1,%xmm3
movdqu %xmm3,(%edx)
xorl $48,%ecx
.L005schedule_go:
cmpl $192,%eax                # dispatch on key size in bits
ja .L006schedule_256
je .L007schedule_192
.L008schedule_128:
# AES-128: 10 rounds of schedule_round, mangling between rounds.
movl $10,%eax
.L009loop_schedule_128:
call _vpaes_schedule_round
decl %eax
jz .L010schedule_mangle_last
call _vpaes_schedule_mangle
jmp .L009loop_schedule_128
.align 16
.L007schedule_192:
# AES-192: load bytes 8..23 as the high part; %xmm6 carries the extra
# 64 bits of key material, smeared across iterations.
movdqu 8(%esi),%xmm0
call _vpaes_schedule_transform
movdqa %xmm0,%xmm6
pxor %xmm4,%xmm4
movhlps %xmm4,%xmm6           # zero high 64 bits of %xmm6
movl $4,%eax
.L011loop_schedule_192:
call _vpaes_schedule_round
.byte 102,15,58,15,198,8      # palignr $8,%xmm6,%xmm0
call _vpaes_schedule_mangle
call _vpaes_schedule_192_smear
call _vpaes_schedule_mangle
call _vpaes_schedule_round
decl %eax
jz .L010schedule_mangle_last
call _vpaes_schedule_mangle
call _vpaes_schedule_192_smear
jmp .L011loop_schedule_192
.align 16
.L006schedule_256:
# AES-256: second 16 key bytes; alternate full rounds with "low" rounds
# (no rcon/rotate) swapping the two halves of the state via %xmm6/%xmm7.
movdqu 16(%esi),%xmm0
call _vpaes_schedule_transform
movl $7,%eax
.L012loop_schedule_256:
call _vpaes_schedule_mangle
movdqa %xmm0,%xmm6            # save low result
call _vpaes_schedule_round
decl %eax
jz .L010schedule_mangle_last
call _vpaes_schedule_mangle
pshufd $255,%xmm0,%xmm0       # broadcast last word
movdqa %xmm7,20(%esp)         # spill high state
movdqa %xmm6,%xmm7
call .L_vpaes_schedule_low_round
movdqa 20(%esp),%xmm7
jmp .L012loop_schedule_256
.align 16
.L010schedule_mangle_last:
# Emit the final round key through the output transform; direction
# (%edi) selects table base and store position.
leal 384(%ebp),%ebx
testl %edi,%edi
jnz .L013schedule_mangle_last_dec
movdqa 256(%ebp,%ecx,1),%xmm1
.byte 102,15,56,0,193         # pshufb %xmm1,%xmm0 (output permutation)
leal 352(%ebp),%ebx
addl $32,%edx
.L013schedule_mangle_last_dec:
addl $-16,%edx
pxor 336(%ebp),%xmm0
call _vpaes_schedule_transform # output transform
movdqu %xmm0,(%edx)           # save last key
# Scrub all xmm registers so no key material is left behind.
pxor %xmm0,%xmm0
pxor %xmm1,%xmm1
pxor %xmm2,%xmm2
pxor %xmm3,%xmm3
pxor %xmm4,%xmm4
pxor %xmm5,%xmm5
pxor %xmm6,%xmm6
pxor %xmm7,%xmm7
ret
.size _vpaes_schedule_core,.-_vpaes_schedule_core

# _vpaes_schedule_192_smear: AES-192 helper.
# Smears the low words of %xmm6 and mixes in the high qword of %xmm7,
# leaving the result in %xmm0 and %xmm6 (with %xmm6's high 64 bits cleared).
# Clobbers %xmm1.
.hidden _vpaes_schedule_192_smear
.type _vpaes_schedule_192_smear,@function
.align 16
_vpaes_schedule_192_smear:
pshufd $128,%xmm6,%xmm1       # broadcast low-half word pattern
pshufd $254,%xmm7,%xmm0       # take high qword of %xmm7
pxor %xmm1,%xmm6
pxor %xmm1,%xmm1
pxor %xmm0,%xmm6
movdqa %xmm6,%xmm0
movhlps %xmm1,%xmm6           # clear high 64 bits of %xmm6
ret
.size _vpaes_schedule_192_smear,.-_vpaes_schedule_192_smear

# _vpaes_schedule_round: one full key-schedule round.
#   In:   %xmm0 = input word material, %xmm7 = current schedule state,
#         8(%esp) = rcon vector (rotated in place each call),
#         %ebp = const base.
#   Out:  %xmm0 = %xmm7 = new schedule state.  Clobbers %xmm1-%xmm5.
# .L_vpaes_schedule_low_round is an alternate entry that skips the
# rotate/rcon step (used by the AES-256 path).
.hidden _vpaes_schedule_round
.type _vpaes_schedule_round,@function
.align 16
_vpaes_schedule_round:
movdqa 8(%esp),%xmm2          # load rcon
pxor %xmm1,%xmm1
.byte 102,15,58,15,202,15     # palignr $15,%xmm2,%xmm1 (extract rcon byte)
.byte 102,15,58,15,210,15     # palignr $15,%xmm2,%xmm2 (rotate rcon)
pxor %xmm1,%xmm7              # add rcon into state
pshufd $255,%xmm0,%xmm0       # broadcast last word (RotWord input)
.byte 102,15,58,15,192,1      # palignr $1,%xmm0,%xmm0 (RotWord)
movdqa %xmm2,8(%esp)          # store rotated rcon back
.L_vpaes_schedule_low_round:
# Smear the previous round key across the lanes: x ^= x<<32; x ^= x<<64.
movdqa %xmm7,%xmm1
pslldq $4,%xmm7
pxor %xmm1,%xmm7
movdqa %xmm7,%xmm1
pslldq $8,%xmm7
pxor %xmm1,%xmm7
pxor 336(%ebp),%xmm7          # fold in s63 constant
# SubBytes on %xmm0 via the same nibble-split inversion network as the
# encrypt core, using in-register tables %xmm4 (mask) and %xmm5.
movdqa -16(%ebp),%xmm4
movdqa -48(%ebp),%xmm5
movdqa %xmm4,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1                # high nibbles
pand %xmm4,%xmm0              # low nibbles
movdqa -32(%ebp),%xmm2
.byte 102,15,56,0,208         # pshufb %xmm0,%xmm2
pxor %xmm1,%xmm0
movdqa %xmm5,%xmm3
.byte 102,15,56,0,217         # pshufb %xmm1,%xmm3
pxor %xmm2,%xmm3
movdqa %xmm5,%xmm4
.byte 102,15,56,0,224         # pshufb %xmm0,%xmm4
pxor %xmm2,%xmm4
movdqa %xmm5,%xmm2
.byte 102,15,56,0,211         # pshufb %xmm3,%xmm2
pxor %xmm0,%xmm2
movdqa %xmm5,%xmm3
.byte 102,15,56,0,220         # pshufb %xmm4,%xmm3
pxor %xmm1,%xmm3
movdqa 32(%ebp),%xmm4
.byte 102,15,56,0,226         # pshufb %xmm2,%xmm4
movdqa 48(%ebp),%xmm0
.byte 102,15,56,0,195         # pshufb %xmm3,%xmm0
pxor %xmm4,%xmm0
# Combine with smeared state and publish the new round key.
pxor %xmm7,%xmm0
movdqa %xmm0,%xmm7
ret
.size _vpaes_schedule_round,.-_vpaes_schedule_round

# _vpaes_schedule_transform: linear transform of %xmm0 using the table
# pair at (%ebx)/16(%ebx), split by nibble via the mask at -16(%ebp).
#   In/Out: %xmm0.  Clobbers %xmm1, %xmm2.
.hidden _vpaes_schedule_transform
.type _vpaes_schedule_transform,@function
.align 16
_vpaes_schedule_transform:
movdqa -16(%ebp),%xmm2        # low-nibble mask
movdqa %xmm2,%xmm1
pandn %xmm0,%xmm1
psrld $4,%xmm1                # high nibbles
pand %xmm2,%xmm0              # low nibbles
movdqa (%ebx),%xmm2
.byte 102,15,56,0,208         # pshufb %xmm0,%xmm2 (low-nibble table)
movdqa 16(%ebx),%xmm0
.byte 102,15,56,0,193         # pshufb %xmm1,%xmm0 (high-nibble table)
pxor %xmm2,%xmm0
ret
.size _vpaes_schedule_transform,.-_vpaes_schedule_transform

# _vpaes_schedule_mangle: write the current round key (%xmm0) to the
# schedule in the stored format and advance the write pointer.
#   In:   %xmm0 = round key, %edx = schedule pointer, %ecx = permutation
#         index, %edi = 0 encrypt / nonzero decrypt, %ebp = const base.
#   Out:  key stored at (%edx); %edx moves +16 (encrypt) or -16 (decrypt);
#         %ecx rotated to the next permutation.  %xmm0 is preserved.
#   Clobbers %xmm1-%xmm5; decrypt path also repoints %esi at 416(%ebp).
.hidden _vpaes_schedule_mangle
.type _vpaes_schedule_mangle,@function
.align 16
_vpaes_schedule_mangle:
movdqa %xmm0,%xmm4            # work on a copy; keep %xmm0 intact
movdqa 128(%ebp),%xmm5        # byte-rotation table
testl %edi,%edi
jnz .L014schedule_mangle_dec
# Encrypting: xor s63 then fold three byte-rotations together.
addl $16,%edx
pxor 336(%ebp),%xmm4
.byte 102,15,56,0,229         # pshufb %xmm5,%xmm4
movdqa %xmm4,%xmm3
.byte 102,15,56,0,229         # pshufb %xmm5,%xmm4
pxor %xmm4,%xmm3
.byte 102,15,56,0,229         # pshufb %xmm5,%xmm4
pxor %xmm4,%xmm3
jmp .L015schedule_mangle_both
.align 16
.L014schedule_mangle_dec:
# Decrypting: run the key through the inverse "dks" table cascade
# (four low/high table pairs at 416(%ebp)...), interleaving rotations.
movdqa -16(%ebp),%xmm2        # low-nibble mask
leal 416(%ebp),%esi           # %esi -> decrypt key-schedule tables
movdqa %xmm2,%xmm1
pandn %xmm4,%xmm1
psrld $4,%xmm1                # high nibbles
pand %xmm2,%xmm4              # low nibbles
movdqa (%esi),%xmm2
.byte 102,15,56,0,212         # pshufb %xmm4,%xmm2
movdqa 16(%esi),%xmm3
.byte 102,15,56,0,217         # pshufb %xmm1,%xmm3
pxor %xmm2,%xmm3
.byte 102,15,56,0,221         # pshufb %xmm5,%xmm3 (rotate)
movdqa 32(%esi),%xmm2
.byte 102,15,56,0,212         # pshufb %xmm4,%xmm2
pxor %xmm3,%xmm2
movdqa 48(%esi),%xmm3
.byte 102,15,56,0,217         # pshufb %xmm1,%xmm3
pxor %xmm2,%xmm3
.byte 102,15,56,0,221         # pshufb %xmm5,%xmm3 (rotate)
movdqa 64(%esi),%xmm2
.byte 102,15,56,0,212         # pshufb %xmm4,%xmm2
pxor %xmm3,%xmm2
movdqa 80(%esi),%xmm3
.byte 102,15,56,0,217         # pshufb %xmm1,%xmm3
pxor %xmm2,%xmm3
.byte 102,15,56,0,221         # pshufb %xmm5,%xmm3 (rotate)
movdqa 96(%esi),%xmm2
.byte 102,15,56,0,212         # pshufb %xmm4,%xmm2
pxor %xmm3,%xmm2
movdqa 112(%esi),%xmm3
.byte 102,15,56,0,217         # pshufb %xmm1,%xmm3
pxor %xmm2,%xmm3
addl $-16,%edx                # decrypt schedule is written backwards
.L015schedule_mangle_both:
movdqa 256(%ebp,%ecx,1),%xmm1 # current output permutation
.byte 102,15,56,0,217         # pshufb %xmm1,%xmm3
addl $-16,%ecx
andl $48,%ecx                 # rotate %ecx through {0,16,32,48}
movdqu %xmm3,(%edx)           # save key n
ret
.size _vpaes_schedule_mangle,.-_vpaes_schedule_mangle

# int vpaes_set_encrypt_key(const uint8_t *key, int bits, AES_KEY *out)
#   cdecl; args at 20/24/28(%esp) after the four pushes.
# Switches to a private 16-byte-aligned stack frame (old %esp saved at
# 48(%esp)) before calling _vpaes_schedule_core, which needs aligned
# movdqa spill slots.  Always returns 0.
.globl vpaes_set_encrypt_key
.hidden vpaes_set_encrypt_key
.type vpaes_set_encrypt_key,@function
.align 16
vpaes_set_encrypt_key:
.L_vpaes_set_encrypt_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi            # %esi = user key
leal -56(%esp),%ebx
movl 24(%esp),%eax            # %eax = key bits
andl $-16,%ebx                # 16-byte-align the scratch frame
movl 28(%esp),%edx            # %edx = AES_KEY out
xchgl %esp,%ebx               # switch to aligned frame
movl %ebx,48(%esp)            # remember caller's %esp
movl %eax,%ebx
shrl $5,%ebx
addl $5,%ebx                  # rounds = bits/32 + 5 (10/12/14)
movl %ebx,240(%edx)           # store round count
movl $48,%ecx                 # initial mangle index
movl $0,%edi                  # direction = encrypt
leal .L_vpaes_consts+0x30-.L016pic_point,%ebp
call _vpaes_schedule_core     # return address == .L016pic_point (PIC base)
.L016pic_point:
movl 48(%esp),%esp            # restore caller stack
xorl %eax,%eax                # return 0
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size vpaes_set_encrypt_key,.-.L_vpaes_set_encrypt_key_begin

# int vpaes_set_decrypt_key(const uint8_t *key, int bits, AES_KEY *out)
#   cdecl, same frame discipline as vpaes_set_encrypt_key.  Points %edx at
#   the END of the schedule (decrypt keys are written backwards) and seeds
#   %ecx/%edi for the decrypt direction.  Always returns 0.
.globl vpaes_set_decrypt_key
.hidden vpaes_set_decrypt_key
.type vpaes_set_decrypt_key,@function
.align 16
vpaes_set_decrypt_key:
.L_vpaes_set_decrypt_key_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi            # %esi = user key
leal -56(%esp),%ebx
movl 24(%esp),%eax            # %eax = key bits
andl $-16,%ebx
movl 28(%esp),%edx            # %edx = AES_KEY out
xchgl %esp,%ebx               # switch to aligned frame
movl %ebx,48(%esp)
movl %eax,%ebx
shrl $5,%ebx
addl $5,%ebx                  # rounds = bits/32 + 5
movl %ebx,240(%edx)
shll $4,%ebx
leal 16(%edx,%ebx,1),%edx     # %edx -> last schedule slot
movl $1,%edi                  # direction = decrypt
movl %eax,%ecx
shrl $1,%ecx
andl $32,%ecx
xorl $32,%ecx                 # initial mangle index depends on key size
leal .L_vpaes_consts+0x30-.L017pic_point,%ebp
call _vpaes_schedule_core
.L017pic_point:
movl 48(%esp),%esp            # restore caller stack
xorl %eax,%eax                # return 0
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size vpaes_set_decrypt_key,.-.L_vpaes_set_decrypt_key_begin

# void vpaes_encrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key)
#   cdecl ECB single-block encrypt: preheat tables, switch to an aligned
#   frame, run _vpaes_encrypt_core on the block, store the result.
.globl vpaes_encrypt
.hidden vpaes_encrypt
.type vpaes_encrypt,@function
.align 16
vpaes_encrypt:
.L_vpaes_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
leal .L_vpaes_consts+0x30-.L018pic_point,%ebp
call _vpaes_preheat           # fixes up %ebp, loads %xmm6/%xmm7
.L018pic_point:
movl 20(%esp),%esi            # %esi = in
leal -56(%esp),%ebx
movl 24(%esp),%edi            # %edi = out
andl $-16,%ebx
movl 28(%esp),%edx            # %edx = key schedule
xchgl %esp,%ebx               # aligned frame
movl %ebx,48(%esp)
movdqu (%esi),%xmm0
call _vpaes_encrypt_core
movdqu %xmm0,(%edi)
movl 48(%esp),%esp            # restore caller stack
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size vpaes_encrypt,.-.L_vpaes_encrypt_begin

# void vpaes_decrypt(const uint8_t *in, uint8_t *out, const AES_KEY *key)
#   cdecl ECB single-block decrypt; mirror of vpaes_encrypt.
.globl vpaes_decrypt
.hidden vpaes_decrypt
.type vpaes_decrypt,@function
.align 16
vpaes_decrypt:
.L_vpaes_decrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
leal .L_vpaes_consts+0x30-.L019pic_point,%ebp
call _vpaes_preheat
.L019pic_point:
movl 20(%esp),%esi            # %esi = in
leal -56(%esp),%ebx
movl 24(%esp),%edi            # %edi = out
andl $-16,%ebx
movl 28(%esp),%edx            # %edx = key schedule
xchgl %esp,%ebx
movl %ebx,48(%esp)
movdqu (%esi),%xmm0
call _vpaes_decrypt_core
movdqu %xmm0,(%edi)
movl 48(%esp),%esp
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size vpaes_decrypt,.-.L_vpaes_decrypt_begin

# void vpaes_cbc_encrypt(const uint8_t *in, uint8_t *out, size_t len,
#                        const AES_KEY *key, uint8_t *ivec, int enc)
#   cdecl CBC mode.  Processes only whole 16-byte blocks (len < 16 is a
#   no-op via the early jc); writes the final IV back to ivec.
#   Frame layout on the aligned stack: (%esp)=out-in delta, 4(%esp)=key,
#   8(%esp)=ivec, 16/32(%esp)=xmm spill, 48(%esp)=saved caller %esp.
.globl vpaes_cbc_encrypt
.hidden vpaes_cbc_encrypt
.type vpaes_cbc_encrypt,@function
.align 16
vpaes_cbc_encrypt:
.L_vpaes_cbc_encrypt_begin:
pushl %ebp
pushl %ebx
pushl %esi
pushl %edi
movl 20(%esp),%esi            # %esi = in
movl 24(%esp),%edi            # %edi = out
movl 28(%esp),%eax            # %eax = len
movl 32(%esp),%edx            # %edx = key schedule
subl $16,%eax
jc .L020cbc_abort             # len < 16: nothing to do
leal -56(%esp),%ebx
movl 36(%esp),%ebp            # %ebp = ivec (temporarily)
andl $-16,%ebx
movl 40(%esp),%ecx            # %ecx = enc flag
xchgl %esp,%ebx               # aligned frame
movdqu (%ebp),%xmm1           # %xmm1 = IV
subl %esi,%edi                # %edi = out - in (delta addressing)
movl %ebx,48(%esp)            # saved caller %esp
movl %edi,(%esp)              # save out-in delta
movl %edx,4(%esp)             # save key ptr (cores clobber %edx)
movl %ebp,8(%esp)             # save ivec ptr
movl %eax,%edi                # %edi = remaining length - 16
leal .L_vpaes_consts+0x30-.L021pic_point,%ebp
call _vpaes_preheat
.L021pic_point:
cmpl $0,%ecx
je .L022cbc_dec_loop
jmp .L023cbc_enc_loop
.align 16
.L023cbc_enc_loop:
# CBC encrypt: C_i = E(P_i ^ C_{i-1}); chain value stays in %xmm1.
movdqu (%esi),%xmm0
pxor %xmm1,%xmm0              # xor with previous ciphertext / IV
call _vpaes_encrypt_core
movl (%esp),%ebx              # reload delta
movl 4(%esp),%edx             # reload key ptr
movdqa %xmm0,%xmm1            # new chain value
movdqu %xmm0,(%ebx,%esi,1)    # store at out = in + delta
leal 16(%esi),%esi
subl $16,%edi
jnc .L023cbc_enc_loop
jmp .L024cbc_done
.align 16
.L022cbc_dec_loop:
# CBC decrypt: P_i = D(C_i) ^ C_{i-1}; spill IV and current block so the
# core may clobber xmm registers.
movdqu (%esi),%xmm0
movdqa %xmm1,16(%esp)         # spill previous ciphertext / IV
movdqa %xmm0,32(%esp)         # spill current ciphertext (next chain value)
call _vpaes_decrypt_core
movl (%esp),%ebx
movl 4(%esp),%edx
pxor 16(%esp),%xmm0           # xor with previous ciphertext
movdqa 32(%esp),%xmm1         # chain value for next block
movdqu %xmm0,(%ebx,%esi,1)
leal 16(%esi),%esi
subl $16,%edi
jnc .L022cbc_dec_loop
.L024cbc_done:
movl 8(%esp),%ebx             # %ebx = ivec
movl 48(%esp),%esp            # restore caller stack
movdqu %xmm1,(%ebx)           # write back final IV
.L020cbc_abort:
popl %edi
popl %esi
popl %ebx
popl %ebp
ret
.size vpaes_cbc_encrypt,.-.L_vpaes_cbc_encrypt_begin
#endif