diff --git a/sys/crypto/openssl/powerpc/aesp8-ppc.S b/sys/crypto/openssl/powerpc/aesp8-ppc.S index 21067ff2521c..d0fb0f70adbc 100644 --- a/sys/crypto/openssl/powerpc/aesp8-ppc.S +++ b/sys/crypto/openssl/powerpc/aesp8-ppc.S @@ -1,3642 +1,3687 @@ /* Do not modify. This file is auto-generated from aesp8-ppc.pl. */ .machine "any" .text .align 7 rcon: .byte 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00 .byte 0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00 .byte 0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c .byte 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 +.long 0x0f102132, 0x43546576, 0x8798a9ba, 0xcbdcedfe .Lconsts: mflr 0 bcl 20,31,$+4 mflr 6 - addi 6,6,-0x48 + addi 6,6,-0x58 mtlr 0 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .byte 65,69,83,32,102,111,114,32,80,111,119,101,114,73,83,65,32,50,46,48,55,44,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .globl aes_p8_set_encrypt_key .type aes_p8_set_encrypt_key,@function .align 5 aes_p8_set_encrypt_key: .Lset_encrypt_key: mflr 11 stw 11,4(1) li 6,-1 cmplwi 3,0 beq- .Lenc_key_abort cmplwi 5,0 beq- .Lenc_key_abort li 6,-2 cmpwi 4,128 blt- .Lenc_key_abort cmpwi 4,256 bgt- .Lenc_key_abort andi. 
0,4,0x3f bne- .Lenc_key_abort lis 0,0xfff0 mfspr 12,256 mtspr 256,0 bl .Lconsts mtlr 11 neg 9,3 lvx 1,0,3 addi 3,3,15 lvsr 3,0,9 li 8,0x20 cmpwi 4,192 lvx 2,0,3 lvx 4,0,6 lvx 5,8,6 addi 6,6,0x10 vperm 1,1,2,3 li 7,8 vxor 0,0,0 mtctr 7 lvsr 8,0,5 vspltisb 9,-1 lvx 10,0,5 vperm 9,0,9,8 blt .Loop128 addi 3,3,8 beq .L192 addi 3,3,8 b .L256 .align 4 .Loop128: vperm 3,1,1,5 vsldoi 6,0,1,12 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vadduwm 4,4,4 vxor 1,1,3 bdnz .Loop128 lvx 4,0,6 vperm 3,1,1,5 vsldoi 6,0,1,12 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vadduwm 4,4,4 vxor 1,1,3 vperm 3,1,1,5 vsldoi 6,0,1,12 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vxor 1,1,3 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 3,5,15 addi 5,5,0x50 li 8,10 b .Ldone .align 4 .L192: lvx 6,0,3 li 7,4 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 5,5,16 vperm 2,2,6,3 vspltisb 3,8 mtctr 7 vsububm 5,5,3 .Loop192: vperm 3,2,2,5 vsldoi 6,0,1,12 .long 0x10632509 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 7,0,2,8 vspltw 6,1,3 vxor 6,6,2 vsldoi 2,0,2,12 vadduwm 4,4,4 vxor 2,2,6 vxor 1,1,3 vxor 2,2,3 vsldoi 7,7,1,8 vperm 3,2,2,5 vsldoi 6,0,1,12 vperm 11,7,7,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vsldoi 7,1,2,8 vxor 1,1,6 vsldoi 6,0,6,12 vperm 11,7,7,8 vsel 7,10,11,9 vor 10,11,11 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 stvx 7,0,5 addi 5,5,16 vspltw 6,1,3 vxor 6,6,2 vsldoi 2,0,2,12 vadduwm 4,4,4 vxor 2,2,6 vxor 1,1,3 vxor 2,2,3 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 3,5,15 addi 5,5,16 bdnz .Loop192 li 8,12 addi 5,5,0x20 b .Ldone .align 4 .L256: lvx 6,0,3 li 7,7 li 8,14 vperm 11,1,1,8 vsel 7,10,11,9 
vor 10,11,11 stvx 7,0,5 addi 5,5,16 vperm 2,2,6,3 mtctr 7 .Loop256: vperm 3,2,2,5 vsldoi 6,0,1,12 vperm 11,2,2,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vadduwm 4,4,4 vxor 1,1,3 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 3,5,15 addi 5,5,16 bdz .Ldone vspltw 3,1,3 vsldoi 6,0,2,12 .long 0x106305C8 vxor 2,2,6 vsldoi 6,0,6,12 vxor 2,2,6 vsldoi 6,0,6,12 vxor 2,2,6 vxor 2,2,3 b .Loop256 .align 4 .Ldone: lvx 2,0,3 vsel 2,10,2,9 stvx 2,0,3 li 6,0 mtspr 256,12 stw 8,0(5) .Lenc_key_abort: mr 3,6 blr .long 0 .byte 0,12,0x14,1,0,0,3,0 .long 0 .size aes_p8_set_encrypt_key,.-aes_p8_set_encrypt_key .globl aes_p8_set_decrypt_key .type aes_p8_set_decrypt_key,@function .align 5 aes_p8_set_decrypt_key: stwu 1,-32(1) mflr 10 stw 10,32+4(1) bl .Lset_encrypt_key mtlr 10 cmpwi 3,0 bne- .Ldec_key_abort slwi 7,8,4 subi 3,5,240 srwi 8,8,1 add 5,3,7 mtctr 8 .Ldeckey: lwz 0, 0(3) lwz 6, 4(3) lwz 7, 8(3) lwz 8, 12(3) addi 3,3,16 lwz 9, 0(5) lwz 10,4(5) lwz 11,8(5) lwz 12,12(5) stw 0, 0(5) stw 6, 4(5) stw 7, 8(5) stw 8, 12(5) subi 5,5,16 stw 9, -16(3) stw 10,-12(3) stw 11,-8(3) stw 12,-4(3) bdnz .Ldeckey xor 3,3,3 .Ldec_key_abort: addi 1,1,32 blr .long 0 .byte 0,12,4,1,0x80,0,3,0 .long 0 .size aes_p8_set_decrypt_key,.-aes_p8_set_decrypt_key .globl aes_p8_encrypt .type aes_p8_encrypt,@function .align 5 aes_p8_encrypt: lwz 6,240(5) lis 0,0xfc00 mfspr 12,256 li 7,15 mtspr 256,0 lvx 0,0,3 neg 11,4 lvx 1,7,3 lvsl 2,0,3 lvsl 3,0,11 li 7,16 vperm 0,0,1,2 lvx 1,0,5 lvsl 5,0,5 srwi 6,6,1 lvx 2,7,5 addi 7,7,16 subi 6,6,1 vperm 1,1,2,5 vxor 0,0,1 lvx 1,7,5 addi 7,7,16 mtctr 6 .Loop_enc: vperm 2,2,1,5 .long 0x10001508 lvx 2,7,5 addi 7,7,16 vperm 1,1,2,5 .long 0x10000D08 lvx 1,7,5 addi 7,7,16 bdnz .Loop_enc vperm 2,2,1,5 .long 0x10001508 lvx 2,7,5 vperm 1,1,2,5 .long 0x10000D09 vspltisb 2,-1 vxor 1,1,1 li 7,15 vperm 2,1,2,3 lvx 1,0,4 vperm 0,0,0,3 vsel 1,1,0,2 lvx 4,7,4 stvx 1,0,4 vsel 0,0,4,2 stvx 
0,7,4 mtspr 256,12 blr .long 0 .byte 0,12,0x14,0,0,0,3,0 .long 0 .size aes_p8_encrypt,.-aes_p8_encrypt .globl aes_p8_decrypt .type aes_p8_decrypt,@function .align 5 aes_p8_decrypt: lwz 6,240(5) lis 0,0xfc00 mfspr 12,256 li 7,15 mtspr 256,0 lvx 0,0,3 neg 11,4 lvx 1,7,3 lvsl 2,0,3 lvsl 3,0,11 li 7,16 vperm 0,0,1,2 lvx 1,0,5 lvsl 5,0,5 srwi 6,6,1 lvx 2,7,5 addi 7,7,16 subi 6,6,1 vperm 1,1,2,5 vxor 0,0,1 lvx 1,7,5 addi 7,7,16 mtctr 6 .Loop_dec: vperm 2,2,1,5 .long 0x10001548 lvx 2,7,5 addi 7,7,16 vperm 1,1,2,5 .long 0x10000D48 lvx 1,7,5 addi 7,7,16 bdnz .Loop_dec vperm 2,2,1,5 .long 0x10001548 lvx 2,7,5 vperm 1,1,2,5 .long 0x10000D49 vspltisb 2,-1 vxor 1,1,1 li 7,15 vperm 2,1,2,3 lvx 1,0,4 vperm 0,0,0,3 vsel 1,1,0,2 lvx 4,7,4 stvx 1,0,4 vsel 0,0,4,2 stvx 0,7,4 mtspr 256,12 blr .long 0 .byte 0,12,0x14,0,0,0,3,0 .long 0 .size aes_p8_decrypt,.-aes_p8_decrypt .globl aes_p8_cbc_encrypt .type aes_p8_cbc_encrypt,@function .align 5 aes_p8_cbc_encrypt: cmplwi 5,16 .long 0x4dc00020 cmpwi 8,0 lis 0,0xffe0 mfspr 12,256 mtspr 256,0 li 10,15 vxor 0,0,0 lvx 4,0,7 lvsl 6,0,7 lvx 5,10,7 vperm 4,4,5,6 neg 11,3 lvsl 10,0,6 lwz 9,240(6) lvsr 6,0,11 lvx 5,0,3 addi 3,3,15 lvsr 8,0,4 vspltisb 9,-1 lvx 7,0,4 vperm 9,0,9,8 srwi 9,9,1 li 10,16 subi 9,9,1 beq .Lcbc_dec .Lcbc_enc: vor 2,5,5 lvx 5,0,3 addi 3,3,16 mtctr 9 subi 5,5,16 lvx 0,0,6 vperm 2,2,5,6 lvx 1,10,6 addi 10,10,16 vperm 0,0,1,10 vxor 2,2,0 lvx 0,10,6 addi 10,10,16 vxor 2,2,4 .Loop_cbc_enc: vperm 1,1,0,10 .long 0x10420D08 lvx 1,10,6 addi 10,10,16 vperm 0,0,1,10 .long 0x10420508 lvx 0,10,6 addi 10,10,16 bdnz .Loop_cbc_enc vperm 1,1,0,10 .long 0x10420D08 lvx 1,10,6 li 10,16 vperm 0,0,1,10 .long 0x10820509 cmplwi 5,16 vperm 3,4,4,8 vsel 2,7,3,9 vor 7,3,3 stvx 2,0,4 addi 4,4,16 bge .Lcbc_enc b .Lcbc_done .align 4 .Lcbc_dec: cmplwi 5,128 bge _aesp8_cbc_decrypt8x vor 3,5,5 lvx 5,0,3 addi 3,3,16 mtctr 9 subi 5,5,16 lvx 0,0,6 vperm 3,3,5,6 lvx 1,10,6 addi 10,10,16 vperm 0,0,1,10 vxor 2,3,0 lvx 0,10,6 addi 10,10,16 .Loop_cbc_dec: vperm 
1,1,0,10 .long 0x10420D48 lvx 1,10,6 addi 10,10,16 vperm 0,0,1,10 .long 0x10420548 lvx 0,10,6 addi 10,10,16 bdnz .Loop_cbc_dec vperm 1,1,0,10 .long 0x10420D48 lvx 1,10,6 li 10,16 vperm 0,0,1,10 .long 0x10420549 cmplwi 5,16 vxor 2,2,4 vor 4,3,3 vperm 3,2,2,8 vsel 2,7,3,9 vor 7,3,3 stvx 2,0,4 addi 4,4,16 bge .Lcbc_dec .Lcbc_done: addi 4,4,-1 lvx 2,0,4 vsel 2,7,2,9 stvx 2,0,4 neg 8,7 li 10,15 vxor 0,0,0 vspltisb 9,-1 lvsl 8,0,8 vperm 9,0,9,8 lvx 7,0,7 vperm 4,4,4,8 vsel 2,7,4,9 lvx 5,10,7 stvx 2,0,7 vsel 2,4,5,9 stvx 2,10,7 mtspr 256,12 blr .long 0 .byte 0,12,0x14,0,0,0,6,0 .long 0 .align 5 _aesp8_cbc_decrypt8x: stwu 1,-392(1) li 10,175 li 11,191 stvx 20,10,1 addi 10,10,32 stvx 21,11,1 addi 11,11,32 stvx 22,10,1 addi 10,10,32 stvx 23,11,1 addi 11,11,32 stvx 24,10,1 addi 10,10,32 stvx 25,11,1 addi 11,11,32 stvx 26,10,1 addi 10,10,32 stvx 27,11,1 addi 11,11,32 stvx 28,10,1 addi 10,10,32 stvx 29,11,1 addi 11,11,32 stvx 30,10,1 stvx 31,11,1 li 0,-1 stw 12,364(1) li 8,0x10 stw 26,368(1) li 26,0x20 stw 27,372(1) li 27,0x30 stw 28,376(1) li 28,0x40 stw 29,380(1) li 29,0x50 stw 30,384(1) li 30,0x60 stw 31,388(1) li 31,0x70 mtspr 256,0 subi 9,9,3 subi 5,5,128 lvx 23,0,6 lvx 30,8,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,23,30,10 addi 11,1,32+15 mtctr 9 .Load_cbc_dec_key: vperm 24,30,31,10 lvx 30,8,6 addi 6,6,0x20 stvx 24,0,11 vperm 25,31,30,10 lvx 31,0,6 stvx 25,8,11 addi 11,11,0x20 bdnz .Load_cbc_dec_key lvx 26,8,6 vperm 24,30,31,10 lvx 27,26,6 stvx 24,0,11 vperm 25,31,26,10 lvx 28,27,6 stvx 25,8,11 addi 11,1,32+15 vperm 26,26,27,10 lvx 29,28,6 vperm 27,27,28,10 lvx 30,29,6 vperm 28,28,29,10 lvx 31,30,6 vperm 29,29,30,10 lvx 14,31,6 vperm 30,30,31,10 lvx 24,0,11 vperm 31,31,14,10 lvx 25,8,11 subi 3,3,15 .long 0x7C001E99 .long 0x7C281E99 .long 0x7C5A1E99 .long 0x7C7B1E99 .long 0x7D5C1E99 vxor 14,0,23 .long 0x7D7D1E99 vxor 15,1,23 .long 0x7D9E1E99 vxor 16,2,23 .long 0x7DBF1E99 addi 3,3,0x80 vxor 17,3,23 vxor 18,10,23 vxor 19,11,23 vxor 20,12,23 vxor 21,13,23 mtctr 9 b .Loop_cbc_dec8x 
.align 5 .Loop_cbc_dec8x: .long 0x11CEC548 .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 lvx 24,26,11 addi 11,11,0x20 .long 0x11CECD48 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 lvx 25,8,11 bdnz .Loop_cbc_dec8x subic 5,5,128 .long 0x11CEC548 .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 subfe. 0,0,0 .long 0x11CECD48 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 and 0,0,5 .long 0x11CED548 .long 0x11EFD548 .long 0x1210D548 .long 0x1231D548 .long 0x1252D548 .long 0x1273D548 .long 0x1294D548 .long 0x12B5D548 add 3,3,0 .long 0x11CEDD48 .long 0x11EFDD48 .long 0x1210DD48 .long 0x1231DD48 .long 0x1252DD48 .long 0x1273DD48 .long 0x1294DD48 .long 0x12B5DD48 addi 11,1,32+15 .long 0x11CEE548 .long 0x11EFE548 .long 0x1210E548 .long 0x1231E548 .long 0x1252E548 .long 0x1273E548 .long 0x1294E548 .long 0x12B5E548 lvx 24,0,11 .long 0x11CEED48 .long 0x11EFED48 .long 0x1210ED48 .long 0x1231ED48 .long 0x1252ED48 .long 0x1273ED48 .long 0x1294ED48 .long 0x12B5ED48 lvx 25,8,11 .long 0x11CEF548 vxor 4,4,31 .long 0x11EFF548 vxor 0,0,31 .long 0x1210F548 vxor 1,1,31 .long 0x1231F548 vxor 2,2,31 .long 0x1252F548 vxor 3,3,31 .long 0x1273F548 vxor 10,10,31 .long 0x1294F548 vxor 11,11,31 .long 0x12B5F548 vxor 12,12,31 .long 0x11CE2549 .long 0x11EF0549 .long 0x7C001E99 .long 0x12100D49 .long 0x7C281E99 .long 0x12311549 .long 0x7C5A1E99 .long 0x12521D49 .long 0x7C7B1E99 .long 0x12735549 .long 0x7D5C1E99 .long 0x12945D49 .long 0x7D7D1E99 .long 0x12B56549 .long 0x7D9E1E99 vor 4,13,13 .long 0x7DBF1E99 addi 3,3,0x80 .long 0x7DC02799 vxor 14,0,23 .long 0x7DE82799 vxor 15,1,23 .long 0x7E1A2799 vxor 16,2,23 .long 0x7E3B2799 vxor 17,3,23 .long 0x7E5C2799 vxor 18,10,23 .long 0x7E7D2799 vxor 19,11,23 .long 0x7E9E2799 
vxor 20,12,23 .long 0x7EBF2799 addi 4,4,0x80 vxor 21,13,23 mtctr 9 beq .Loop_cbc_dec8x addic. 5,5,128 beq .Lcbc_dec8x_done nop nop .Loop_cbc_dec8x_tail: .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 lvx 24,26,11 addi 11,11,0x20 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 lvx 25,8,11 bdnz .Loop_cbc_dec8x_tail .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 .long 0x11EFD548 .long 0x1210D548 .long 0x1231D548 .long 0x1252D548 .long 0x1273D548 .long 0x1294D548 .long 0x12B5D548 .long 0x11EFDD48 .long 0x1210DD48 .long 0x1231DD48 .long 0x1252DD48 .long 0x1273DD48 .long 0x1294DD48 .long 0x12B5DD48 .long 0x11EFE548 .long 0x1210E548 .long 0x1231E548 .long 0x1252E548 .long 0x1273E548 .long 0x1294E548 .long 0x12B5E548 .long 0x11EFED48 .long 0x1210ED48 .long 0x1231ED48 .long 0x1252ED48 .long 0x1273ED48 .long 0x1294ED48 .long 0x12B5ED48 .long 0x11EFF548 vxor 4,4,31 .long 0x1210F548 vxor 1,1,31 .long 0x1231F548 vxor 2,2,31 .long 0x1252F548 vxor 3,3,31 .long 0x1273F548 vxor 10,10,31 .long 0x1294F548 vxor 11,11,31 .long 0x12B5F548 vxor 12,12,31 cmplwi 5,32 blt .Lcbc_dec8x_one nop beq .Lcbc_dec8x_two cmplwi 5,64 blt .Lcbc_dec8x_three nop beq .Lcbc_dec8x_four cmplwi 5,96 blt .Lcbc_dec8x_five nop beq .Lcbc_dec8x_six .Lcbc_dec8x_seven: .long 0x11EF2549 .long 0x12100D49 .long 0x12311549 .long 0x12521D49 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 .long 0x7DE02799 .long 0x7E082799 .long 0x7E3A2799 .long 0x7E5B2799 .long 0x7E7C2799 .long 0x7E9D2799 .long 0x7EBE2799 addi 4,4,0x70 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_six: .long 0x12102549 .long 0x12311549 .long 0x12521D49 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 
.long 0x7E002799 .long 0x7E282799 .long 0x7E5A2799 .long 0x7E7B2799 .long 0x7E9C2799 .long 0x7EBD2799 addi 4,4,0x60 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_five: .long 0x12312549 .long 0x12521D49 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 .long 0x7E202799 .long 0x7E482799 .long 0x7E7A2799 .long 0x7E9B2799 .long 0x7EBC2799 addi 4,4,0x50 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_four: .long 0x12522549 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 .long 0x7E402799 .long 0x7E682799 .long 0x7E9A2799 .long 0x7EBB2799 addi 4,4,0x40 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_three: .long 0x12732549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 .long 0x7E602799 .long 0x7E882799 .long 0x7EBA2799 addi 4,4,0x30 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_two: .long 0x12942549 .long 0x12B56549 vor 4,13,13 .long 0x7E802799 .long 0x7EA82799 addi 4,4,0x20 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_one: .long 0x12B52549 vor 4,13,13 .long 0x7EA02799 addi 4,4,0x10 .Lcbc_dec8x_done: .long 0x7C803F99 li 10,47 li 11,63 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 mtspr 256,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 lwz 26,368(1) lwz 27,372(1) lwz 28,376(1) lwz 29,380(1) lwz 30,384(1) lwz 31,388(1) addi 1,1,392 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_p8_cbc_encrypt,.-aes_p8_cbc_encrypt .globl aes_p8_ctr32_encrypt_blocks .type aes_p8_ctr32_encrypt_blocks,@function .align 5 aes_p8_ctr32_encrypt_blocks: cmplwi 5,1 .long 0x4dc00020 lis 0,0xfff0 mfspr 12,256 mtspr 256,0 li 10,15 vxor 0,0,0 lvx 4,0,7 lvsl 6,0,7 lvx 5,10,7 vspltisb 11,1 vperm 4,4,5,6 
vsldoi 11,0,11,1 neg 11,3 lvsl 10,0,6 lwz 9,240(6) lvsr 6,0,11 lvx 5,0,3 addi 3,3,15 srwi 9,9,1 li 10,16 subi 9,9,1 cmplwi 5,8 bge _aesp8_ctr32_encrypt8x lvsr 8,0,4 vspltisb 9,-1 lvx 7,0,4 vperm 9,0,9,8 lvx 0,0,6 mtctr 9 lvx 1,10,6 addi 10,10,16 vperm 0,0,1,10 vxor 2,4,0 lvx 0,10,6 addi 10,10,16 b .Loop_ctr32_enc .align 5 .Loop_ctr32_enc: vperm 1,1,0,10 .long 0x10420D08 lvx 1,10,6 addi 10,10,16 vperm 0,0,1,10 .long 0x10420508 lvx 0,10,6 addi 10,10,16 bdnz .Loop_ctr32_enc vadduwm 4,4,11 vor 3,5,5 lvx 5,0,3 addi 3,3,16 subic. 5,5,1 vperm 1,1,0,10 .long 0x10420D08 lvx 1,10,6 vperm 3,3,5,6 li 10,16 vperm 1,0,1,10 lvx 0,0,6 vxor 3,3,1 .long 0x10421D09 lvx 1,10,6 addi 10,10,16 vperm 2,2,2,8 vsel 3,7,2,9 mtctr 9 vperm 0,0,1,10 vor 7,2,2 vxor 2,4,0 lvx 0,10,6 addi 10,10,16 stvx 3,0,4 addi 4,4,16 bne .Loop_ctr32_enc addi 4,4,-1 lvx 2,0,4 vsel 2,7,2,9 stvx 2,0,4 mtspr 256,12 blr .long 0 .byte 0,12,0x14,0,0,0,6,0 .long 0 .align 5 _aesp8_ctr32_encrypt8x: stwu 1,-392(1) li 10,175 li 11,191 stvx 20,10,1 addi 10,10,32 stvx 21,11,1 addi 11,11,32 stvx 22,10,1 addi 10,10,32 stvx 23,11,1 addi 11,11,32 stvx 24,10,1 addi 10,10,32 stvx 25,11,1 addi 11,11,32 stvx 26,10,1 addi 10,10,32 stvx 27,11,1 addi 11,11,32 stvx 28,10,1 addi 10,10,32 stvx 29,11,1 addi 11,11,32 stvx 30,10,1 stvx 31,11,1 li 0,-1 stw 12,364(1) li 8,0x10 stw 26,368(1) li 26,0x20 stw 27,372(1) li 27,0x30 stw 28,376(1) li 28,0x40 stw 29,380(1) li 29,0x50 stw 30,384(1) li 30,0x60 stw 31,388(1) li 31,0x70 mtspr 256,0 subi 9,9,3 lvx 23,0,6 lvx 30,8,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,23,30,10 addi 11,1,32+15 mtctr 9 .Load_ctr32_enc_key: vperm 24,30,31,10 lvx 30,8,6 addi 6,6,0x20 stvx 24,0,11 vperm 25,31,30,10 lvx 31,0,6 stvx 25,8,11 addi 11,11,0x20 bdnz .Load_ctr32_enc_key lvx 26,8,6 vperm 24,30,31,10 lvx 27,26,6 stvx 24,0,11 vperm 25,31,26,10 lvx 28,27,6 stvx 25,8,11 addi 11,1,32+15 vperm 26,26,27,10 lvx 29,28,6 vperm 27,27,28,10 lvx 30,29,6 vperm 28,28,29,10 lvx 31,30,6 vperm 29,29,30,10 lvx 15,31,6 vperm 30,30,31,10 lvx 
24,0,11 vperm 31,31,15,10 lvx 25,8,11 vadduwm 7,11,11 subi 3,3,15 slwi 5,5,4 vadduwm 16,4,11 vadduwm 17,4,7 vxor 15,4,23 vadduwm 18,16,7 vxor 16,16,23 vadduwm 19,17,7 vxor 17,17,23 vadduwm 20,18,7 vxor 18,18,23 vadduwm 21,19,7 vxor 19,19,23 vadduwm 22,20,7 vxor 20,20,23 vadduwm 4,21,7 vxor 21,21,23 vxor 22,22,23 mtctr 9 b .Loop_ctr32_enc8x .align 5 .Loop_ctr32_enc8x: .long 0x11EFC508 .long 0x1210C508 .long 0x1231C508 .long 0x1252C508 .long 0x1273C508 .long 0x1294C508 .long 0x12B5C508 .long 0x12D6C508 .Loop_ctr32_enc8x_middle: lvx 24,26,11 addi 11,11,0x20 .long 0x11EFCD08 .long 0x1210CD08 .long 0x1231CD08 .long 0x1252CD08 .long 0x1273CD08 .long 0x1294CD08 .long 0x12B5CD08 .long 0x12D6CD08 lvx 25,8,11 bdnz .Loop_ctr32_enc8x subic 11,5,256 .long 0x11EFC508 .long 0x1210C508 .long 0x1231C508 .long 0x1252C508 .long 0x1273C508 .long 0x1294C508 .long 0x12B5C508 .long 0x12D6C508 subfe 0,0,0 .long 0x11EFCD08 .long 0x1210CD08 .long 0x1231CD08 .long 0x1252CD08 .long 0x1273CD08 .long 0x1294CD08 .long 0x12B5CD08 .long 0x12D6CD08 and 0,0,11 addi 11,1,32+15 .long 0x11EFD508 .long 0x1210D508 .long 0x1231D508 .long 0x1252D508 .long 0x1273D508 .long 0x1294D508 .long 0x12B5D508 .long 0x12D6D508 lvx 24,0,11 subic 5,5,129 .long 0x11EFDD08 addi 5,5,1 .long 0x1210DD08 .long 0x1231DD08 .long 0x1252DD08 .long 0x1273DD08 .long 0x1294DD08 .long 0x12B5DD08 .long 0x12D6DD08 lvx 25,8,11 .long 0x11EFE508 .long 0x7C001E99 .long 0x1210E508 .long 0x7C281E99 .long 0x1231E508 .long 0x7C5A1E99 .long 0x1252E508 .long 0x7C7B1E99 .long 0x1273E508 .long 0x7D5C1E99 .long 0x1294E508 .long 0x7D9D1E99 .long 0x12B5E508 .long 0x7DBE1E99 .long 0x12D6E508 .long 0x7DDF1E99 addi 3,3,0x80 .long 0x11EFED08 .long 0x1210ED08 .long 0x1231ED08 .long 0x1252ED08 .long 0x1273ED08 .long 0x1294ED08 .long 0x12B5ED08 .long 0x12D6ED08 add 3,3,0 subfe. 
0,0,0 .long 0x11EFF508 vxor 0,0,31 .long 0x1210F508 vxor 1,1,31 .long 0x1231F508 vxor 2,2,31 .long 0x1252F508 vxor 3,3,31 .long 0x1273F508 vxor 10,10,31 .long 0x1294F508 vxor 12,12,31 .long 0x12B5F508 vxor 13,13,31 .long 0x12D6F508 vxor 14,14,31 bne .Lctr32_enc8x_break .long 0x100F0509 .long 0x10300D09 vadduwm 16,4,11 .long 0x10511509 vadduwm 17,4,7 vxor 15,4,23 .long 0x10721D09 vadduwm 18,16,7 vxor 16,16,23 .long 0x11535509 vadduwm 19,17,7 vxor 17,17,23 .long 0x11946509 vadduwm 20,18,7 vxor 18,18,23 .long 0x11B56D09 vadduwm 21,19,7 vxor 19,19,23 .long 0x11D67509 vadduwm 22,20,7 vxor 20,20,23 vadduwm 4,21,7 vxor 21,21,23 vxor 22,22,23 mtctr 9 .long 0x11EFC508 .long 0x7C002799 .long 0x1210C508 .long 0x7C282799 .long 0x1231C508 .long 0x7C5A2799 .long 0x1252C508 .long 0x7C7B2799 .long 0x1273C508 .long 0x7D5C2799 .long 0x1294C508 .long 0x7D9D2799 .long 0x12B5C508 .long 0x7DBE2799 .long 0x12D6C508 .long 0x7DDF2799 addi 4,4,0x80 b .Loop_ctr32_enc8x_middle .align 5 .Lctr32_enc8x_break: cmpwi 5,-0x60 blt .Lctr32_enc8x_one nop beq .Lctr32_enc8x_two cmpwi 5,-0x40 blt .Lctr32_enc8x_three nop beq .Lctr32_enc8x_four cmpwi 5,-0x20 blt .Lctr32_enc8x_five nop beq .Lctr32_enc8x_six cmpwi 5,0x00 blt .Lctr32_enc8x_seven .Lctr32_enc8x_eight: .long 0x11EF0509 .long 0x12100D09 .long 0x12311509 .long 0x12521D09 .long 0x12735509 .long 0x12946509 .long 0x12B56D09 .long 0x12D67509 .long 0x7DE02799 .long 0x7E082799 .long 0x7E3A2799 .long 0x7E5B2799 .long 0x7E7C2799 .long 0x7E9D2799 .long 0x7EBE2799 .long 0x7EDF2799 addi 4,4,0x80 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_seven: .long 0x11EF0D09 .long 0x12101509 .long 0x12311D09 .long 0x12525509 .long 0x12736509 .long 0x12946D09 .long 0x12B57509 .long 0x7DE02799 .long 0x7E082799 .long 0x7E3A2799 .long 0x7E5B2799 .long 0x7E7C2799 .long 0x7E9D2799 .long 0x7EBE2799 addi 4,4,0x70 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_six: .long 0x11EF1509 .long 0x12101D09 .long 0x12315509 .long 0x12526509 .long 0x12736D09 .long 0x12947509 .long 0x7DE02799 
.long 0x7E082799 .long 0x7E3A2799 .long 0x7E5B2799 .long 0x7E7C2799 .long 0x7E9D2799 addi 4,4,0x60 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_five: .long 0x11EF1D09 .long 0x12105509 .long 0x12316509 .long 0x12526D09 .long 0x12737509 .long 0x7DE02799 .long 0x7E082799 .long 0x7E3A2799 .long 0x7E5B2799 .long 0x7E7C2799 addi 4,4,0x50 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_four: .long 0x11EF5509 .long 0x12106509 .long 0x12316D09 .long 0x12527509 .long 0x7DE02799 .long 0x7E082799 .long 0x7E3A2799 .long 0x7E5B2799 addi 4,4,0x40 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_three: .long 0x11EF6509 .long 0x12106D09 .long 0x12317509 .long 0x7DE02799 .long 0x7E082799 .long 0x7E3A2799 addi 4,4,0x30 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_two: .long 0x11EF6D09 .long 0x12107509 .long 0x7DE02799 .long 0x7E082799 addi 4,4,0x20 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_one: .long 0x11EF7509 .long 0x7DE02799 addi 4,4,0x10 .Lctr32_enc8x_done: li 10,47 li 11,63 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 mtspr 256,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 lwz 26,368(1) lwz 27,372(1) lwz 28,376(1) lwz 29,380(1) lwz 30,384(1) lwz 31,388(1) addi 1,1,392 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_p8_ctr32_encrypt_blocks,.-aes_p8_ctr32_encrypt_blocks .globl aes_p8_xts_encrypt .type aes_p8_xts_encrypt,@function .align 5 aes_p8_xts_encrypt: mr 10,3 li 3,-1 cmplwi 5,16 .long 0x4dc00020 lis 0,0xfff0 mfspr 12,256 li 11,0 mtspr 256,0 vspltisb 9,0x07 li 3,15 lvx 8,0,8 lvsl 5,0,8 lvx 4,3,8 vperm 8,8,4,5 neg 11,10 lvsr 5,0,11 lvx 2,0,10 addi 10,10,15 cmplwi 7,0 beq 
.Lxts_enc_no_key2 lvsl 7,0,7 lwz 9,240(7) srwi 9,9,1 subi 9,9,1 li 3,16 lvx 0,0,7 lvx 1,3,7 addi 3,3,16 vperm 0,0,1,7 vxor 8,8,0 lvx 0,3,7 addi 3,3,16 mtctr 9 .Ltweak_xts_enc: vperm 1,1,0,7 .long 0x11080D08 lvx 1,3,7 addi 3,3,16 vperm 0,0,1,7 .long 0x11080508 lvx 0,3,7 addi 3,3,16 bdnz .Ltweak_xts_enc vperm 1,1,0,7 .long 0x11080D08 lvx 1,3,7 vperm 0,0,1,7 .long 0x11080509 li 8,0 b .Lxts_enc .Lxts_enc_no_key2: li 3,-16 and 5,5,3 .Lxts_enc: lvx 4,0,10 addi 10,10,16 lvsl 7,0,6 lwz 9,240(6) srwi 9,9,1 subi 9,9,1 li 3,16 vslb 10,9,9 vor 10,10,9 vspltisb 11,1 vsldoi 10,10,11,15 cmplwi 5,96 bge _aesp8_xts_encrypt6x andi. 7,5,15 subic 0,5,32 subi 7,7,16 subfe 0,0,0 and 0,0,7 add 10,10,0 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vperm 2,2,4,5 vperm 0,0,1,7 vxor 2,2,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 b .Loop_xts_enc .align 5 .Loop_xts_enc: vperm 1,1,0,7 .long 0x10420D08 lvx 1,3,6 addi 3,3,16 vperm 0,0,1,7 .long 0x10420508 lvx 0,3,6 addi 3,3,16 bdnz .Loop_xts_enc vperm 1,1,0,7 .long 0x10420D08 lvx 1,3,6 li 3,16 vperm 0,0,1,7 vxor 0,0,8 .long 0x10620509 nop .long 0x7C602799 addi 4,4,16 subic. 
5,5,16 beq .Lxts_enc_done vor 2,4,4 lvx 4,0,10 addi 10,10,16 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 subic 0,5,32 subfe 0,0,0 and 0,0,7 add 10,10,0 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 vperm 2,2,4,5 vperm 0,0,1,7 vxor 2,2,8 vxor 3,3,0 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 cmplwi 5,16 bge .Loop_xts_enc vxor 3,3,8 lvsr 5,0,5 vxor 4,4,4 vspltisb 11,-1 vperm 4,4,11,5 vsel 2,2,3,4 subi 11,4,17 subi 4,4,16 mtctr 5 li 5,16 .Loop_xts_enc_steal: lbzu 0,1(11) stb 0,16(11) bdnz .Loop_xts_enc_steal mtctr 9 b .Loop_xts_enc .Lxts_enc_done: cmplwi 8,0 beq .Lxts_enc_ret vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 .long 0x7D004799 .Lxts_enc_ret: mtspr 256,12 li 3,0 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_p8_xts_encrypt,.-aes_p8_xts_encrypt .globl aes_p8_xts_decrypt .type aes_p8_xts_decrypt,@function .align 5 aes_p8_xts_decrypt: mr 10,3 li 3,-1 cmplwi 5,16 .long 0x4dc00020 lis 0,0xfff8 mfspr 12,256 li 11,0 mtspr 256,0 andi. 0,5,15 neg 0,0 andi. 0,0,16 sub 5,5,0 vspltisb 9,0x07 li 3,15 lvx 8,0,8 lvsl 5,0,8 lvx 4,3,8 vperm 8,8,4,5 neg 11,10 lvsr 5,0,11 lvx 2,0,10 addi 10,10,15 cmplwi 7,0 beq .Lxts_dec_no_key2 lvsl 7,0,7 lwz 9,240(7) srwi 9,9,1 subi 9,9,1 li 3,16 lvx 0,0,7 lvx 1,3,7 addi 3,3,16 vperm 0,0,1,7 vxor 8,8,0 lvx 0,3,7 addi 3,3,16 mtctr 9 .Ltweak_xts_dec: vperm 1,1,0,7 .long 0x11080D08 lvx 1,3,7 addi 3,3,16 vperm 0,0,1,7 .long 0x11080508 lvx 0,3,7 addi 3,3,16 bdnz .Ltweak_xts_dec vperm 1,1,0,7 .long 0x11080D08 lvx 1,3,7 vperm 0,0,1,7 .long 0x11080509 li 8,0 b .Lxts_dec .Lxts_dec_no_key2: neg 3,5 andi. 
3,3,15 add 5,5,3 .Lxts_dec: lvx 4,0,10 addi 10,10,16 lvsl 7,0,6 lwz 9,240(6) srwi 9,9,1 subi 9,9,1 li 3,16 vslb 10,9,9 vor 10,10,9 vspltisb 11,1 vsldoi 10,10,11,15 cmplwi 5,96 bge _aesp8_xts_decrypt6x lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vperm 2,2,4,5 vperm 0,0,1,7 vxor 2,2,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 cmplwi 5,16 blt .Ltail_xts_dec b .Loop_xts_dec .align 5 .Loop_xts_dec: vperm 1,1,0,7 .long 0x10420D48 lvx 1,3,6 addi 3,3,16 vperm 0,0,1,7 .long 0x10420548 lvx 0,3,6 addi 3,3,16 bdnz .Loop_xts_dec vperm 1,1,0,7 .long 0x10420D48 lvx 1,3,6 li 3,16 vperm 0,0,1,7 vxor 0,0,8 .long 0x10620549 nop .long 0x7C602799 addi 4,4,16 subic. 5,5,16 beq .Lxts_dec_done vor 2,4,4 lvx 4,0,10 addi 10,10,16 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 vperm 2,2,4,5 vperm 0,0,1,7 vxor 2,2,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 cmplwi 5,16 bge .Loop_xts_dec .Ltail_xts_dec: vsrab 11,8,9 vaddubm 12,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 12,12,11 subi 10,10,16 add 10,10,5 vxor 2,2,8 vxor 2,2,12 .Loop_xts_dec_short: vperm 1,1,0,7 .long 0x10420D48 lvx 1,3,6 addi 3,3,16 vperm 0,0,1,7 .long 0x10420548 lvx 0,3,6 addi 3,3,16 bdnz .Loop_xts_dec_short vperm 1,1,0,7 .long 0x10420D48 lvx 1,3,6 li 3,16 vperm 0,0,1,7 vxor 0,0,12 .long 0x10620549 nop .long 0x7C602799 vor 2,4,4 lvx 4,0,10 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vperm 2,2,4,5 vperm 0,0,1,7 lvsr 5,0,5 vxor 4,4,4 vspltisb 11,-1 vperm 4,4,11,5 vsel 2,2,3,4 vxor 0,0,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 subi 11,4,1 mtctr 5 li 5,16 .Loop_xts_dec_steal: lbzu 0,1(11) stb 0,16(11) bdnz .Loop_xts_dec_steal mtctr 9 b .Loop_xts_dec .Lxts_dec_done: cmplwi 8,0 beq .Lxts_dec_ret vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 .long 0x7D004799 .Lxts_dec_ret: mtspr 256,12 li 3,0 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_p8_xts_decrypt,.-aes_p8_xts_decrypt .align 5 _aesp8_xts_encrypt6x: stwu 1,-392(1) mflr 11 li 7,175 li 3,191 stw 11,396(1) stvx 20,7,1 
addi 7,7,32 stvx 21,3,1 addi 3,3,32 stvx 22,7,1 addi 7,7,32 stvx 23,3,1 addi 3,3,32 stvx 24,7,1 addi 7,7,32 stvx 25,3,1 addi 3,3,32 stvx 26,7,1 addi 7,7,32 stvx 27,3,1 addi 3,3,32 stvx 28,7,1 addi 7,7,32 stvx 29,3,1 addi 3,3,32 stvx 30,7,1 stvx 31,3,1 li 0,-1 stw 12,364(1) li 3,0x10 stw 26,368(1) li 26,0x20 stw 27,372(1) li 27,0x30 stw 28,376(1) li 28,0x40 stw 29,380(1) li 29,0x50 stw 30,384(1) li 30,0x60 stw 31,388(1) li 31,0x70 mtspr 256,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl .Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 lvx 30,3,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,23,30,7 addi 7,1,32+15 mtctr 9 .Load_xts_enc_key: vperm 24,30,31,7 lvx 30,3,6 addi 6,6,0x20 stvx 24,0,7 vperm 25,31,30,7 lvx 31,0,6 stvx 25,3,7 addi 7,7,0x20 bdnz .Load_xts_enc_key lvx 26,3,6 vperm 24,30,31,7 lvx 27,26,6 stvx 24,0,7 vperm 25,31,26,7 lvx 28,27,6 stvx 25,3,7 addi 7,1,32+15 vperm 26,26,27,7 lvx 29,28,6 vperm 27,27,28,7 lvx 30,29,6 vperm 28,28,29,7 lvx 31,30,6 vperm 29,29,30,7 lvx 22,31,6 vperm 30,30,31,7 lvx 24,0,7 vperm 31,31,22,7 lvx 25,3,7 + + + + + + + + vperm 0,2,4,5 subi 10,10,31 vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 
31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 b .Loop_xts_enc6x .align 5 .Loop_xts_enc6x: .long 0x10E7C508 .long 0x118CC508 .long 0x11ADC508 .long 0x11CEC508 .long 0x11EFC508 .long 0x1210C508 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD08 .long 0x118CCD08 .long 0x11ADCD08 .long 0x11CECD08 .long 0x11EFCD08 .long 0x1210CD08 lvx 25,3,7 bdnz .Loop_xts_enc6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C508 .long 0x118CC508 vsrab 11,8,9 vxor 17,8,23 vaddubm 8,8,8 .long 0x11ADC508 .long 0x11CEC508 - vsldoi 11,11,11,15 .long 0x11EFC508 .long 0x1210C508 subfe. 
0,0,0 vand 11,11,10 .long 0x10E7CD08 .long 0x118CCD08 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD08 .long 0x11CECD08 vxor 1,18,31 vsrab 11,8,9 vxor 18,8,23 .long 0x11EFCD08 .long 0x1210CD08 and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D508 .long 0x118CD508 vand 11,11,10 .long 0x11ADD508 .long 0x11CED508 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD508 .long 0x1210D508 add 10,10,0 vxor 2,19,31 vsrab 11,8,9 vxor 19,8,23 vaddubm 8,8,8 .long 0x10E7DD08 .long 0x118CDD08 - vsldoi 11,11,11,15 .long 0x11ADDD08 .long 0x11CEDD08 vand 11,11,10 .long 0x11EFDD08 .long 0x1210DD08 addi 7,1,32+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E508 .long 0x118CE508 vxor 3,20,31 vsrab 11,8,9 vxor 20,8,23 .long 0x11ADE508 .long 0x11CEE508 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE508 .long 0x1210E508 lvx 24,0,7 vand 11,11,10 .long 0x10E7ED08 .long 0x118CED08 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED08 .long 0x11CEED08 vxor 4,21,31 vsrab 11,8,9 vxor 21,8,23 .long 0x11EFED08 .long 0x1210ED08 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F508 .long 0x118CF508 vand 11,11,10 .long 0x11ADF508 .long 0x11CEF508 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF508 .long 0x1210F508 vxor 5,22,31 vsrab 11,8,9 vxor 22,8,23 .long 0x10E70509 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D09 .long 0x7C235699 .long 0x11AD1509 .long 0x7C5A5699 vand 11,11,10 .long 0x11CE1D09 .long 0x7C7B5699 .long 0x11EF2509 .long 0x7C9C5699 - vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x11702D09 .long 0x7CBD5699 addi 10,10,0x60 .long 0x7CE02799 vxor 7,0,17 .long 0x7D832799 vxor 12,1,18 .long 0x7DBA2799 vxor 13,2,19 .long 0x7DDB2799 vxor 14,3,20 .long 0x7DFC2799 vxor 15,4,21 .long 0x7D7D2799 vxor 16,5,22 addi 4,4,0x60 mtctr 9 beq .Loop_xts_enc6x + xxlor 32+10, 2, 2 + addic. 
5,5,0x60 beq .Lxts_enc6x_zero cmpwi 5,0x20 blt .Lxts_enc6x_one nop beq .Lxts_enc6x_two cmpwi 5,0x40 blt .Lxts_enc6x_three nop beq .Lxts_enc6x_four .Lxts_enc6x_five: vxor 7,1,17 vxor 12,2,18 vxor 13,3,19 vxor 14,4,20 vxor 15,5,21 bl _aesp8_xts_enc5x vor 17,22,22 .long 0x7CE02799 .long 0x7D832799 .long 0x7DBA2799 vxor 11,15,22 .long 0x7DDB2799 .long 0x7DFC2799 addi 4,4,0x50 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_four: vxor 7,2,17 vxor 12,3,18 vxor 13,4,19 vxor 14,5,20 vxor 15,15,15 bl _aesp8_xts_enc5x vor 17,21,21 .long 0x7CE02799 .long 0x7D832799 vxor 11,14,21 .long 0x7DBA2799 .long 0x7DDB2799 addi 4,4,0x40 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_three: vxor 7,3,17 vxor 12,4,18 vxor 13,5,19 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_enc5x vor 17,20,20 .long 0x7CE02799 vxor 11,13,20 .long 0x7D832799 .long 0x7DBA2799 addi 4,4,0x30 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_two: vxor 7,4,17 vxor 12,5,18 vxor 13,13,13 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_enc5x vor 17,19,19 vxor 11,12,19 .long 0x7CE02799 .long 0x7D832799 addi 4,4,0x20 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_one: vxor 7,5,17 nop .Loop_xts_enc1x: .long 0x10E7C508 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD08 lvx 25,3,7 bdnz .Loop_xts_enc1x add 10,10,31 cmpwi 31,0 .long 0x10E7C508 subi 10,10,16 .long 0x10E7CD08 lvsr 5,0,31 .long 0x10E7D508 .long 0x7C005699 .long 0x10E7DD08 addi 7,1,32+15 .long 0x10E7E508 lvx 24,0,7 .long 0x10E7ED08 lvx 25,3,7 vxor 17,17,31 .long 0x10E7F508 vperm 0,0,0,5 .long 0x10E78D09 vor 17,18,18 vxor 11,7,18 .long 0x7CE02799 addi 4,4,0x10 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_zero: cmpwi 31,0 beq .Lxts_enc6x_done add 10,10,31 subi 10,10,16 .long 0x7C005699 lvsr 5,0,31 vperm 0,0,0,5 vxor 11,11,17 .Lxts_enc6x_steal: vxor 0,0,17 vxor 7,7,7 vspltisb 12,-1 vperm 7,7,12,5 vsel 7,0,11,7 subi 30,4,17 subi 4,4,16 mtctr 31 .Loop_xts_enc6x_steal: lbzu 0,1(30) stb 0,16(30) bdnz 
.Loop_xts_enc6x_steal li 31,0 mtctr 9 b .Loop_xts_enc1x .align 4 .Lxts_enc6x_done: cmplwi 8,0 beq .Lxts_enc6x_ret vxor 8,17,23 .long 0x7D004799 .Lxts_enc6x_ret: mtlr 11 li 10,47 li 11,63 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 mtspr 256,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 lwz 26,368(1) lwz 27,372(1) lwz 28,376(1) lwz 29,380(1) lwz 30,384(1) lwz 31,388(1) addi 1,1,392 blr .long 0 .byte 0,12,0x04,1,0x80,6,6,0 .long 0 .align 5 _aesp8_xts_enc5x: .long 0x10E7C508 .long 0x118CC508 .long 0x11ADC508 .long 0x11CEC508 .long 0x11EFC508 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD08 .long 0x118CCD08 .long 0x11ADCD08 .long 0x11CECD08 .long 0x11EFCD08 lvx 25,3,7 bdnz _aesp8_xts_enc5x add 10,10,31 cmpwi 31,0 .long 0x10E7C508 .long 0x118CC508 .long 0x11ADC508 .long 0x11CEC508 .long 0x11EFC508 subi 10,10,16 .long 0x10E7CD08 .long 0x118CCD08 .long 0x11ADCD08 .long 0x11CECD08 .long 0x11EFCD08 vxor 17,17,31 .long 0x10E7D508 lvsr 5,0,31 .long 0x118CD508 .long 0x11ADD508 .long 0x11CED508 .long 0x11EFD508 vxor 1,18,31 .long 0x10E7DD08 .long 0x7C005699 .long 0x118CDD08 .long 0x11ADDD08 .long 0x11CEDD08 .long 0x11EFDD08 vxor 2,19,31 addi 7,1,32+15 .long 0x10E7E508 .long 0x118CE508 .long 0x11ADE508 .long 0x11CEE508 .long 0x11EFE508 lvx 24,0,7 vxor 3,20,31 .long 0x10E7ED08 .long 0x118CED08 .long 0x11ADED08 .long 0x11CEED08 .long 0x11EFED08 lvx 25,3,7 vxor 4,21,31 .long 0x10E7F508 vperm 0,0,0,5 .long 0x118CF508 .long 0x11ADF508 .long 0x11CEF508 .long 0x11EFF508 .long 0x10E78D09 .long 0x118C0D09 .long 0x11AD1509 .long 0x11CE1D09 .long 0x11EF2509 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 
.align 5 _aesp8_xts_decrypt6x: stwu 1,-392(1) mflr 11 li 7,175 li 3,191 stw 11,396(1) stvx 20,7,1 addi 7,7,32 stvx 21,3,1 addi 3,3,32 stvx 22,7,1 addi 7,7,32 stvx 23,3,1 addi 3,3,32 stvx 24,7,1 addi 7,7,32 stvx 25,3,1 addi 3,3,32 stvx 26,7,1 addi 7,7,32 stvx 27,3,1 addi 3,3,32 stvx 28,7,1 addi 7,7,32 stvx 29,3,1 addi 3,3,32 stvx 30,7,1 stvx 31,3,1 li 0,-1 stw 12,364(1) li 3,0x10 stw 26,368(1) li 26,0x20 stw 27,372(1) li 27,0x30 stw 28,376(1) li 28,0x40 stw 29,380(1) li 29,0x50 stw 30,384(1) li 30,0x60 stw 31,388(1) li 31,0x70 mtspr 256,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl .Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 lvx 30,3,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,23,30,7 addi 7,1,32+15 mtctr 9 .Load_xts_dec_key: vperm 24,30,31,7 lvx 30,3,6 addi 6,6,0x20 stvx 24,0,7 vperm 25,31,30,7 lvx 31,0,6 stvx 25,3,7 addi 7,7,0x20 bdnz .Load_xts_dec_key lvx 26,3,6 vperm 24,30,31,7 lvx 27,26,6 stvx 24,0,7 vperm 25,31,26,7 lvx 28,27,6 stvx 25,3,7 addi 7,1,32+15 vperm 26,26,27,7 lvx 29,28,6 vperm 27,27,28,7 lvx 30,29,6 vperm 28,28,29,7 lvx 31,30,6 vperm 29,29,30,7 lvx 22,31,6 vperm 30,30,31,7 lvx 24,0,7 vperm 31,31,22,7 lvx 25,3,7 vperm 0,2,4,5 subi 10,10,31 vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 
31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 b .Loop_xts_dec6x .align 5 .Loop_xts_dec6x: .long 0x10E7C548 .long 0x118CC548 .long 0x11ADC548 .long 0x11CEC548 .long 0x11EFC548 .long 0x1210C548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 .long 0x118CCD48 .long 0x11ADCD48 .long 0x11CECD48 .long 0x11EFCD48 .long 0x1210CD48 lvx 25,3,7 bdnz .Loop_xts_dec6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C548 .long 0x118CC548 vsrab 11,8,9 vxor 17,8,23 vaddubm 8,8,8 .long 0x11ADC548 .long 0x11CEC548 - vsldoi 11,11,11,15 .long 0x11EFC548 .long 0x1210C548 subfe. 
0,0,0 vand 11,11,10 .long 0x10E7CD48 .long 0x118CCD48 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD48 .long 0x11CECD48 vxor 1,18,31 vsrab 11,8,9 vxor 18,8,23 .long 0x11EFCD48 .long 0x1210CD48 and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D548 .long 0x118CD548 vand 11,11,10 .long 0x11ADD548 .long 0x11CED548 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD548 .long 0x1210D548 add 10,10,0 vxor 2,19,31 vsrab 11,8,9 vxor 19,8,23 vaddubm 8,8,8 .long 0x10E7DD48 .long 0x118CDD48 - vsldoi 11,11,11,15 .long 0x11ADDD48 .long 0x11CEDD48 vand 11,11,10 .long 0x11EFDD48 .long 0x1210DD48 addi 7,1,32+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E548 .long 0x118CE548 vxor 3,20,31 vsrab 11,8,9 vxor 20,8,23 .long 0x11ADE548 .long 0x11CEE548 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE548 .long 0x1210E548 lvx 24,0,7 vand 11,11,10 .long 0x10E7ED48 .long 0x118CED48 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED48 .long 0x11CEED48 vxor 4,21,31 vsrab 11,8,9 vxor 21,8,23 .long 0x11EFED48 .long 0x1210ED48 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F548 .long 0x118CF548 vand 11,11,10 .long 0x11ADF548 .long 0x11CEF548 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF548 .long 0x1210F548 vxor 5,22,31 vsrab 11,8,9 vxor 22,8,23 .long 0x10E70549 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D49 .long 0x7C235699 .long 0x11AD1549 .long 0x7C5A5699 vand 11,11,10 .long 0x11CE1D49 .long 0x7C7B5699 .long 0x11EF2549 .long 0x7C9C5699 - vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x12102D49 .long 0x7CBD5699 addi 10,10,0x60 .long 0x7CE02799 vxor 7,0,17 .long 0x7D832799 vxor 12,1,18 .long 0x7DBA2799 vxor 13,2,19 .long 0x7DDB2799 vxor 14,3,20 .long 0x7DFC2799 vxor 15,4,21 .long 0x7E1D2799 vxor 16,5,22 addi 4,4,0x60 mtctr 9 beq .Loop_xts_dec6x + xxlor 32+10, 2, 2 + addic. 
5,5,0x60 beq .Lxts_dec6x_zero cmpwi 5,0x20 blt .Lxts_dec6x_one nop beq .Lxts_dec6x_two cmpwi 5,0x40 blt .Lxts_dec6x_three nop beq .Lxts_dec6x_four .Lxts_dec6x_five: vxor 7,1,17 vxor 12,2,18 vxor 13,3,19 vxor 14,4,20 vxor 15,5,21 bl _aesp8_xts_dec5x vor 17,22,22 vxor 18,8,23 .long 0x7CE02799 vxor 7,0,18 .long 0x7D832799 .long 0x7DBA2799 .long 0x7DDB2799 .long 0x7DFC2799 addi 4,4,0x50 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_four: vxor 7,2,17 vxor 12,3,18 vxor 13,4,19 vxor 14,5,20 vxor 15,15,15 bl _aesp8_xts_dec5x vor 17,21,21 vor 18,22,22 .long 0x7CE02799 vxor 7,0,22 .long 0x7D832799 .long 0x7DBA2799 .long 0x7DDB2799 addi 4,4,0x40 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_three: vxor 7,3,17 vxor 12,4,18 vxor 13,5,19 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_dec5x vor 17,20,20 vor 18,21,21 .long 0x7CE02799 vxor 7,0,21 .long 0x7D832799 .long 0x7DBA2799 addi 4,4,0x30 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_two: vxor 7,4,17 vxor 12,5,18 vxor 13,13,13 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_dec5x vor 17,19,19 vor 18,20,20 .long 0x7CE02799 vxor 7,0,20 .long 0x7D832799 addi 4,4,0x20 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_one: vxor 7,5,17 nop .Loop_xts_dec1x: .long 0x10E7C548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 lvx 25,3,7 bdnz .Loop_xts_dec1x subi 0,31,1 .long 0x10E7C548 andi. 
0,0,16 cmpwi 31,0 .long 0x10E7CD48 sub 10,10,0 .long 0x10E7D548 .long 0x7C005699 .long 0x10E7DD48 addi 7,1,32+15 .long 0x10E7E548 lvx 24,0,7 .long 0x10E7ED48 lvx 25,3,7 vxor 17,17,31 .long 0x10E7F548 mtctr 9 .long 0x10E78D49 vor 17,18,18 vor 18,19,19 .long 0x7CE02799 addi 4,4,0x10 vxor 7,0,19 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_zero: cmpwi 31,0 beq .Lxts_dec6x_done .long 0x7C005699 vxor 7,0,18 .Lxts_dec6x_steal: .long 0x10E7C548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 lvx 25,3,7 bdnz .Lxts_dec6x_steal add 10,10,31 .long 0x10E7C548 cmpwi 31,0 .long 0x10E7CD48 .long 0x7C005699 .long 0x10E7D548 lvsr 5,0,31 .long 0x10E7DD48 addi 7,1,32+15 .long 0x10E7E548 lvx 24,0,7 .long 0x10E7ED48 lvx 25,3,7 vxor 18,18,31 .long 0x10E7F548 vperm 0,0,0,5 .long 0x11679549 .long 0x7D602799 vxor 7,7,7 vspltisb 12,-1 vperm 7,7,12,5 vsel 7,0,11,7 vxor 7,7,17 subi 30,4,1 mtctr 31 .Loop_xts_dec6x_steal: lbzu 0,1(30) stb 0,16(30) bdnz .Loop_xts_dec6x_steal li 31,0 mtctr 9 b .Loop_xts_dec1x .align 4 .Lxts_dec6x_done: cmplwi 8,0 beq .Lxts_dec6x_ret vxor 8,17,23 .long 0x7D004799 .Lxts_dec6x_ret: mtlr 11 li 10,47 li 11,63 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 mtspr 256,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 lwz 26,368(1) lwz 27,372(1) lwz 28,376(1) lwz 29,380(1) lwz 30,384(1) lwz 31,388(1) addi 1,1,392 blr .long 0 .byte 0,12,0x04,1,0x80,6,6,0 .long 0 .align 5 _aesp8_xts_dec5x: .long 0x10E7C548 .long 0x118CC548 .long 0x11ADC548 .long 0x11CEC548 .long 0x11EFC548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 .long 0x118CCD48 .long 0x11ADCD48 .long 0x11CECD48 .long 
0x11EFCD48 lvx 25,3,7 bdnz _aesp8_xts_dec5x subi 0,31,1 .long 0x10E7C548 .long 0x118CC548 .long 0x11ADC548 .long 0x11CEC548 .long 0x11EFC548 andi. 0,0,16 cmpwi 31,0 .long 0x10E7CD48 .long 0x118CCD48 .long 0x11ADCD48 .long 0x11CECD48 .long 0x11EFCD48 vxor 17,17,31 sub 10,10,0 .long 0x10E7D548 .long 0x118CD548 .long 0x11ADD548 .long 0x11CED548 .long 0x11EFD548 vxor 1,18,31 .long 0x10E7DD48 .long 0x7C005699 .long 0x118CDD48 .long 0x11ADDD48 .long 0x11CEDD48 .long 0x11EFDD48 vxor 2,19,31 addi 7,1,32+15 .long 0x10E7E548 .long 0x118CE548 .long 0x11ADE548 .long 0x11CEE548 .long 0x11EFE548 lvx 24,0,7 vxor 3,20,31 .long 0x10E7ED48 .long 0x118CED48 .long 0x11ADED48 .long 0x11CEED48 .long 0x11EFED48 lvx 25,3,7 vxor 4,21,31 .long 0x10E7F548 .long 0x118CF548 .long 0x11ADF548 .long 0x11CEF548 .long 0x11EFF548 .long 0x10E78D49 .long 0x118C0D49 .long 0x11AD1549 .long 0x11CE1D49 .long 0x11EF2549 mtctr 9 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 diff --git a/sys/crypto/openssl/powerpc/poly1305-ppc.S b/sys/crypto/openssl/powerpc/poly1305-ppc.S index d6fe34693724..41ef89e4f384 100644 --- a/sys/crypto/openssl/powerpc/poly1305-ppc.S +++ b/sys/crypto/openssl/powerpc/poly1305-ppc.S @@ -1,1301 +1,1301 @@ /* Do not modify. This file is auto-generated from poly1305-ppc.pl. */ .machine "any" .text .globl poly1305_init_int .type poly1305_init_int,@function .align 4 poly1305_init_int: xor 0,0,0 stw 0,0(3) stw 0,4(3) stw 0,8(3) stw 0,12(3) stw 0,16(3) stw 0,24(3) .long 0x7c040040 beq- .Lno_key li 8,4 lwbrx 7,0,4 li 9,8 lwbrx 8,8,4 li 10,12 lwbrx 9,9,4 lwbrx 10,10,4 lis 0,0xf000 li 12,-4 andc 12,12,0 andc 7,7,0 and 8,8,12 and 9,9,12 and 10,10,12 stw 7,32(3) stw 8,36(3) stw 9,40(3) stw 10,44(3) .Lno_key: xor 3,3,3 blr .long 0 .byte 0,12,0x14,0,0,0,2,0 .size poly1305_init_int,.-poly1305_init_int .globl poly1305_blocks .type poly1305_blocks,@function .align 4 poly1305_blocks: .Lpoly1305_blocks: srwi. 
5,5,4 beq- .Labort stwu 1,-96(1) mflr 0 stw 14,24(1) stw 15,28(1) stw 16,32(1) stw 17,36(1) stw 18,40(1) stw 19,44(1) stw 20,48(1) stw 21,52(1) stw 22,56(1) stw 23,60(1) stw 24,64(1) stw 25,68(1) stw 26,72(1) stw 27,76(1) stw 28,80(1) stw 29,84(1) stw 30,88(1) stw 31,92(1) stw 0,100(1) lwz 12,32(3) lwz 14,36(3) lwz 15,40(3) lwz 16,44(3) lwz 7,0(3) lwz 8,4(3) lwz 9,8(3) lwz 10,12(3) lwz 11,16(3) srwi 17,14,2 srwi 18,15,2 srwi 19,16,2 add 17,17,14 add 18,18,15 add 19,19,16 mtctr 5 li 0,3 b .Loop .align 4 .Loop: li 29,4 lwbrx 28,0,4 li 30,8 lwbrx 29,29,4 li 31,12 lwbrx 30,30,4 lwbrx 31,31,4 addi 4,4,16 addc 7,7,28 adde 8,8,29 adde 9,9,30 mullw 28,7,12 mulhwu 24,7,12 mullw 29,7,14 mulhwu 25,7,14 mullw 30,7,15 mulhwu 26,7,15 adde 10,10,31 adde 11,11,6 mullw 31,7,16 mulhwu 27,7,16 mullw 20,8,19 mulhwu 21,8,19 mullw 22,8,12 mulhwu 23,8,12 addc 28,28,20 adde 24,24,21 mullw 20,8,14 mulhwu 21,8,14 addc 29,29,22 adde 25,25,23 mullw 22,8,15 mulhwu 23,8,15 addc 30,30,20 adde 26,26,21 mullw 20,9,18 mulhwu 21,9,18 addc 31,31,22 adde 27,27,23 mullw 22,9,19 mulhwu 23,9,19 addc 28,28,20 adde 24,24,21 mullw 20,9,12 mulhwu 21,9,12 addc 29,29,22 adde 25,25,23 mullw 22,9,14 mulhwu 23,9,14 addc 30,30,20 adde 26,26,21 mullw 20,10,17 mulhwu 21,10,17 addc 31,31,22 adde 27,27,23 mullw 22,10,18 mulhwu 23,10,18 addc 28,28,20 adde 24,24,21 mullw 20,10,19 mulhwu 21,10,19 addc 29,29,22 adde 25,25,23 mullw 22,10,12 mulhwu 23,10,12 addc 30,30,20 adde 26,26,21 mullw 20,11,17 addc 31,31,22 adde 27,27,23 addc 29,29,20 mullw 21,11,18 addze 25,25 addc 30,30,21 addze 26,26 mullw 22,11,19 addc 31,31,22 addze 27,27 mullw 11,11,12 addc 8,29,24 adde 9,30,25 adde 10,31,26 adde 11,11,27 andc 24,11,0 and 11,11,0 srwi 25,24,2 add 24,24,25 addc 7,28,24 addze 8,8 addze 9,9 addze 10,10 addze 11,11 bdnz .Loop stw 7,0(3) stw 8,4(3) stw 9,8(3) stw 10,12(3) stw 11,16(3) lwz 14,24(1) lwz 15,28(1) lwz 16,32(1) lwz 17,36(1) lwz 18,40(1) lwz 19,44(1) lwz 20,48(1) lwz 21,52(1) lwz 22,56(1) lwz 23,60(1) lwz 24,64(1) lwz 
25,68(1) lwz 26,72(1) lwz 27,76(1) lwz 28,80(1) lwz 29,84(1) lwz 30,88(1) lwz 31,92(1) addi 1,1,96 .Labort: blr .long 0 .byte 0,12,4,1,0x80,18,4,0 .size poly1305_blocks,.-poly1305_blocks .globl poly1305_emit .type poly1305_emit,@function .align 5 poly1305_emit: lwz 0,24(3) lwz 6,0(3) lwz 7,4(3) lwz 8,8(3) lwz 9,12(3) lwz 10,16(3) cmplwi 0,0 beq .Lemit_base2_32 slwi 11,7,26 srwi 7,7,6 slwi 12,8,20 srwi 8,8,12 addc 6,6,11 slwi 11,9,14 srwi 9,9,18 adde 7,7,12 slwi 12,10,8 srwi 10,10,24 adde 8,8,11 adde 9,9,12 addze 10,10 .Lemit_base2_32: addic 0,6,5 addze 0,7 addze 0,8 addze 0,9 addze 0,10 srwi 0,0,2 neg 0,0 andi. 0,0,5 addc 6,6,0 lwz 0,0(5) addze 7,7 lwz 11,4(5) addze 8,8 lwz 12,8(5) addze 9,9 lwz 10,12(5) addc 6,6,0 adde 7,7,11 adde 8,8,12 adde 9,9,10 addi 3,4,-1 addi 4,4,7 stbu 6,1(3) srwi 6,6,8 stbu 8,1(4) srwi 8,8,8 stbu 6,1(3) srwi 6,6,8 stbu 8,1(4) srwi 8,8,8 stbu 6,1(3) srwi 6,6,8 stbu 8,1(4) srwi 8,8,8 stbu 6,1(3) stbu 8,1(4) stbu 7,1(3) srwi 7,7,8 stbu 9,1(4) srwi 9,9,8 stbu 7,1(3) srwi 7,7,8 stbu 9,1(4) srwi 9,9,8 stbu 7,1(3) srwi 7,7,8 stbu 9,1(4) srwi 9,9,8 stbu 7,1(3) stbu 9,1(4) blr .long 0 .byte 0,12,0x14,0,0,0,3,0 .size poly1305_emit,.-poly1305_emit .globl poly1305_blocks_vsx .type poly1305_blocks_vsx,@function .align 5 poly1305_blocks_vsx: lwz 7,24(3) cmplwi 5,128 bge __poly1305_blocks_vsx cmplwi 7,0 beq .Lpoly1305_blocks lwz 7,0(3) lwz 8,4(3) lwz 9,8(3) lwz 10,12(3) lwz 11,16(3) slwi 0,8,26 srwi 8,8,6 slwi 12,9,20 srwi 9,9,12 addc 7,7,0 slwi 0,10,14 srwi 10,10,18 adde 8,8,12 slwi 12,11,8 srwi 11,11,24 adde 9,9,0 li 0,0 adde 10,10,12 addze 11,11 stw 7,0(3) stw 8,4(3) stw 9,8(3) stw 10,12(3) stw 11,16(3) stw 0,24(3) b .Lpoly1305_blocks .long 0 .byte 0,12,0x14,0,0,0,4,0 .size poly1305_blocks_vsx,.-poly1305_blocks_vsx .align 5 __poly1305_mul: .long 0x11E05088 .long 0x12015088 .long 0x12225088 .long 0x12435088 .long 0x12645088 .long 0x12846088 .long 0x11EFA0C0 .long 0x12805888 .long 0x1210A0C0 .long 0x12815888 .long 0x1231A0C0 .long 0x12825888 .long 
0x1252A0C0 .long 0x12835888 .long 0x1273A0C0 .long 0x12837088 .long 0x11EFA0C0 .long 0x12847088 .long 0x1210A0C0 .long 0x12806888 .long 0x1231A0C0 .long 0x12816888 .long 0x1252A0C0 .long 0x12826888 .long 0x1273A0C0 .long 0x12823888 .long 0x11EFA0C0 .long 0x12833888 .long 0x1210A0C0 .long 0x12843888 .long 0x1231A0C0 .long 0x12803088 .long 0x1252A0C0 .long 0x12813088 .long 0x1273A0C0 .long 0x12814888 .long 0x11EFA0C0 .long 0x12824888 .long 0x1210A0C0 .long 0x12834888 .long 0x1231A0C0 .long 0x12844888 .long 0x1252A0C0 .long 0x12804088 .long 0x1273A0C0 vspltisb 20,2 .long 0x1092CEC4 .long 0x102FCEC4 vand 3,18,29 vand 0,15,29 .long 0x108498C0 .long 0x102180C0 .long 0x1264CEC4 .long 0x1201CEC4 vand 4,4,29 vand 1,1,29 .long 0x100098C0 .long 0x105180C0 .long 0x1273A5C4 .long 0x1222CEC4 vand 2,2,29 .long 0x100098C0 .long 0x106388C0 .long 0x11E0CEC4 .long 0x1243CEC4 vand 0,0,29 vand 3,3,29 .long 0x102178C0 .long 0x108490C0 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .size __poly1305_mul,.-__poly1305_mul .align 5 __poly1305_blocks_vsx: - stwu 1,-384(1) + stwu 1,-368(1) mflr 0 li 10,167 li 11,183 mfspr 12,256 stvx 20,10,1 addi 10,10,32 stvx 21,11,1 addi 11,11,32 stvx 22,10,1 addi 10,10,32 - stvx 23,10,1 - addi 10,10,32 - stvx 24,11,1 + stvx 23,11,1 addi 11,11,32 - stvx 25,10,1 + stvx 24,10,1 addi 10,10,32 + stvx 25,11,1 + addi 11,11,32 stvx 26,10,1 addi 10,10,32 stvx 27,11,1 addi 11,11,32 stvx 28,10,1 addi 10,10,32 stvx 29,11,1 addi 11,11,32 stvx 30,10,1 stvx 31,11,1 - stw 12,360(1) + stw 12,344(1) li 12,-1 mtspr 256,12 - stw 27,364(1) - stw 28,368(1) - stw 29,372(1) - stw 30,376(1) - stw 31,380(1) - stw 0,388(1) + stw 27,348(1) + stw 28,352(1) + stw 29,356(1) + stw 30,360(1) + stw 31,364(1) + stw 0,372(1) bl .LPICmeup li 27,0x10 li 28,0x20 li 29,0x30 li 30,0x40 li 31,0x50 .long 0x7FA06699 .long 0x7F3B6699 .long 0x7F7C6699 .long 0x7FFD6699 .long 0x7FDE6699 cmplwi 7,0 bne .Lskip_init_vsx lwz 8,32(3) lwz 9,36(3) lwz 10,40(3) lwz 11,44(3) extrwi 7,8,26,6 extrwi 8,8,6,0 insrwi 8,9,20,6 
extrwi 9,9,12,0 insrwi 9,10,14,6 extrwi 10,10,18,0 insrwi 10,11,8,6 extrwi 11,11,24,0 .long 0x7D4701E7 slwi 7,8,2 .long 0x7D6801E7 add 8,8,7 .long 0x7D8801E7 slwi 8,9,2 .long 0x7DA901E7 add 9,9,8 .long 0x7DC901E7 slwi 9,10,2 .long 0x7CCA01E7 add 10,10,9 .long 0x7CEA01E7 slwi 10,11,2 .long 0x7D0B01E7 add 11,11,10 .long 0x7D2B01E7 vor 0,10,10 vor 1,11,11 vor 2,13,13 vor 3,6,6 vor 4,8,8 bl __poly1305_mul .long 0xF1405057 .long 0xF1615857 .long 0xF1A26857 .long 0xF0C33057 .long 0xF1044057 .long 0xF0000057 .long 0xF0210857 .long 0xF0421057 .long 0xF0631857 .long 0xF0842057 .long 0x118BA5C4 .long 0x11CDA5C4 .long 0x10E6A5C4 .long 0x1128A5C4 .long 0x118C58C0 .long 0x11CE68C0 .long 0x10E730C0 .long 0x112940C0 bl __poly1305_mul addi 7,3,0x60 lwz 8,0(3) lwz 9,4(3) lwz 10,8(3) lwz 11,12(3) lwz 0,16(3) .long 0x114A068C .long 0x116B0E8C .long 0x11AD168C .long 0x10C61E8C .long 0x1108268C vslw 12,11,20 vslw 14,13,20 vslw 7,6,20 vslw 9,8,20 vadduwm 12,12,11 vadduwm 14,14,13 vadduwm 7,7,6 vadduwm 9,9,8 .long 0x7D5D1F99 .long 0x7D7E1F99 .long 0x7D9F1F99 .long 0x7DA03F99 .long 0x7DDB3F99 .long 0x7CDC3F99 .long 0x7CFD3F99 .long 0x7D1E3F99 .long 0x7D3F3F99 extrwi 7,8,26,6 extrwi 8,8,6,0 .long 0x7C0701E7 insrwi 8,9,20,6 extrwi 9,9,12,0 .long 0x7C2801E7 insrwi 9,10,14,6 extrwi 10,10,18,0 .long 0x7C4901E7 insrwi 10,11,8,6 extrwi 11,11,24,0 .long 0x7C6A01E7 insrwi 11,0,3,5 .long 0x7C8B01E7 li 0,1 stw 0,24(3) b .Loaded_vsx .align 4 .Lskip_init_vsx: li 27,4 li 28,8 li 29,12 li 30,16 .long 0x7C001819 .long 0x7C3B1819 .long 0x7C5C1819 .long 0x7C7D1819 .long 0x7C9E1819 .Loaded_vsx: li 27,0x10 li 28,0x20 li 29,0x30 li 30,0x40 li 31,0x50 li 7,0x60 li 8,0x70 addi 10,3,64 addi 11,1,39 vxor 20,20,20 .long 0xF000A057 .long 0xF021A057 .long 0xF042A057 .long 0xF063A057 .long 0xF084A057 .long 0x7F5F6699 .long 0x7EA02699 .long 0x7EDB2699 .long 0x7EFC2699 .long 0x7F1D2699 vperm 21,21,21,26 vperm 22,22,22,26 vperm 23,23,23,26 vperm 24,24,24,26 .long 0xF0B5B057 vspltisb 26,4 vperm 7,21,22,31 vspltisb 28,14 
.long 0xF115B357 .long 0x10C5CEC4 .long 0x10E7D6C4 .long 0x1128DEC4 .long 0x1108E6C4 vand 5,5,29 vand 6,6,29 vand 7,7,29 vand 8,8,29 .long 0xF2B7C057 vperm 22,23,24,31 .long 0xF2F7C357 .long 0x1295CEC4 .long 0x12D6D6C4 .long 0x1317DEC4 .long 0x12F7E6C4 vand 21,21,29 vand 20,20,29 vand 22,22,29 vand 23,23,29 .long 0x11384E8C .long 0x10B52E8C .long 0x10D4368C .long 0x10F63E8C .long 0x1117468C vor 9,9,30 .long 0x7D5D1A99 .long 0x7D605299 .long 0x7D9B5299 .long 0x7DBC5299 .long 0x7DDD5299 .long 0x7EBE5299 .long 0x7EDF5299 .long 0x7EE75299 .long 0x7F085299 stvx 11,0,11 stvx 12,27,11 stvx 13,28,11 stvx 14,29,11 stvx 21,30,11 stvx 22,31,11 stvx 23,7,11 stvx 24,8,11 addi 4,4,0x40 addi 12,12,0x50 addi 0,5,-64 srdi 0,0,6 mtctr 0 b .Loop_vsx .align 4 .Loop_vsx: .long 0x11E55288 .long 0x12055A88 .long 0x12256A88 .long 0x12466A88 .long 0x12865288 .long 0x1210A0C0 .long 0x12865A88 .long 0x1231A0C0 .long 0x12676A88 .long 0x12896288 .long 0x11EFA0C0 .long 0x12875A88 .long 0x1252A0C0 lvx 12,31,11 .long 0x12885A88 .long 0x1273A0C0 lvx 11,30,11 .long 0x104238C0 .long 0x100028C0 .long 0x106340C0 .long 0x102130C0 .long 0x108448C0 .long 0x12887288 .long 0x11EFA0C0 .long 0x12897288 .long 0x1210A0C0 .long 0x12875288 .long 0x1231A0C0 .long 0x12885288 .long 0x1252A0C0 lvx 14,8,11 .long 0x12895288 .long 0x1273A0C0 lvx 13,7,11 .long 0x12876288 .long 0x11EFA0C0 .long 0x12886288 .long 0x1210A0C0 .long 0x12896288 .long 0x1231A0C0 .long 0x12855A88 .long 0x1252A0C0 .long 0x12865A88 .long 0x1273A0C0 .long 0x7F406699 .long 0x7EA02699 .long 0x7EDB2699 .long 0x7EFC2699 .long 0x7F1D2699 vperm 21,21,21,26 vperm 22,22,22,26 vperm 23,23,23,26 vperm 24,24,24,26 .long 0x12867288 .long 0x11EFA0C0 .long 0x12877288 .long 0x1210A0C0 .long 0x12887288 .long 0x1231A0C0 .long 0x12897288 .long 0x1252A0C0 .long 0x12856A88 .long 0x1273A0C0 .long 0xF0B5B057 vspltisb 26,4 vperm 7,21,22,31 .long 0xF115B357 .long 0x12805088 .long 0x11EFA0C0 .long 0x12815088 .long 0x1210A0C0 .long 0x12825088 .long 0x1231A0C0 .long 
0x12835088 .long 0x1252A0C0 .long 0x12845088 .long 0x1273A0C0 .long 0xF2B7C057 vperm 22,23,24,31 .long 0xF2F7C357 .long 0x12826088 .long 0x11EFA0C0 .long 0x12836088 .long 0x1210A0C0 .long 0x12846088 .long 0x1231A0C0 .long 0x12805888 .long 0x1252A0C0 lvx 12,27,11 .long 0x12815888 .long 0x1273A0C0 lvx 11,0,11 .long 0x10C5CEC4 .long 0x10E7D6C4 .long 0x1128DEC4 .long 0x1108E6C4 .long 0x12817088 .long 0x11EFA0C0 .long 0x12827088 .long 0x1210A0C0 .long 0x12837088 .long 0x1231A0C0 .long 0x12847088 .long 0x1252A0C0 lvx 14,29,11 .long 0x12806888 .long 0x1273A0C0 lvx 13,28,11 vand 5,5,29 vand 6,6,29 vand 7,7,29 vand 8,8,29 .long 0x12846088 .long 0x11EFA0C0 .long 0x12805888 .long 0x1210A0C0 .long 0x12815888 .long 0x1231A0C0 .long 0x12825888 .long 0x1252A0C0 .long 0x12835888 .long 0x1273A0C0 .long 0x12D6D6C4 .long 0x1355CEC4 .long 0x1317DEC4 .long 0x12F7E6C4 .long 0x12837088 .long 0x11EFA0C0 .long 0x12847088 .long 0x1210A0C0 .long 0x12806888 .long 0x1231A0C0 .long 0x12816888 .long 0x1252A0C0 .long 0x12826888 .long 0x1273A0C0 vand 21,21,29 vand 26,26,29 vand 22,22,29 vand 23,23,29 vspltisb 20,2 .long 0x1092CEC4 .long 0x102FCEC4 vand 3,18,29 vand 0,15,29 .long 0x108498C0 .long 0x102180C0 .long 0x11384E8C .long 0x10B52E8C .long 0x10DA368C .long 0x10F63E8C .long 0x1117468C vor 9,9,30 .long 0x1264CEC4 .long 0x1201CEC4 vand 4,4,29 vand 1,1,29 .long 0x100098C0 .long 0x105180C0 .long 0x1273A5C4 .long 0x1222CEC4 vand 2,2,29 .long 0x100098C0 .long 0x106388C0 .long 0x11E0CEC4 .long 0x1243CEC4 vand 0,0,29 vand 3,3,29 .long 0x102178C0 .long 0x108490C0 addi 4,4,0x40 bdnz .Loop_vsx neg 5,5 andi. 
5,5,0x30 sub 4,4,5 .long 0x7D5D1E99 .long 0x7D605699 .long 0x7D9B5699 .long 0x7DBC5699 .long 0x7DDD5699 .Last_vsx: .long 0x11E55288 .long 0x12065288 .long 0x12275288 .long 0x12485288 .long 0x12695288 .long 0x12896288 .long 0x11EFA0C0 .long 0x12855A88 .long 0x1210A0C0 .long 0x12865A88 .long 0x1231A0C0 .long 0x12875A88 .long 0x1252A0C0 .long 0x7D9F5699 .long 0x12885A88 .long 0x1273A0C0 .long 0x7D7E5699 .long 0x104238C0 .long 0x100028C0 .long 0x106340C0 .long 0x102130C0 .long 0x108448C0 .long 0x12887288 .long 0x11EFA0C0 .long 0x12897288 .long 0x1210A0C0 .long 0x12856A88 .long 0x1231A0C0 .long 0x12866A88 .long 0x1252A0C0 .long 0x7DC85699 .long 0x12876A88 .long 0x1273A0C0 .long 0x7DA75699 .long 0x12876288 .long 0x11EFA0C0 .long 0x12886288 .long 0x1210A0C0 .long 0x12896288 .long 0x1231A0C0 .long 0x12855A88 .long 0x1252A0C0 .long 0x12865A88 .long 0x1273A0C0 .long 0x12867288 .long 0x11EFA0C0 .long 0x12877288 .long 0x1210A0C0 .long 0x12887288 .long 0x1231A0C0 .long 0x12897288 .long 0x1252A0C0 .long 0x12856A88 .long 0x1273A0C0 .long 0x12805088 .long 0x11EFA0C0 .long 0x12815088 .long 0x1210A0C0 .long 0x12825088 .long 0x1231A0C0 .long 0x12835088 .long 0x1252A0C0 .long 0x12845088 .long 0x1273A0C0 .long 0x12826088 .long 0x11EFA0C0 .long 0x12836088 .long 0x1210A0C0 .long 0x12846088 .long 0x1231A0C0 .long 0x12805888 .long 0x1252A0C0 .long 0x7D9B5699 .long 0x12815888 .long 0x1273A0C0 .long 0x7D605699 .long 0x12817088 .long 0x11EFA0C0 .long 0x12827088 .long 0x1210A0C0 .long 0x12837088 .long 0x1231A0C0 .long 0x12847088 .long 0x1252A0C0 .long 0x7DDD5699 .long 0x12806888 .long 0x1273A0C0 .long 0x7DBC5699 .long 0x12846088 .long 0x11EFA0C0 .long 0x12805888 .long 0x1210A0C0 .long 0x12815888 .long 0x1231A0C0 .long 0x12825888 .long 0x1252A0C0 .long 0x12835888 .long 0x1273A0C0 .long 0x12837088 .long 0x11EFA0C0 .long 0x12847088 .long 0x1210A0C0 .long 0x12806888 .long 0x1231A0C0 .long 0x12816888 .long 0x1252A0C0 .long 0x12826888 .long 0x1273A0C0 .long 0xF00F7A57 .long 0xF0308257 .long 
0xF0518A57 .long 0xF0729257 .long 0xF0939A57 .long 0x11EF00C0 .long 0x121008C0 .long 0x123110C0 .long 0x125218C0 .long 0x127320C0 vspltisb 20,2 .long 0x1092CEC4 .long 0x102FCEC4 vand 3,18,29 vand 0,15,29 .long 0x108498C0 .long 0x102180C0 .long 0x1264CEC4 .long 0x1201CEC4 vand 4,4,29 vand 1,1,29 .long 0x100098C0 .long 0x105180C0 .long 0x1273A5C4 .long 0x1222CEC4 vand 2,2,29 .long 0x100098C0 .long 0x106388C0 .long 0x11E0CEC4 .long 0x1243CEC4 vand 0,0,29 vand 3,3,29 .long 0x102178C0 .long 0x108490C0 beq .Ldone_vsx add 6,12,5 .long 0x7F406699 .long 0x7EA02699 .long 0x7EDB2699 .long 0x7EFC2699 .long 0x7F1D2699 vperm 21,21,21,26 vperm 22,22,22,26 vperm 23,23,23,26 vperm 24,24,24,26 .long 0xF0B5B057 vspltisb 26,4 vperm 7,21,22,31 .long 0xF115B357 .long 0x10C5CEC4 .long 0x10E7D6C4 .long 0x1128DEC4 .long 0x1108E6C4 vand 5,5,29 vand 6,6,29 vand 7,7,29 vand 8,8,29 .long 0xF297C057 vperm 21,23,24,31 .long 0xF2D7C357 .long 0x7DE03699 .long 0x7E1D3699 .long 0x12F4CEC4 .long 0x12B5D6C4 .long 0x1316DEC4 .long 0x12D6E6C4 vand 20,20,29 vand 23,23,29 vand 21,21,29 vand 22,22,29 .long 0x11384E8C .long 0x10B42E8C .long 0x10D7368C .long 0x10F53E8C .long 0x1116468C vor 9,9,30 vperm 0,0,0,15 vand 5,5, 16 vperm 1,1,1,15 vand 6,6, 16 vperm 2,2,2,15 vand 7,7, 16 vperm 3,3,3,15 vand 8,8, 16 vperm 4,4,4,15 vand 9,9, 16 .long 0x10A500C0 vxor 0,0,0 .long 0x10C608C0 vxor 1,1,1 .long 0x10E710C0 vxor 2,2,2 .long 0x110818C0 vxor 3,3,3 .long 0x112920C0 vxor 4,4,4 xor. 
5,5,5 b .Last_vsx .align 4 .Ldone_vsx: - lwz 0,388(1) + lwz 0,372(1) li 27,4 li 28,8 li 29,12 li 30,16 .long 0x7C001919 .long 0x7C3B1919 .long 0x7C5C1919 .long 0x7C7D1919 .long 0x7C9E1919 - lwz 12,360(1) + lwz 12,344(1) mtlr 0 li 10,167 li 11,183 mtspr 256,12 lvx 20,10,1 addi 10,10,32 - lvx 21,10,1 - addi 10,10,32 - lvx 22,11,1 + lvx 21,11,1 addi 11,11,32 - lvx 23,10,1 + lvx 22,10,1 addi 10,10,32 - lvx 24,11,1 + lvx 23,11,1 addi 11,11,32 - lvx 25,10,1 + lvx 24,10,1 addi 10,10,32 - lvx 26,11,1 + lvx 25,11,1 addi 11,11,32 - lvx 27,10,1 + lvx 26,10,1 addi 10,10,32 - lvx 28,11,1 + lvx 27,11,1 addi 11,11,32 - lvx 29,10,1 + lvx 28,10,1 addi 10,10,32 - lvx 30,11,1 - lvx 31,10,1 - lwz 27,364(1) - lwz 28,368(1) - lwz 29,372(1) - lwz 30,376(1) - lwz 31,380(1) - addi 1,1,384 + lvx 29,11,1 + addi 11,11,32 + lvx 30,10,1 + lvx 31,11,1 + lwz 27,348(1) + lwz 28,352(1) + lwz 29,356(1) + lwz 30,360(1) + lwz 31,364(1) + addi 1,1,368 blr .long 0 .byte 0,12,0x04,1,0x80,5,4,0 .long 0 .size __poly1305_blocks_vsx,.-__poly1305_blocks_vsx .align 6 .LPICmeup: mflr 0 bcl 20,31,$+4 mflr 12 addi 12,12,56 mtlr 0 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .space 28 .long 0x00000000,0x03ffffff .long 0x00000000,0x03ffffff .long 0x00000000,0x0000001a .long 0x00000000,0x0000001a .long 0x00000000,0x00000028 .long 0x00000000,0x00000028 .long 0x00000000,0x0e0f0001 .long 0x00000000,0x1e1f1011 .long 0x01000000,0x01000000 .long 0x01000000,0x01000000 .long 0x07060504,0x03020100 .long 0x0f0e0d0c,0x0b0a0908 .long 0x00000000,0x00000000 .long 0x00000000,0x04050607 .long 0x04050607,0x00000000 .long 0x00000000,0x00000000 .long 0x00000000,0x00000000 .long 0x04050607,0x00000000 .long 0xffffffff,0x00000000 .long 0xffffffff,0xffffffff .long 0xffffffff,0x00000000 .long 0xffffffff,0x00000000 .long 0x00000000,0x00000000 .long 0xffffffff,0x00000000 .byte 80,111,108,121,49,51,48,53,32,102,111,114,32,80,80,67,44,67,82,89,80,84,79,71,65,77,83,32,98,121,32,64,100,111,116,45,97,115,109,0 .align 2 diff --git 
a/sys/crypto/openssl/powerpc64/aesp8-ppc.S b/sys/crypto/openssl/powerpc64/aesp8-ppc.S index 5fdbf0552b26..f2d9eb3c4664 100644 --- a/sys/crypto/openssl/powerpc64/aesp8-ppc.S +++ b/sys/crypto/openssl/powerpc64/aesp8-ppc.S @@ -1,3659 +1,3704 @@ /* Do not modify. This file is auto-generated from aesp8-ppc.pl. */ .machine "any" .abiversion 2 .text .align 7 rcon: .byte 0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00 .byte 0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00 .byte 0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c,0x0d,0x0e,0x0f,0x0c .byte 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 +.long 0x0f102132, 0x43546576, 0x8798a9ba, 0xcbdcedfe .Lconsts: mflr 0 bcl 20,31,$+4 mflr 6 - addi 6,6,-0x48 + addi 6,6,-0x58 mtlr 0 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .byte 65,69,83,32,102,111,114,32,80,111,119,101,114,73,83,65,32,50,46,48,55,44,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .globl aes_p8_set_encrypt_key .type aes_p8_set_encrypt_key,@function .align 5 aes_p8_set_encrypt_key: .localentry aes_p8_set_encrypt_key,0 .Lset_encrypt_key: mflr 11 std 11,16(1) li 6,-1 cmpldi 3,0 beq- .Lenc_key_abort cmpldi 5,0 beq- .Lenc_key_abort li 6,-2 cmpwi 4,128 blt- .Lenc_key_abort cmpwi 4,256 bgt- .Lenc_key_abort andi. 
0,4,0x3f bne- .Lenc_key_abort lis 0,0xfff0 li 12,-1 or 0,0,0 bl .Lconsts mtlr 11 neg 9,3 lvx 1,0,3 addi 3,3,15 lvsr 3,0,9 li 8,0x20 cmpwi 4,192 lvx 2,0,3 lvx 4,0,6 lvx 5,8,6 addi 6,6,0x10 vperm 1,1,2,3 li 7,8 vxor 0,0,0 mtctr 7 lvsr 8,0,5 vspltisb 9,-1 lvx 10,0,5 vperm 9,0,9,8 blt .Loop128 addi 3,3,8 beq .L192 addi 3,3,8 b .L256 .align 4 .Loop128: vperm 3,1,1,5 vsldoi 6,0,1,12 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vadduwm 4,4,4 vxor 1,1,3 bdnz .Loop128 lvx 4,0,6 vperm 3,1,1,5 vsldoi 6,0,1,12 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vadduwm 4,4,4 vxor 1,1,3 vperm 3,1,1,5 vsldoi 6,0,1,12 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vxor 1,1,3 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 3,5,15 addi 5,5,0x50 li 8,10 b .Ldone .align 4 .L192: lvx 6,0,3 li 7,4 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 5,5,16 vperm 2,2,6,3 vspltisb 3,8 mtctr 7 vsububm 5,5,3 .Loop192: vperm 3,2,2,5 vsldoi 6,0,1,12 .long 0x10632509 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 7,0,2,8 vspltw 6,1,3 vxor 6,6,2 vsldoi 2,0,2,12 vadduwm 4,4,4 vxor 2,2,6 vxor 1,1,3 vxor 2,2,3 vsldoi 7,7,1,8 vperm 3,2,2,5 vsldoi 6,0,1,12 vperm 11,7,7,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vsldoi 7,1,2,8 vxor 1,1,6 vsldoi 6,0,6,12 vperm 11,7,7,8 vsel 7,10,11,9 vor 10,11,11 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 stvx 7,0,5 addi 5,5,16 vspltw 6,1,3 vxor 6,6,2 vsldoi 2,0,2,12 vadduwm 4,4,4 vxor 2,2,6 vxor 1,1,3 vxor 2,2,3 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 3,5,15 addi 5,5,16 bdnz .Loop192 li 8,12 addi 5,5,0x20 b .Ldone .align 4 .L256: lvx 6,0,3 li 7,7 li 8,14 vperm 11,1,1,8 vsel 7,10,11,9 vor 
10,11,11 stvx 7,0,5 addi 5,5,16 vperm 2,2,6,3 mtctr 7 .Loop256: vperm 3,2,2,5 vsldoi 6,0,1,12 vperm 11,2,2,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vadduwm 4,4,4 vxor 1,1,3 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 3,5,15 addi 5,5,16 bdz .Ldone vspltw 3,1,3 vsldoi 6,0,2,12 .long 0x106305C8 vxor 2,2,6 vsldoi 6,0,6,12 vxor 2,2,6 vsldoi 6,0,6,12 vxor 2,2,6 vxor 2,2,3 b .Loop256 .align 4 .Ldone: lvx 2,0,3 vsel 2,10,2,9 stvx 2,0,3 li 6,0 or 12,12,12 stw 8,0(5) .Lenc_key_abort: mr 3,6 blr .long 0 .byte 0,12,0x14,1,0,0,3,0 .long 0 .size aes_p8_set_encrypt_key,.-aes_p8_set_encrypt_key .globl aes_p8_set_decrypt_key .type aes_p8_set_decrypt_key,@function .align 5 aes_p8_set_decrypt_key: .localentry aes_p8_set_decrypt_key,0 stdu 1,-64(1) mflr 10 std 10,64+16(1) bl .Lset_encrypt_key mtlr 10 cmpwi 3,0 bne- .Ldec_key_abort slwi 7,8,4 subi 3,5,240 srwi 8,8,1 add 5,3,7 mtctr 8 .Ldeckey: lwz 0, 0(3) lwz 6, 4(3) lwz 7, 8(3) lwz 8, 12(3) addi 3,3,16 lwz 9, 0(5) lwz 10,4(5) lwz 11,8(5) lwz 12,12(5) stw 0, 0(5) stw 6, 4(5) stw 7, 8(5) stw 8, 12(5) subi 5,5,16 stw 9, -16(3) stw 10,-12(3) stw 11,-8(3) stw 12,-4(3) bdnz .Ldeckey xor 3,3,3 .Ldec_key_abort: addi 1,1,64 blr .long 0 .byte 0,12,4,1,0x80,0,3,0 .long 0 .size aes_p8_set_decrypt_key,.-aes_p8_set_decrypt_key .globl aes_p8_encrypt .type aes_p8_encrypt,@function .align 5 aes_p8_encrypt: .localentry aes_p8_encrypt,0 lwz 6,240(5) lis 0,0xfc00 li 12,-1 li 7,15 or 0,0,0 lvx 0,0,3 neg 11,4 lvx 1,7,3 lvsl 2,0,3 lvsl 3,0,11 li 7,16 vperm 0,0,1,2 lvx 1,0,5 lvsl 5,0,5 srwi 6,6,1 lvx 2,7,5 addi 7,7,16 subi 6,6,1 vperm 1,1,2,5 vxor 0,0,1 lvx 1,7,5 addi 7,7,16 mtctr 6 .Loop_enc: vperm 2,2,1,5 .long 0x10001508 lvx 2,7,5 addi 7,7,16 vperm 1,1,2,5 .long 0x10000D08 lvx 1,7,5 addi 7,7,16 bdnz .Loop_enc vperm 2,2,1,5 .long 0x10001508 lvx 2,7,5 vperm 1,1,2,5 .long 0x10000D09 vspltisb 2,-1 vxor 1,1,1 li 7,15 vperm 2,1,2,3 lvx 1,0,4 vperm 
0,0,0,3 vsel 1,1,0,2 lvx 4,7,4 stvx 1,0,4 vsel 0,0,4,2 stvx 0,7,4 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,3,0 .long 0 .size aes_p8_encrypt,.-aes_p8_encrypt .globl aes_p8_decrypt .type aes_p8_decrypt,@function .align 5 aes_p8_decrypt: .localentry aes_p8_decrypt,0 lwz 6,240(5) lis 0,0xfc00 li 12,-1 li 7,15 or 0,0,0 lvx 0,0,3 neg 11,4 lvx 1,7,3 lvsl 2,0,3 lvsl 3,0,11 li 7,16 vperm 0,0,1,2 lvx 1,0,5 lvsl 5,0,5 srwi 6,6,1 lvx 2,7,5 addi 7,7,16 subi 6,6,1 vperm 1,1,2,5 vxor 0,0,1 lvx 1,7,5 addi 7,7,16 mtctr 6 .Loop_dec: vperm 2,2,1,5 .long 0x10001548 lvx 2,7,5 addi 7,7,16 vperm 1,1,2,5 .long 0x10000D48 lvx 1,7,5 addi 7,7,16 bdnz .Loop_dec vperm 2,2,1,5 .long 0x10001548 lvx 2,7,5 vperm 1,1,2,5 .long 0x10000D49 vspltisb 2,-1 vxor 1,1,1 li 7,15 vperm 2,1,2,3 lvx 1,0,4 vperm 0,0,0,3 vsel 1,1,0,2 lvx 4,7,4 stvx 1,0,4 vsel 0,0,4,2 stvx 0,7,4 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,3,0 .long 0 .size aes_p8_decrypt,.-aes_p8_decrypt .globl aes_p8_cbc_encrypt .type aes_p8_cbc_encrypt,@function .align 5 aes_p8_cbc_encrypt: .localentry aes_p8_cbc_encrypt,0 cmpldi 5,16 .long 0x4dc00020 cmpwi 8,0 lis 0,0xffe0 li 12,-1 or 0,0,0 li 10,15 vxor 0,0,0 lvx 4,0,7 lvsl 6,0,7 lvx 5,10,7 vperm 4,4,5,6 neg 11,3 lvsl 10,0,6 lwz 9,240(6) lvsr 6,0,11 lvx 5,0,3 addi 3,3,15 lvsr 8,0,4 vspltisb 9,-1 lvx 7,0,4 vperm 9,0,9,8 srwi 9,9,1 li 10,16 subi 9,9,1 beq .Lcbc_dec .Lcbc_enc: vor 2,5,5 lvx 5,0,3 addi 3,3,16 mtctr 9 subi 5,5,16 lvx 0,0,6 vperm 2,2,5,6 lvx 1,10,6 addi 10,10,16 vperm 0,0,1,10 vxor 2,2,0 lvx 0,10,6 addi 10,10,16 vxor 2,2,4 .Loop_cbc_enc: vperm 1,1,0,10 .long 0x10420D08 lvx 1,10,6 addi 10,10,16 vperm 0,0,1,10 .long 0x10420508 lvx 0,10,6 addi 10,10,16 bdnz .Loop_cbc_enc vperm 1,1,0,10 .long 0x10420D08 lvx 1,10,6 li 10,16 vperm 0,0,1,10 .long 0x10820509 cmpldi 5,16 vperm 3,4,4,8 vsel 2,7,3,9 vor 7,3,3 stvx 2,0,4 addi 4,4,16 bge .Lcbc_enc b .Lcbc_done .align 4 .Lcbc_dec: cmpldi 5,128 bge _aesp8_cbc_decrypt8x vor 3,5,5 lvx 5,0,3 addi 3,3,16 mtctr 9 subi 5,5,16 lvx 0,0,6 vperm 
3,3,5,6 lvx 1,10,6 addi 10,10,16 vperm 0,0,1,10 vxor 2,3,0 lvx 0,10,6 addi 10,10,16 .Loop_cbc_dec: vperm 1,1,0,10 .long 0x10420D48 lvx 1,10,6 addi 10,10,16 vperm 0,0,1,10 .long 0x10420548 lvx 0,10,6 addi 10,10,16 bdnz .Loop_cbc_dec vperm 1,1,0,10 .long 0x10420D48 lvx 1,10,6 li 10,16 vperm 0,0,1,10 .long 0x10420549 cmpldi 5,16 vxor 2,2,4 vor 4,3,3 vperm 3,2,2,8 vsel 2,7,3,9 vor 7,3,3 stvx 2,0,4 addi 4,4,16 bge .Lcbc_dec .Lcbc_done: addi 4,4,-1 lvx 2,0,4 vsel 2,7,2,9 stvx 2,0,4 neg 8,7 li 10,15 vxor 0,0,0 vspltisb 9,-1 lvsl 8,0,8 vperm 9,0,9,8 lvx 7,0,7 vperm 4,4,4,8 vsel 2,7,4,9 lvx 5,10,7 stvx 2,0,7 vsel 2,4,5,9 stvx 2,10,7 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,6,0 .long 0 .align 5 _aesp8_cbc_decrypt8x: stdu 1,-448(1) li 10,207 li 11,223 stvx 20,10,1 addi 10,10,32 stvx 21,11,1 addi 11,11,32 stvx 22,10,1 addi 10,10,32 stvx 23,11,1 addi 11,11,32 stvx 24,10,1 addi 10,10,32 stvx 25,11,1 addi 11,11,32 stvx 26,10,1 addi 10,10,32 stvx 27,11,1 addi 11,11,32 stvx 28,10,1 addi 10,10,32 stvx 29,11,1 addi 11,11,32 stvx 30,10,1 stvx 31,11,1 li 0,-1 stw 12,396(1) li 8,0x10 std 26,400(1) li 26,0x20 std 27,408(1) li 27,0x30 std 28,416(1) li 28,0x40 std 29,424(1) li 29,0x50 std 30,432(1) li 30,0x60 std 31,440(1) li 31,0x70 or 0,0,0 subi 9,9,3 subi 5,5,128 lvx 23,0,6 lvx 30,8,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,23,30,10 addi 11,1,64+15 mtctr 9 .Load_cbc_dec_key: vperm 24,30,31,10 lvx 30,8,6 addi 6,6,0x20 stvx 24,0,11 vperm 25,31,30,10 lvx 31,0,6 stvx 25,8,11 addi 11,11,0x20 bdnz .Load_cbc_dec_key lvx 26,8,6 vperm 24,30,31,10 lvx 27,26,6 stvx 24,0,11 vperm 25,31,26,10 lvx 28,27,6 stvx 25,8,11 addi 11,1,64+15 vperm 26,26,27,10 lvx 29,28,6 vperm 27,27,28,10 lvx 30,29,6 vperm 28,28,29,10 lvx 31,30,6 vperm 29,29,30,10 lvx 14,31,6 vperm 30,30,31,10 lvx 24,0,11 vperm 31,31,14,10 lvx 25,8,11 subi 3,3,15 .long 0x7C001E99 .long 0x7C281E99 .long 0x7C5A1E99 .long 0x7C7B1E99 .long 0x7D5C1E99 vxor 14,0,23 .long 0x7D7D1E99 vxor 15,1,23 .long 0x7D9E1E99 vxor 16,2,23 .long 0x7DBF1E99 addi 
3,3,0x80 vxor 17,3,23 vxor 18,10,23 vxor 19,11,23 vxor 20,12,23 vxor 21,13,23 mtctr 9 b .Loop_cbc_dec8x .align 5 .Loop_cbc_dec8x: .long 0x11CEC548 .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 lvx 24,26,11 addi 11,11,0x20 .long 0x11CECD48 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 lvx 25,8,11 bdnz .Loop_cbc_dec8x subic 5,5,128 .long 0x11CEC548 .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 subfe. 0,0,0 .long 0x11CECD48 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 and 0,0,5 .long 0x11CED548 .long 0x11EFD548 .long 0x1210D548 .long 0x1231D548 .long 0x1252D548 .long 0x1273D548 .long 0x1294D548 .long 0x12B5D548 add 3,3,0 .long 0x11CEDD48 .long 0x11EFDD48 .long 0x1210DD48 .long 0x1231DD48 .long 0x1252DD48 .long 0x1273DD48 .long 0x1294DD48 .long 0x12B5DD48 addi 11,1,64+15 .long 0x11CEE548 .long 0x11EFE548 .long 0x1210E548 .long 0x1231E548 .long 0x1252E548 .long 0x1273E548 .long 0x1294E548 .long 0x12B5E548 lvx 24,0,11 .long 0x11CEED48 .long 0x11EFED48 .long 0x1210ED48 .long 0x1231ED48 .long 0x1252ED48 .long 0x1273ED48 .long 0x1294ED48 .long 0x12B5ED48 lvx 25,8,11 .long 0x11CEF548 vxor 4,4,31 .long 0x11EFF548 vxor 0,0,31 .long 0x1210F548 vxor 1,1,31 .long 0x1231F548 vxor 2,2,31 .long 0x1252F548 vxor 3,3,31 .long 0x1273F548 vxor 10,10,31 .long 0x1294F548 vxor 11,11,31 .long 0x12B5F548 vxor 12,12,31 .long 0x11CE2549 .long 0x11EF0549 .long 0x7C001E99 .long 0x12100D49 .long 0x7C281E99 .long 0x12311549 .long 0x7C5A1E99 .long 0x12521D49 .long 0x7C7B1E99 .long 0x12735549 .long 0x7D5C1E99 .long 0x12945D49 .long 0x7D7D1E99 .long 0x12B56549 .long 0x7D9E1E99 vor 4,13,13 .long 0x7DBF1E99 addi 3,3,0x80 .long 0x7DC02799 vxor 14,0,23 .long 0x7DE82799 vxor 15,1,23 .long 0x7E1A2799 vxor 16,2,23 .long 
0x7E3B2799 vxor 17,3,23 .long 0x7E5C2799 vxor 18,10,23 .long 0x7E7D2799 vxor 19,11,23 .long 0x7E9E2799 vxor 20,12,23 .long 0x7EBF2799 addi 4,4,0x80 vxor 21,13,23 mtctr 9 beq .Loop_cbc_dec8x addic. 5,5,128 beq .Lcbc_dec8x_done nop nop .Loop_cbc_dec8x_tail: .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 lvx 24,26,11 addi 11,11,0x20 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 lvx 25,8,11 bdnz .Loop_cbc_dec8x_tail .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 .long 0x11EFD548 .long 0x1210D548 .long 0x1231D548 .long 0x1252D548 .long 0x1273D548 .long 0x1294D548 .long 0x12B5D548 .long 0x11EFDD48 .long 0x1210DD48 .long 0x1231DD48 .long 0x1252DD48 .long 0x1273DD48 .long 0x1294DD48 .long 0x12B5DD48 .long 0x11EFE548 .long 0x1210E548 .long 0x1231E548 .long 0x1252E548 .long 0x1273E548 .long 0x1294E548 .long 0x12B5E548 .long 0x11EFED48 .long 0x1210ED48 .long 0x1231ED48 .long 0x1252ED48 .long 0x1273ED48 .long 0x1294ED48 .long 0x12B5ED48 .long 0x11EFF548 vxor 4,4,31 .long 0x1210F548 vxor 1,1,31 .long 0x1231F548 vxor 2,2,31 .long 0x1252F548 vxor 3,3,31 .long 0x1273F548 vxor 10,10,31 .long 0x1294F548 vxor 11,11,31 .long 0x12B5F548 vxor 12,12,31 cmplwi 5,32 blt .Lcbc_dec8x_one nop beq .Lcbc_dec8x_two cmplwi 5,64 blt .Lcbc_dec8x_three nop beq .Lcbc_dec8x_four cmplwi 5,96 blt .Lcbc_dec8x_five nop beq .Lcbc_dec8x_six .Lcbc_dec8x_seven: .long 0x11EF2549 .long 0x12100D49 .long 0x12311549 .long 0x12521D49 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 .long 0x7DE02799 .long 0x7E082799 .long 0x7E3A2799 .long 0x7E5B2799 .long 0x7E7C2799 .long 0x7E9D2799 .long 0x7EBE2799 addi 4,4,0x70 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_six: .long 
0x12102549 .long 0x12311549 .long 0x12521D49 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 .long 0x7E002799 .long 0x7E282799 .long 0x7E5A2799 .long 0x7E7B2799 .long 0x7E9C2799 .long 0x7EBD2799 addi 4,4,0x60 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_five: .long 0x12312549 .long 0x12521D49 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 .long 0x7E202799 .long 0x7E482799 .long 0x7E7A2799 .long 0x7E9B2799 .long 0x7EBC2799 addi 4,4,0x50 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_four: .long 0x12522549 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 .long 0x7E402799 .long 0x7E682799 .long 0x7E9A2799 .long 0x7EBB2799 addi 4,4,0x40 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_three: .long 0x12732549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 .long 0x7E602799 .long 0x7E882799 .long 0x7EBA2799 addi 4,4,0x30 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_two: .long 0x12942549 .long 0x12B56549 vor 4,13,13 .long 0x7E802799 .long 0x7EA82799 addi 4,4,0x20 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_one: .long 0x12B52549 vor 4,13,13 .long 0x7EA02799 addi 4,4,0x10 .Lcbc_dec8x_done: .long 0x7C803F99 li 10,79 li 11,95 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 ld 26,400(1) ld 27,408(1) ld 28,416(1) ld 29,424(1) ld 30,432(1) ld 31,440(1) addi 1,1,448 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_p8_cbc_encrypt,.-aes_p8_cbc_encrypt .globl aes_p8_ctr32_encrypt_blocks .type aes_p8_ctr32_encrypt_blocks,@function .align 5 aes_p8_ctr32_encrypt_blocks: .localentry aes_p8_ctr32_encrypt_blocks,0 
cmpldi 5,1 .long 0x4dc00020 lis 0,0xfff0 li 12,-1 or 0,0,0 li 10,15 vxor 0,0,0 lvx 4,0,7 lvsl 6,0,7 lvx 5,10,7 vspltisb 11,1 vperm 4,4,5,6 vsldoi 11,0,11,1 neg 11,3 lvsl 10,0,6 lwz 9,240(6) lvsr 6,0,11 lvx 5,0,3 addi 3,3,15 srwi 9,9,1 li 10,16 subi 9,9,1 cmpldi 5,8 bge _aesp8_ctr32_encrypt8x lvsr 8,0,4 vspltisb 9,-1 lvx 7,0,4 vperm 9,0,9,8 lvx 0,0,6 mtctr 9 lvx 1,10,6 addi 10,10,16 vperm 0,0,1,10 vxor 2,4,0 lvx 0,10,6 addi 10,10,16 b .Loop_ctr32_enc .align 5 .Loop_ctr32_enc: vperm 1,1,0,10 .long 0x10420D08 lvx 1,10,6 addi 10,10,16 vperm 0,0,1,10 .long 0x10420508 lvx 0,10,6 addi 10,10,16 bdnz .Loop_ctr32_enc vadduwm 4,4,11 vor 3,5,5 lvx 5,0,3 addi 3,3,16 subic. 5,5,1 vperm 1,1,0,10 .long 0x10420D08 lvx 1,10,6 vperm 3,3,5,6 li 10,16 vperm 1,0,1,10 lvx 0,0,6 vxor 3,3,1 .long 0x10421D09 lvx 1,10,6 addi 10,10,16 vperm 2,2,2,8 vsel 3,7,2,9 mtctr 9 vperm 0,0,1,10 vor 7,2,2 vxor 2,4,0 lvx 0,10,6 addi 10,10,16 stvx 3,0,4 addi 4,4,16 bne .Loop_ctr32_enc addi 4,4,-1 lvx 2,0,4 vsel 2,7,2,9 stvx 2,0,4 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,6,0 .long 0 .align 5 _aesp8_ctr32_encrypt8x: stdu 1,-448(1) li 10,207 li 11,223 stvx 20,10,1 addi 10,10,32 stvx 21,11,1 addi 11,11,32 stvx 22,10,1 addi 10,10,32 stvx 23,11,1 addi 11,11,32 stvx 24,10,1 addi 10,10,32 stvx 25,11,1 addi 11,11,32 stvx 26,10,1 addi 10,10,32 stvx 27,11,1 addi 11,11,32 stvx 28,10,1 addi 10,10,32 stvx 29,11,1 addi 11,11,32 stvx 30,10,1 stvx 31,11,1 li 0,-1 stw 12,396(1) li 8,0x10 std 26,400(1) li 26,0x20 std 27,408(1) li 27,0x30 std 28,416(1) li 28,0x40 std 29,424(1) li 29,0x50 std 30,432(1) li 30,0x60 std 31,440(1) li 31,0x70 or 0,0,0 subi 9,9,3 lvx 23,0,6 lvx 30,8,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,23,30,10 addi 11,1,64+15 mtctr 9 .Load_ctr32_enc_key: vperm 24,30,31,10 lvx 30,8,6 addi 6,6,0x20 stvx 24,0,11 vperm 25,31,30,10 lvx 31,0,6 stvx 25,8,11 addi 11,11,0x20 bdnz .Load_ctr32_enc_key lvx 26,8,6 vperm 24,30,31,10 lvx 27,26,6 stvx 24,0,11 vperm 25,31,26,10 lvx 28,27,6 stvx 25,8,11 addi 11,1,64+15 vperm 
26,26,27,10 lvx 29,28,6 vperm 27,27,28,10 lvx 30,29,6 vperm 28,28,29,10 lvx 31,30,6 vperm 29,29,30,10 lvx 15,31,6 vperm 30,30,31,10 lvx 24,0,11 vperm 31,31,15,10 lvx 25,8,11 vadduwm 7,11,11 subi 3,3,15 sldi 5,5,4 vadduwm 16,4,11 vadduwm 17,4,7 vxor 15,4,23 vadduwm 18,16,7 vxor 16,16,23 vadduwm 19,17,7 vxor 17,17,23 vadduwm 20,18,7 vxor 18,18,23 vadduwm 21,19,7 vxor 19,19,23 vadduwm 22,20,7 vxor 20,20,23 vadduwm 4,21,7 vxor 21,21,23 vxor 22,22,23 mtctr 9 b .Loop_ctr32_enc8x .align 5 .Loop_ctr32_enc8x: .long 0x11EFC508 .long 0x1210C508 .long 0x1231C508 .long 0x1252C508 .long 0x1273C508 .long 0x1294C508 .long 0x12B5C508 .long 0x12D6C508 .Loop_ctr32_enc8x_middle: lvx 24,26,11 addi 11,11,0x20 .long 0x11EFCD08 .long 0x1210CD08 .long 0x1231CD08 .long 0x1252CD08 .long 0x1273CD08 .long 0x1294CD08 .long 0x12B5CD08 .long 0x12D6CD08 lvx 25,8,11 bdnz .Loop_ctr32_enc8x subic 11,5,256 .long 0x11EFC508 .long 0x1210C508 .long 0x1231C508 .long 0x1252C508 .long 0x1273C508 .long 0x1294C508 .long 0x12B5C508 .long 0x12D6C508 subfe 0,0,0 .long 0x11EFCD08 .long 0x1210CD08 .long 0x1231CD08 .long 0x1252CD08 .long 0x1273CD08 .long 0x1294CD08 .long 0x12B5CD08 .long 0x12D6CD08 and 0,0,11 addi 11,1,64+15 .long 0x11EFD508 .long 0x1210D508 .long 0x1231D508 .long 0x1252D508 .long 0x1273D508 .long 0x1294D508 .long 0x12B5D508 .long 0x12D6D508 lvx 24,0,11 subic 5,5,129 .long 0x11EFDD08 addi 5,5,1 .long 0x1210DD08 .long 0x1231DD08 .long 0x1252DD08 .long 0x1273DD08 .long 0x1294DD08 .long 0x12B5DD08 .long 0x12D6DD08 lvx 25,8,11 .long 0x11EFE508 .long 0x7C001E99 .long 0x1210E508 .long 0x7C281E99 .long 0x1231E508 .long 0x7C5A1E99 .long 0x1252E508 .long 0x7C7B1E99 .long 0x1273E508 .long 0x7D5C1E99 .long 0x1294E508 .long 0x7D9D1E99 .long 0x12B5E508 .long 0x7DBE1E99 .long 0x12D6E508 .long 0x7DDF1E99 addi 3,3,0x80 .long 0x11EFED08 .long 0x1210ED08 .long 0x1231ED08 .long 0x1252ED08 .long 0x1273ED08 .long 0x1294ED08 .long 0x12B5ED08 .long 0x12D6ED08 add 3,3,0 subfe. 
0,0,0 .long 0x11EFF508 vxor 0,0,31 .long 0x1210F508 vxor 1,1,31 .long 0x1231F508 vxor 2,2,31 .long 0x1252F508 vxor 3,3,31 .long 0x1273F508 vxor 10,10,31 .long 0x1294F508 vxor 12,12,31 .long 0x12B5F508 vxor 13,13,31 .long 0x12D6F508 vxor 14,14,31 bne .Lctr32_enc8x_break .long 0x100F0509 .long 0x10300D09 vadduwm 16,4,11 .long 0x10511509 vadduwm 17,4,7 vxor 15,4,23 .long 0x10721D09 vadduwm 18,16,7 vxor 16,16,23 .long 0x11535509 vadduwm 19,17,7 vxor 17,17,23 .long 0x11946509 vadduwm 20,18,7 vxor 18,18,23 .long 0x11B56D09 vadduwm 21,19,7 vxor 19,19,23 .long 0x11D67509 vadduwm 22,20,7 vxor 20,20,23 vadduwm 4,21,7 vxor 21,21,23 vxor 22,22,23 mtctr 9 .long 0x11EFC508 .long 0x7C002799 .long 0x1210C508 .long 0x7C282799 .long 0x1231C508 .long 0x7C5A2799 .long 0x1252C508 .long 0x7C7B2799 .long 0x1273C508 .long 0x7D5C2799 .long 0x1294C508 .long 0x7D9D2799 .long 0x12B5C508 .long 0x7DBE2799 .long 0x12D6C508 .long 0x7DDF2799 addi 4,4,0x80 b .Loop_ctr32_enc8x_middle .align 5 .Lctr32_enc8x_break: cmpwi 5,-0x60 blt .Lctr32_enc8x_one nop beq .Lctr32_enc8x_two cmpwi 5,-0x40 blt .Lctr32_enc8x_three nop beq .Lctr32_enc8x_four cmpwi 5,-0x20 blt .Lctr32_enc8x_five nop beq .Lctr32_enc8x_six cmpwi 5,0x00 blt .Lctr32_enc8x_seven .Lctr32_enc8x_eight: .long 0x11EF0509 .long 0x12100D09 .long 0x12311509 .long 0x12521D09 .long 0x12735509 .long 0x12946509 .long 0x12B56D09 .long 0x12D67509 .long 0x7DE02799 .long 0x7E082799 .long 0x7E3A2799 .long 0x7E5B2799 .long 0x7E7C2799 .long 0x7E9D2799 .long 0x7EBE2799 .long 0x7EDF2799 addi 4,4,0x80 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_seven: .long 0x11EF0D09 .long 0x12101509 .long 0x12311D09 .long 0x12525509 .long 0x12736509 .long 0x12946D09 .long 0x12B57509 .long 0x7DE02799 .long 0x7E082799 .long 0x7E3A2799 .long 0x7E5B2799 .long 0x7E7C2799 .long 0x7E9D2799 .long 0x7EBE2799 addi 4,4,0x70 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_six: .long 0x11EF1509 .long 0x12101D09 .long 0x12315509 .long 0x12526509 .long 0x12736D09 .long 0x12947509 .long 0x7DE02799 
.long 0x7E082799 .long 0x7E3A2799 .long 0x7E5B2799 .long 0x7E7C2799 .long 0x7E9D2799 addi 4,4,0x60 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_five: .long 0x11EF1D09 .long 0x12105509 .long 0x12316509 .long 0x12526D09 .long 0x12737509 .long 0x7DE02799 .long 0x7E082799 .long 0x7E3A2799 .long 0x7E5B2799 .long 0x7E7C2799 addi 4,4,0x50 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_four: .long 0x11EF5509 .long 0x12106509 .long 0x12316D09 .long 0x12527509 .long 0x7DE02799 .long 0x7E082799 .long 0x7E3A2799 .long 0x7E5B2799 addi 4,4,0x40 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_three: .long 0x11EF6509 .long 0x12106D09 .long 0x12317509 .long 0x7DE02799 .long 0x7E082799 .long 0x7E3A2799 addi 4,4,0x30 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_two: .long 0x11EF6D09 .long 0x12107509 .long 0x7DE02799 .long 0x7E082799 addi 4,4,0x20 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_one: .long 0x11EF7509 .long 0x7DE02799 addi 4,4,0x10 .Lctr32_enc8x_done: li 10,79 li 11,95 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 ld 26,400(1) ld 27,408(1) ld 28,416(1) ld 29,424(1) ld 30,432(1) ld 31,440(1) addi 1,1,448 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_p8_ctr32_encrypt_blocks,.-aes_p8_ctr32_encrypt_blocks .globl aes_p8_xts_encrypt .type aes_p8_xts_encrypt,@function .align 5 aes_p8_xts_encrypt: .localentry aes_p8_xts_encrypt,0 mr 10,3 li 3,-1 cmpldi 5,16 .long 0x4dc00020 lis 0,0xfff0 li 12,-1 li 11,0 or 0,0,0 vspltisb 9,0x07 li 3,15 lvx 8,0,8 lvsl 5,0,8 lvx 4,3,8 vperm 8,8,4,5 neg 11,10 lvsr 5,0,11 lvx 2,0,10 addi 10,10,15 
cmpldi 7,0 beq .Lxts_enc_no_key2 lvsl 7,0,7 lwz 9,240(7) srwi 9,9,1 subi 9,9,1 li 3,16 lvx 0,0,7 lvx 1,3,7 addi 3,3,16 vperm 0,0,1,7 vxor 8,8,0 lvx 0,3,7 addi 3,3,16 mtctr 9 .Ltweak_xts_enc: vperm 1,1,0,7 .long 0x11080D08 lvx 1,3,7 addi 3,3,16 vperm 0,0,1,7 .long 0x11080508 lvx 0,3,7 addi 3,3,16 bdnz .Ltweak_xts_enc vperm 1,1,0,7 .long 0x11080D08 lvx 1,3,7 vperm 0,0,1,7 .long 0x11080509 li 8,0 b .Lxts_enc .Lxts_enc_no_key2: li 3,-16 and 5,5,3 .Lxts_enc: lvx 4,0,10 addi 10,10,16 lvsl 7,0,6 lwz 9,240(6) srwi 9,9,1 subi 9,9,1 li 3,16 vslb 10,9,9 vor 10,10,9 vspltisb 11,1 vsldoi 10,10,11,15 cmpldi 5,96 bge _aesp8_xts_encrypt6x andi. 7,5,15 subic 0,5,32 subi 7,7,16 subfe 0,0,0 and 0,0,7 add 10,10,0 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vperm 2,2,4,5 vperm 0,0,1,7 vxor 2,2,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 b .Loop_xts_enc .align 5 .Loop_xts_enc: vperm 1,1,0,7 .long 0x10420D08 lvx 1,3,6 addi 3,3,16 vperm 0,0,1,7 .long 0x10420508 lvx 0,3,6 addi 3,3,16 bdnz .Loop_xts_enc vperm 1,1,0,7 .long 0x10420D08 lvx 1,3,6 li 3,16 vperm 0,0,1,7 vxor 0,0,8 .long 0x10620509 nop .long 0x7C602799 addi 4,4,16 subic. 
5,5,16 beq .Lxts_enc_done vor 2,4,4 lvx 4,0,10 addi 10,10,16 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 subic 0,5,32 subfe 0,0,0 and 0,0,7 add 10,10,0 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 vperm 2,2,4,5 vperm 0,0,1,7 vxor 2,2,8 vxor 3,3,0 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 cmpldi 5,16 bge .Loop_xts_enc vxor 3,3,8 lvsr 5,0,5 vxor 4,4,4 vspltisb 11,-1 vperm 4,4,11,5 vsel 2,2,3,4 subi 11,4,17 subi 4,4,16 mtctr 5 li 5,16 .Loop_xts_enc_steal: lbzu 0,1(11) stb 0,16(11) bdnz .Loop_xts_enc_steal mtctr 9 b .Loop_xts_enc .Lxts_enc_done: cmpldi 8,0 beq .Lxts_enc_ret vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 .long 0x7D004799 .Lxts_enc_ret: or 12,12,12 li 3,0 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_p8_xts_encrypt,.-aes_p8_xts_encrypt .globl aes_p8_xts_decrypt .type aes_p8_xts_decrypt,@function .align 5 aes_p8_xts_decrypt: .localentry aes_p8_xts_decrypt,0 mr 10,3 li 3,-1 cmpldi 5,16 .long 0x4dc00020 lis 0,0xfff8 li 12,-1 li 11,0 or 0,0,0 andi. 0,5,15 neg 0,0 andi. 0,0,16 sub 5,5,0 vspltisb 9,0x07 li 3,15 lvx 8,0,8 lvsl 5,0,8 lvx 4,3,8 vperm 8,8,4,5 neg 11,10 lvsr 5,0,11 lvx 2,0,10 addi 10,10,15 cmpldi 7,0 beq .Lxts_dec_no_key2 lvsl 7,0,7 lwz 9,240(7) srwi 9,9,1 subi 9,9,1 li 3,16 lvx 0,0,7 lvx 1,3,7 addi 3,3,16 vperm 0,0,1,7 vxor 8,8,0 lvx 0,3,7 addi 3,3,16 mtctr 9 .Ltweak_xts_dec: vperm 1,1,0,7 .long 0x11080D08 lvx 1,3,7 addi 3,3,16 vperm 0,0,1,7 .long 0x11080508 lvx 0,3,7 addi 3,3,16 bdnz .Ltweak_xts_dec vperm 1,1,0,7 .long 0x11080D08 lvx 1,3,7 vperm 0,0,1,7 .long 0x11080509 li 8,0 b .Lxts_dec .Lxts_dec_no_key2: neg 3,5 andi. 
3,3,15 add 5,5,3 .Lxts_dec: lvx 4,0,10 addi 10,10,16 lvsl 7,0,6 lwz 9,240(6) srwi 9,9,1 subi 9,9,1 li 3,16 vslb 10,9,9 vor 10,10,9 vspltisb 11,1 vsldoi 10,10,11,15 cmpldi 5,96 bge _aesp8_xts_decrypt6x lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vperm 2,2,4,5 vperm 0,0,1,7 vxor 2,2,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 cmpldi 5,16 blt .Ltail_xts_dec b .Loop_xts_dec .align 5 .Loop_xts_dec: vperm 1,1,0,7 .long 0x10420D48 lvx 1,3,6 addi 3,3,16 vperm 0,0,1,7 .long 0x10420548 lvx 0,3,6 addi 3,3,16 bdnz .Loop_xts_dec vperm 1,1,0,7 .long 0x10420D48 lvx 1,3,6 li 3,16 vperm 0,0,1,7 vxor 0,0,8 .long 0x10620549 nop .long 0x7C602799 addi 4,4,16 subic. 5,5,16 beq .Lxts_dec_done vor 2,4,4 lvx 4,0,10 addi 10,10,16 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 vperm 2,2,4,5 vperm 0,0,1,7 vxor 2,2,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 cmpldi 5,16 bge .Loop_xts_dec .Ltail_xts_dec: vsrab 11,8,9 vaddubm 12,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 12,12,11 subi 10,10,16 add 10,10,5 vxor 2,2,8 vxor 2,2,12 .Loop_xts_dec_short: vperm 1,1,0,7 .long 0x10420D48 lvx 1,3,6 addi 3,3,16 vperm 0,0,1,7 .long 0x10420548 lvx 0,3,6 addi 3,3,16 bdnz .Loop_xts_dec_short vperm 1,1,0,7 .long 0x10420D48 lvx 1,3,6 li 3,16 vperm 0,0,1,7 vxor 0,0,12 .long 0x10620549 nop .long 0x7C602799 vor 2,4,4 lvx 4,0,10 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vperm 2,2,4,5 vperm 0,0,1,7 lvsr 5,0,5 vxor 4,4,4 vspltisb 11,-1 vperm 4,4,11,5 vsel 2,2,3,4 vxor 0,0,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 subi 11,4,1 mtctr 5 li 5,16 .Loop_xts_dec_steal: lbzu 0,1(11) stb 0,16(11) bdnz .Loop_xts_dec_steal mtctr 9 b .Loop_xts_dec .Lxts_dec_done: cmpldi 8,0 beq .Lxts_dec_ret vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 .long 0x7D004799 .Lxts_dec_ret: or 12,12,12 li 3,0 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_p8_xts_decrypt,.-aes_p8_xts_decrypt .align 5 _aesp8_xts_encrypt6x: stdu 1,-448(1) mflr 11 li 7,207 li 3,223 std 11,464(1) stvx 20,7,1 addi 
7,7,32 stvx 21,3,1 addi 3,3,32 stvx 22,7,1 addi 7,7,32 stvx 23,3,1 addi 3,3,32 stvx 24,7,1 addi 7,7,32 stvx 25,3,1 addi 3,3,32 stvx 26,7,1 addi 7,7,32 stvx 27,3,1 addi 3,3,32 stvx 28,7,1 addi 7,7,32 stvx 29,3,1 addi 3,3,32 stvx 30,7,1 stvx 31,3,1 li 0,-1 stw 12,396(1) li 3,0x10 std 26,400(1) li 26,0x20 std 27,408(1) li 27,0x30 std 28,416(1) li 28,0x40 std 29,424(1) li 29,0x50 std 30,432(1) li 30,0x60 std 31,440(1) li 31,0x70 or 0,0,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl .Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 lvx 30,3,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,23,30,7 addi 7,1,64+15 mtctr 9 .Load_xts_enc_key: vperm 24,30,31,7 lvx 30,3,6 addi 6,6,0x20 stvx 24,0,7 vperm 25,31,30,7 lvx 31,0,6 stvx 25,3,7 addi 7,7,0x20 bdnz .Load_xts_enc_key lvx 26,3,6 vperm 24,30,31,7 lvx 27,26,6 stvx 24,0,7 vperm 25,31,26,7 lvx 28,27,6 stvx 25,3,7 addi 7,1,64+15 vperm 26,26,27,7 lvx 29,28,6 vperm 27,27,28,7 lvx 30,29,6 vperm 28,28,29,7 lvx 31,30,6 vperm 29,29,30,7 lvx 22,31,6 vperm 30,30,31,7 lvx 24,0,7 vperm 31,31,22,7 lvx 25,3,7 + + + + + + + + vperm 0,2,4,5 subi 10,10,31 vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 
31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 b .Loop_xts_enc6x .align 5 .Loop_xts_enc6x: .long 0x10E7C508 .long 0x118CC508 .long 0x11ADC508 .long 0x11CEC508 .long 0x11EFC508 .long 0x1210C508 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD08 .long 0x118CCD08 .long 0x11ADCD08 .long 0x11CECD08 .long 0x11EFCD08 .long 0x1210CD08 lvx 25,3,7 bdnz .Loop_xts_enc6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C508 .long 0x118CC508 vsrab 11,8,9 vxor 17,8,23 vaddubm 8,8,8 .long 0x11ADC508 .long 0x11CEC508 - vsldoi 11,11,11,15 .long 0x11EFC508 .long 0x1210C508 subfe. 
0,0,0 vand 11,11,10 .long 0x10E7CD08 .long 0x118CCD08 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD08 .long 0x11CECD08 vxor 1,18,31 vsrab 11,8,9 vxor 18,8,23 .long 0x11EFCD08 .long 0x1210CD08 and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D508 .long 0x118CD508 vand 11,11,10 .long 0x11ADD508 .long 0x11CED508 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD508 .long 0x1210D508 add 10,10,0 vxor 2,19,31 vsrab 11,8,9 vxor 19,8,23 vaddubm 8,8,8 .long 0x10E7DD08 .long 0x118CDD08 - vsldoi 11,11,11,15 .long 0x11ADDD08 .long 0x11CEDD08 vand 11,11,10 .long 0x11EFDD08 .long 0x1210DD08 addi 7,1,64+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E508 .long 0x118CE508 vxor 3,20,31 vsrab 11,8,9 vxor 20,8,23 .long 0x11ADE508 .long 0x11CEE508 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE508 .long 0x1210E508 lvx 24,0,7 vand 11,11,10 .long 0x10E7ED08 .long 0x118CED08 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED08 .long 0x11CEED08 vxor 4,21,31 vsrab 11,8,9 vxor 21,8,23 .long 0x11EFED08 .long 0x1210ED08 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F508 .long 0x118CF508 vand 11,11,10 .long 0x11ADF508 .long 0x11CEF508 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF508 .long 0x1210F508 vxor 5,22,31 vsrab 11,8,9 vxor 22,8,23 .long 0x10E70509 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D09 .long 0x7C235699 .long 0x11AD1509 .long 0x7C5A5699 vand 11,11,10 .long 0x11CE1D09 .long 0x7C7B5699 .long 0x11EF2509 .long 0x7C9C5699 - vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x11702D09 .long 0x7CBD5699 addi 10,10,0x60 .long 0x7CE02799 vxor 7,0,17 .long 0x7D832799 vxor 12,1,18 .long 0x7DBA2799 vxor 13,2,19 .long 0x7DDB2799 vxor 14,3,20 .long 0x7DFC2799 vxor 15,4,21 .long 0x7D7D2799 vxor 16,5,22 addi 4,4,0x60 mtctr 9 beq .Loop_xts_enc6x + xxlor 32+10, 2, 2 + addic. 
5,5,0x60 beq .Lxts_enc6x_zero cmpwi 5,0x20 blt .Lxts_enc6x_one nop beq .Lxts_enc6x_two cmpwi 5,0x40 blt .Lxts_enc6x_three nop beq .Lxts_enc6x_four .Lxts_enc6x_five: vxor 7,1,17 vxor 12,2,18 vxor 13,3,19 vxor 14,4,20 vxor 15,5,21 bl _aesp8_xts_enc5x vor 17,22,22 .long 0x7CE02799 .long 0x7D832799 .long 0x7DBA2799 vxor 11,15,22 .long 0x7DDB2799 .long 0x7DFC2799 addi 4,4,0x50 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_four: vxor 7,2,17 vxor 12,3,18 vxor 13,4,19 vxor 14,5,20 vxor 15,15,15 bl _aesp8_xts_enc5x vor 17,21,21 .long 0x7CE02799 .long 0x7D832799 vxor 11,14,21 .long 0x7DBA2799 .long 0x7DDB2799 addi 4,4,0x40 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_three: vxor 7,3,17 vxor 12,4,18 vxor 13,5,19 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_enc5x vor 17,20,20 .long 0x7CE02799 vxor 11,13,20 .long 0x7D832799 .long 0x7DBA2799 addi 4,4,0x30 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_two: vxor 7,4,17 vxor 12,5,18 vxor 13,13,13 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_enc5x vor 17,19,19 vxor 11,12,19 .long 0x7CE02799 .long 0x7D832799 addi 4,4,0x20 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_one: vxor 7,5,17 nop .Loop_xts_enc1x: .long 0x10E7C508 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD08 lvx 25,3,7 bdnz .Loop_xts_enc1x add 10,10,31 cmpwi 31,0 .long 0x10E7C508 subi 10,10,16 .long 0x10E7CD08 lvsr 5,0,31 .long 0x10E7D508 .long 0x7C005699 .long 0x10E7DD08 addi 7,1,64+15 .long 0x10E7E508 lvx 24,0,7 .long 0x10E7ED08 lvx 25,3,7 vxor 17,17,31 .long 0x10E7F508 vperm 0,0,0,5 .long 0x10E78D09 vor 17,18,18 vxor 11,7,18 .long 0x7CE02799 addi 4,4,0x10 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_zero: cmpwi 31,0 beq .Lxts_enc6x_done add 10,10,31 subi 10,10,16 .long 0x7C005699 lvsr 5,0,31 vperm 0,0,0,5 vxor 11,11,17 .Lxts_enc6x_steal: vxor 0,0,17 vxor 7,7,7 vspltisb 12,-1 vperm 7,7,12,5 vsel 7,0,11,7 subi 30,4,17 subi 4,4,16 mtctr 31 .Loop_xts_enc6x_steal: lbzu 0,1(30) stb 0,16(30) bdnz 
.Loop_xts_enc6x_steal li 31,0 mtctr 9 b .Loop_xts_enc1x .align 4 .Lxts_enc6x_done: cmpldi 8,0 beq .Lxts_enc6x_ret vxor 8,17,23 .long 0x7D004799 .Lxts_enc6x_ret: mtlr 11 li 10,79 li 11,95 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 ld 26,400(1) ld 27,408(1) ld 28,416(1) ld 29,424(1) ld 30,432(1) ld 31,440(1) addi 1,1,448 blr .long 0 .byte 0,12,0x04,1,0x80,6,6,0 .long 0 .align 5 _aesp8_xts_enc5x: .long 0x10E7C508 .long 0x118CC508 .long 0x11ADC508 .long 0x11CEC508 .long 0x11EFC508 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD08 .long 0x118CCD08 .long 0x11ADCD08 .long 0x11CECD08 .long 0x11EFCD08 lvx 25,3,7 bdnz _aesp8_xts_enc5x add 10,10,31 cmpwi 31,0 .long 0x10E7C508 .long 0x118CC508 .long 0x11ADC508 .long 0x11CEC508 .long 0x11EFC508 subi 10,10,16 .long 0x10E7CD08 .long 0x118CCD08 .long 0x11ADCD08 .long 0x11CECD08 .long 0x11EFCD08 vxor 17,17,31 .long 0x10E7D508 lvsr 5,0,31 .long 0x118CD508 .long 0x11ADD508 .long 0x11CED508 .long 0x11EFD508 vxor 1,18,31 .long 0x10E7DD08 .long 0x7C005699 .long 0x118CDD08 .long 0x11ADDD08 .long 0x11CEDD08 .long 0x11EFDD08 vxor 2,19,31 addi 7,1,64+15 .long 0x10E7E508 .long 0x118CE508 .long 0x11ADE508 .long 0x11CEE508 .long 0x11EFE508 lvx 24,0,7 vxor 3,20,31 .long 0x10E7ED08 .long 0x118CED08 .long 0x11ADED08 .long 0x11CEED08 .long 0x11EFED08 lvx 25,3,7 vxor 4,21,31 .long 0x10E7F508 vperm 0,0,0,5 .long 0x118CF508 .long 0x11ADF508 .long 0x11CEF508 .long 0x11EFF508 .long 0x10E78D09 .long 0x118C0D09 .long 0x11AD1509 .long 0x11CE1D09 .long 0x11EF2509 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .align 5 
_aesp8_xts_decrypt6x: stdu 1,-448(1) mflr 11 li 7,207 li 3,223 std 11,464(1) stvx 20,7,1 addi 7,7,32 stvx 21,3,1 addi 3,3,32 stvx 22,7,1 addi 7,7,32 stvx 23,3,1 addi 3,3,32 stvx 24,7,1 addi 7,7,32 stvx 25,3,1 addi 3,3,32 stvx 26,7,1 addi 7,7,32 stvx 27,3,1 addi 3,3,32 stvx 28,7,1 addi 7,7,32 stvx 29,3,1 addi 3,3,32 stvx 30,7,1 stvx 31,3,1 li 0,-1 stw 12,396(1) li 3,0x10 std 26,400(1) li 26,0x20 std 27,408(1) li 27,0x30 std 28,416(1) li 28,0x40 std 29,424(1) li 29,0x50 std 30,432(1) li 30,0x60 std 31,440(1) li 31,0x70 or 0,0,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl .Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 lvx 30,3,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,23,30,7 addi 7,1,64+15 mtctr 9 .Load_xts_dec_key: vperm 24,30,31,7 lvx 30,3,6 addi 6,6,0x20 stvx 24,0,7 vperm 25,31,30,7 lvx 31,0,6 stvx 25,3,7 addi 7,7,0x20 bdnz .Load_xts_dec_key lvx 26,3,6 vperm 24,30,31,7 lvx 27,26,6 stvx 24,0,7 vperm 25,31,26,7 lvx 28,27,6 stvx 25,3,7 addi 7,1,64+15 vperm 26,26,27,7 lvx 29,28,6 vperm 27,27,28,7 lvx 30,29,6 vperm 28,28,29,7 lvx 31,30,6 vperm 29,29,30,7 lvx 22,31,6 vperm 30,30,31,7 lvx 24,0,7 vperm 31,31,22,7 lvx 25,3,7 vperm 0,2,4,5 subi 10,10,31 vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 
31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 b .Loop_xts_dec6x .align 5 .Loop_xts_dec6x: .long 0x10E7C548 .long 0x118CC548 .long 0x11ADC548 .long 0x11CEC548 .long 0x11EFC548 .long 0x1210C548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 .long 0x118CCD48 .long 0x11ADCD48 .long 0x11CECD48 .long 0x11EFCD48 .long 0x1210CD48 lvx 25,3,7 bdnz .Loop_xts_dec6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C548 .long 0x118CC548 vsrab 11,8,9 vxor 17,8,23 vaddubm 8,8,8 .long 0x11ADC548 .long 0x11CEC548 - vsldoi 11,11,11,15 .long 0x11EFC548 .long 0x1210C548 subfe. 
0,0,0 vand 11,11,10 .long 0x10E7CD48 .long 0x118CCD48 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD48 .long 0x11CECD48 vxor 1,18,31 vsrab 11,8,9 vxor 18,8,23 .long 0x11EFCD48 .long 0x1210CD48 and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D548 .long 0x118CD548 vand 11,11,10 .long 0x11ADD548 .long 0x11CED548 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD548 .long 0x1210D548 add 10,10,0 vxor 2,19,31 vsrab 11,8,9 vxor 19,8,23 vaddubm 8,8,8 .long 0x10E7DD48 .long 0x118CDD48 - vsldoi 11,11,11,15 .long 0x11ADDD48 .long 0x11CEDD48 vand 11,11,10 .long 0x11EFDD48 .long 0x1210DD48 addi 7,1,64+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E548 .long 0x118CE548 vxor 3,20,31 vsrab 11,8,9 vxor 20,8,23 .long 0x11ADE548 .long 0x11CEE548 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE548 .long 0x1210E548 lvx 24,0,7 vand 11,11,10 .long 0x10E7ED48 .long 0x118CED48 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED48 .long 0x11CEED48 vxor 4,21,31 vsrab 11,8,9 vxor 21,8,23 .long 0x11EFED48 .long 0x1210ED48 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F548 .long 0x118CF548 vand 11,11,10 .long 0x11ADF548 .long 0x11CEF548 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF548 .long 0x1210F548 vxor 5,22,31 vsrab 11,8,9 vxor 22,8,23 .long 0x10E70549 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D49 .long 0x7C235699 .long 0x11AD1549 .long 0x7C5A5699 vand 11,11,10 .long 0x11CE1D49 .long 0x7C7B5699 .long 0x11EF2549 .long 0x7C9C5699 - vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x12102D49 .long 0x7CBD5699 addi 10,10,0x60 .long 0x7CE02799 vxor 7,0,17 .long 0x7D832799 vxor 12,1,18 .long 0x7DBA2799 vxor 13,2,19 .long 0x7DDB2799 vxor 14,3,20 .long 0x7DFC2799 vxor 15,4,21 .long 0x7E1D2799 vxor 16,5,22 addi 4,4,0x60 mtctr 9 beq .Loop_xts_dec6x + xxlor 32+10, 2, 2 + addic. 
5,5,0x60 beq .Lxts_dec6x_zero cmpwi 5,0x20 blt .Lxts_dec6x_one nop beq .Lxts_dec6x_two cmpwi 5,0x40 blt .Lxts_dec6x_three nop beq .Lxts_dec6x_four .Lxts_dec6x_five: vxor 7,1,17 vxor 12,2,18 vxor 13,3,19 vxor 14,4,20 vxor 15,5,21 bl _aesp8_xts_dec5x vor 17,22,22 vxor 18,8,23 .long 0x7CE02799 vxor 7,0,18 .long 0x7D832799 .long 0x7DBA2799 .long 0x7DDB2799 .long 0x7DFC2799 addi 4,4,0x50 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_four: vxor 7,2,17 vxor 12,3,18 vxor 13,4,19 vxor 14,5,20 vxor 15,15,15 bl _aesp8_xts_dec5x vor 17,21,21 vor 18,22,22 .long 0x7CE02799 vxor 7,0,22 .long 0x7D832799 .long 0x7DBA2799 .long 0x7DDB2799 addi 4,4,0x40 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_three: vxor 7,3,17 vxor 12,4,18 vxor 13,5,19 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_dec5x vor 17,20,20 vor 18,21,21 .long 0x7CE02799 vxor 7,0,21 .long 0x7D832799 .long 0x7DBA2799 addi 4,4,0x30 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_two: vxor 7,4,17 vxor 12,5,18 vxor 13,13,13 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_dec5x vor 17,19,19 vor 18,20,20 .long 0x7CE02799 vxor 7,0,20 .long 0x7D832799 addi 4,4,0x20 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_one: vxor 7,5,17 nop .Loop_xts_dec1x: .long 0x10E7C548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 lvx 25,3,7 bdnz .Loop_xts_dec1x subi 0,31,1 .long 0x10E7C548 andi. 
0,0,16 cmpwi 31,0 .long 0x10E7CD48 sub 10,10,0 .long 0x10E7D548 .long 0x7C005699 .long 0x10E7DD48 addi 7,1,64+15 .long 0x10E7E548 lvx 24,0,7 .long 0x10E7ED48 lvx 25,3,7 vxor 17,17,31 .long 0x10E7F548 mtctr 9 .long 0x10E78D49 vor 17,18,18 vor 18,19,19 .long 0x7CE02799 addi 4,4,0x10 vxor 7,0,19 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_zero: cmpwi 31,0 beq .Lxts_dec6x_done .long 0x7C005699 vxor 7,0,18 .Lxts_dec6x_steal: .long 0x10E7C548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 lvx 25,3,7 bdnz .Lxts_dec6x_steal add 10,10,31 .long 0x10E7C548 cmpwi 31,0 .long 0x10E7CD48 .long 0x7C005699 .long 0x10E7D548 lvsr 5,0,31 .long 0x10E7DD48 addi 7,1,64+15 .long 0x10E7E548 lvx 24,0,7 .long 0x10E7ED48 lvx 25,3,7 vxor 18,18,31 .long 0x10E7F548 vperm 0,0,0,5 .long 0x11679549 .long 0x7D602799 vxor 7,7,7 vspltisb 12,-1 vperm 7,7,12,5 vsel 7,0,11,7 vxor 7,7,17 subi 30,4,1 mtctr 31 .Loop_xts_dec6x_steal: lbzu 0,1(30) stb 0,16(30) bdnz .Loop_xts_dec6x_steal li 31,0 mtctr 9 b .Loop_xts_dec1x .align 4 .Lxts_dec6x_done: cmpldi 8,0 beq .Lxts_dec6x_ret vxor 8,17,23 .long 0x7D004799 .Lxts_dec6x_ret: mtlr 11 li 10,79 li 11,95 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 ld 26,400(1) ld 27,408(1) ld 28,416(1) ld 29,424(1) ld 30,432(1) ld 31,440(1) addi 1,1,448 blr .long 0 .byte 0,12,0x04,1,0x80,6,6,0 .long 0 .align 5 _aesp8_xts_dec5x: .long 0x10E7C548 .long 0x118CC548 .long 0x11ADC548 .long 0x11CEC548 .long 0x11EFC548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 .long 0x118CCD48 .long 0x11ADCD48 .long 0x11CECD48 .long 
0x11EFCD48 lvx 25,3,7 bdnz _aesp8_xts_dec5x subi 0,31,1 .long 0x10E7C548 .long 0x118CC548 .long 0x11ADC548 .long 0x11CEC548 .long 0x11EFC548 andi. 0,0,16 cmpwi 31,0 .long 0x10E7CD48 .long 0x118CCD48 .long 0x11ADCD48 .long 0x11CECD48 .long 0x11EFCD48 vxor 17,17,31 sub 10,10,0 .long 0x10E7D548 .long 0x118CD548 .long 0x11ADD548 .long 0x11CED548 .long 0x11EFD548 vxor 1,18,31 .long 0x10E7DD48 .long 0x7C005699 .long 0x118CDD48 .long 0x11ADDD48 .long 0x11CEDD48 .long 0x11EFDD48 vxor 2,19,31 addi 7,1,64+15 .long 0x10E7E548 .long 0x118CE548 .long 0x11ADE548 .long 0x11CEE548 .long 0x11EFE548 lvx 24,0,7 vxor 3,20,31 .long 0x10E7ED48 .long 0x118CED48 .long 0x11ADED48 .long 0x11CEED48 .long 0x11EFED48 lvx 25,3,7 vxor 4,21,31 .long 0x10E7F548 .long 0x118CF548 .long 0x11ADF548 .long 0x11CEF548 .long 0x11EFF548 .long 0x10E78D49 .long 0x118C0D49 .long 0x11AD1549 .long 0x11CE1D49 .long 0x11EF2549 mtctr 9 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 diff --git a/sys/crypto/openssl/powerpc64/poly1305-ppc.S b/sys/crypto/openssl/powerpc64/poly1305-ppc.S index 523a590809cd..689c611c29e5 100644 --- a/sys/crypto/openssl/powerpc64/poly1305-ppc.S +++ b/sys/crypto/openssl/powerpc64/poly1305-ppc.S @@ -1,1142 +1,1142 @@ /* Do not modify. This file is auto-generated from poly1305-ppc.pl. */ .machine "any" .abiversion 2 .text .globl poly1305_init_int .type poly1305_init_int,@function .align 4 poly1305_init_int: .localentry poly1305_init_int,0 xor 0,0,0 std 0,0(3) std 0,8(3) std 0,16(3) stw 0,24(3) cmpld 4,0 beq- .Lno_key li 7,4 lwbrx 10,0,4 li 11,8 lwbrx 7,7,4 li 8,12 lwbrx 11,11,4 lwbrx 8,8,4 insrdi 10,7,32,0 insrdi 11,8,32,0 lis 8,0xfff ori 8,8,0xfffc insrdi 8,8,32,0 ori 7,8,3 and 10,10,7 and 11,11,8 std 10,32(3) std 11,40(3) .Lno_key: xor 3,3,3 blr .long 0 .byte 0,12,0x14,0,0,0,2,0 .size poly1305_init_int,.-poly1305_init_int .globl poly1305_blocks .type poly1305_blocks,@function .align 4 poly1305_blocks: .localentry poly1305_blocks,0 .Lpoly1305_blocks: srdi. 
5,5,4 beq- .Labort stdu 1,-192(1) mflr 0 std 27,152(1) std 28,160(1) std 29,168(1) std 30,176(1) std 31,184(1) std 0,208(1) ld 27,32(3) ld 28,40(3) ld 7,0(3) ld 8,8(3) ld 9,16(3) srdi 29,28,2 mtctr 5 add 29,29,28 li 0,3 b .Loop .align 4 .Loop: li 10,4 lwbrx 30,0,4 li 31,8 lwbrx 10,10,4 li 11,12 lwbrx 31,31,4 lwbrx 11,11,4 insrdi 30,10,32,0 insrdi 31,11,32,0 addi 4,4,16 addc 7,7,30 adde 8,8,31 mulld 10,7,27 mulhdu 11,7,27 adde 9,9,6 mulld 30,8,29 mulhdu 31,8,29 addc 10,10,30 adde 11,11,31 mulld 30,7,28 mulhdu 12,7,28 addc 11,11,30 addze 12,12 mulld 30,8,27 mulhdu 31,8,27 addc 11,11,30 adde 12,12,31 mulld 30,9,29 mulld 31,9,27 addc 11,11,30 adde 12,12,31 andc 30,12,0 and 9,12,0 srdi 31,30,2 add 30,30,31 addc 7,10,30 addze 8,11 addze 9,9 bdnz .Loop std 7,0(3) std 8,8(3) std 9,16(3) ld 27,152(1) ld 28,160(1) ld 29,168(1) ld 30,176(1) ld 31,184(1) addi 1,1,192 .Labort: blr .long 0 .byte 0,12,4,1,0x80,5,4,0 .size poly1305_blocks,.-poly1305_blocks .globl poly1305_emit .type poly1305_emit,@function .align 5 poly1305_emit: .localentry poly1305_emit,0 lwz 7,0(3) lwz 8,4(3) lwz 9,8(3) lwz 10,12(3) lwz 11,16(3) lwz 0,24(3) sldi 8,8,26 sldi 12,9,52 srdi 9,9,12 sldi 10,10,14 add 7,7,8 addc 7,7,12 sldi 12,11,40 srdi 11,11,24 adde 8,9,10 addc 8,8,12 addze 9,11 ld 10,0(3) ld 11,8(3) ld 12,16(3) neg 0,0 xor 7,7,10 xor 8,8,11 xor 9,9,12 and 7,7,0 and 8,8,0 and 9,9,0 xor 7,7,10 xor 8,8,11 xor 9,9,12 addic 10,7,5 addze 11,8 addze 12,9 srdi 12,12,2 neg 12,12 andc 7,7,12 and 10,10,12 andc 8,8,12 and 11,11,12 or 7,7,10 or 8,8,11 lwz 12,4(5) lwz 9,12(5) lwz 10,0(5) lwz 11,8(5) insrdi 10,12,32,0 insrdi 11,9,32,0 addc 7,7,10 adde 8,8,11 addi 3,4,-1 addi 4,4,7 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) stbu 
8,1(4) blr .long 0 .byte 0,12,0x14,0,0,0,3,0 .size poly1305_emit,.-poly1305_emit .globl poly1305_blocks_vsx .type poly1305_blocks_vsx,@function .align 5 poly1305_blocks_vsx: .localentry poly1305_blocks_vsx,0 lwz 7,24(3) cmpldi 5,128 bge __poly1305_blocks_vsx neg 0,7 lwz 7,0(3) lwz 8,4(3) lwz 9,8(3) lwz 10,12(3) lwz 11,16(3) sldi 8,8,26 sldi 12,9,52 add 7,7,8 srdi 9,9,12 sldi 10,10,14 addc 7,7,12 sldi 8,11,40 adde 9,9,10 srdi 11,11,24 addc 9,9,8 addze 11,11 ld 8,0(3) ld 10,8(3) ld 12,16(3) xor 7,7,8 xor 9,9,10 xor 11,11,12 and 7,7,0 and 9,9,0 and 11,11,0 xor 7,7,8 xor 9,9,10 xor 11,11,12 li 0,0 std 7,0(3) std 9,8(3) std 11,16(3) stw 0,24(3) b .Lpoly1305_blocks .long 0 .byte 0,12,0x14,0,0,0,4,0 .size poly1305_blocks_vsx,.-poly1305_blocks_vsx .align 5 __poly1305_mul: mulld 9,6,27 mulhdu 10,6,27 mulld 30,7,29 mulhdu 31,7,29 addc 9,9,30 adde 10,10,31 mulld 30,6,28 mulhdu 11,6,28 addc 10,10,30 addze 11,11 mulld 30,7,27 mulhdu 31,7,27 addc 10,10,30 adde 11,11,31 mulld 30,8,29 mulld 31,8,27 addc 10,10,30 adde 11,11,31 andc 30,11,0 and 8,11,0 srdi 31,30,2 add 30,30,31 addc 6,9,30 addze 7,10 addze 8,8 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .size __poly1305_mul,.-__poly1305_mul .align 5 __poly1305_splat: rldicl 9,6,0,38 rldicl 10,6,38,38 stw 9,0x00(31) rldicl 11,6,12,52 slwi 9,10,2 stw 10,0x10(31) add 9,9,10 stw 9,0x20(31) insrdi 11,7,14,38 slwi 9,11,2 stw 11,0x30(31) add 9,9,11 stw 9,0x40(31) rldicl 10,7,50,38 rldicl 11,7,24,40 slwi 9,10,2 stw 10,0x50(31) add 9,9,10 stw 9,0x60(31) insrdi 11,8,3,37 slwi 9,11,2 stw 11,0x70(31) add 9,9,11 stw 9,0x80(31) blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .size __poly1305_splat,.-__poly1305_splat .align 5 __poly1305_blocks_vsx: - stdu 1,-432(1) + stdu 1,-416(1) mflr 0 li 10,191 li 11,207 li 12,-1 stvx 20,10,1 addi 10,10,32 stvx 21,11,1 addi 11,11,32 stvx 22,10,1 addi 10,10,32 - stvx 23,10,1 - addi 10,10,32 - stvx 24,11,1 + stvx 23,11,1 addi 11,11,32 - stvx 25,10,1 + stvx 24,10,1 addi 10,10,32 + stvx 25,11,1 + addi 11,11,32 stvx 26,10,1 addi 
10,10,32 stvx 27,11,1 addi 11,11,32 stvx 28,10,1 addi 10,10,32 stvx 29,11,1 addi 11,11,32 stvx 30,10,1 stvx 31,11,1 - stw 12,388(1) + stw 12,372(1) li 12,-1 or 12,12,12 - std 27,392(1) - std 28,400(1) - std 29,408(1) - std 30,416(1) - std 31,424(1) - std 0,448(1) + std 27,376(1) + std 28,384(1) + std 29,392(1) + std 30,400(1) + std 31,408(1) + std 0,432(1) bl .LPICmeup li 27,0x10 li 28,0x20 li 29,0x30 li 30,0x40 li 31,0x50 .long 0x7FA06699 .long 0x7F3B6699 .long 0x7F7C6699 .long 0x7FFD6699 .long 0x7FDE6699 cmplwi 7,0 bne .Lskip_init_vsx ld 27,32(3) ld 28,40(3) srdi 29,28,2 li 0,3 add 29,29,28 mr 6,27 mr 7,28 li 8,0 addi 31,3,56 bl __poly1305_splat bl __poly1305_mul addi 31,3,48 bl __poly1305_splat bl __poly1305_mul addi 31,3,60 bl __poly1305_splat bl __poly1305_mul addi 31,3,52 bl __poly1305_splat ld 6,0(3) ld 7,8(3) ld 8,16(3) rldicl 9,6,0,38 rldicl 10,6,38,38 rldicl 11,6,12,52 .long 0x7C0901E7 insrdi 11,7,14,38 .long 0x7C2A01E7 rldicl 10,7,50,38 .long 0x7C4B01E7 rldicl 11,7,24,40 .long 0x7C6A01E7 insrdi 11,8,3,37 .long 0x7C8B01E7 li 0,1 stw 0,24(3) b .Loaded_vsx .align 4 .Lskip_init_vsx: li 27,4 li 28,8 li 29,12 li 30,16 .long 0x7C001819 .long 0x7C3B1819 .long 0x7C5C1819 .long 0x7C7D1819 .long 0x7C9E1819 .Loaded_vsx: li 27,0x10 li 28,0x20 li 29,0x30 li 30,0x40 li 31,0x50 li 7,0x60 li 8,0x70 addi 10,3,64 addi 11,1,63 vxor 20,20,20 .long 0xF000A057 .long 0xF021A057 .long 0xF042A057 .long 0xF063A057 .long 0xF084A057 .long 0x7F5F6699 .long 0x7EA02699 .long 0x7EDB2699 .long 0x7EFC2699 .long 0x7F1D2699 vperm 21,21,21,26 vperm 22,22,22,26 vperm 23,23,23,26 vperm 24,24,24,26 .long 0xF0B5B057 vspltisb 26,4 vperm 7,21,22,31 vspltisb 28,14 .long 0xF115B357 .long 0x10C5CEC4 .long 0x10E7D6C4 .long 0x1128DEC4 .long 0x1108E6C4 vand 5,5,29 vand 6,6,29 vand 7,7,29 vand 8,8,29 .long 0xF2B7C057 vperm 22,23,24,31 .long 0xF2F7C357 .long 0x1295CEC4 .long 0x12D6D6C4 .long 0x1317DEC4 .long 0x12F7E6C4 vand 21,21,29 vand 20,20,29 vand 22,22,29 vand 23,23,29 .long 0x11384E8C .long 
0x10B52E8C .long 0x10D4368C .long 0x10F63E8C .long 0x1117468C vor 9,9,30 .long 0x7D5D1A99 .long 0x7D605299 .long 0x7D9B5299 .long 0x7DBC5299 .long 0x7DDD5299 .long 0x7EBE5299 .long 0x7EDF5299 .long 0x7EE75299 .long 0x7F085299 stvx 11,0,11 stvx 12,27,11 stvx 13,28,11 stvx 14,29,11 stvx 21,30,11 stvx 22,31,11 stvx 23,7,11 stvx 24,8,11 addi 4,4,0x40 addi 12,12,0x50 addi 0,5,-64 srdi 0,0,6 mtctr 0 b .Loop_vsx .align 4 .Loop_vsx: .long 0x11E55288 .long 0x12055A88 .long 0x12256A88 .long 0x12466A88 .long 0x12865288 .long 0x1210A0C0 .long 0x12865A88 .long 0x1231A0C0 .long 0x12676A88 .long 0x12896288 .long 0x11EFA0C0 .long 0x12875A88 .long 0x1252A0C0 lvx 12,31,11 .long 0x12885A88 .long 0x1273A0C0 lvx 11,30,11 .long 0x104238C0 .long 0x100028C0 .long 0x106340C0 .long 0x102130C0 .long 0x108448C0 .long 0x12887288 .long 0x11EFA0C0 .long 0x12897288 .long 0x1210A0C0 .long 0x12875288 .long 0x1231A0C0 .long 0x12885288 .long 0x1252A0C0 lvx 14,8,11 .long 0x12895288 .long 0x1273A0C0 lvx 13,7,11 .long 0x12876288 .long 0x11EFA0C0 .long 0x12886288 .long 0x1210A0C0 .long 0x12896288 .long 0x1231A0C0 .long 0x12855A88 .long 0x1252A0C0 .long 0x12865A88 .long 0x1273A0C0 .long 0x7F406699 .long 0x7EA02699 .long 0x7EDB2699 .long 0x7EFC2699 .long 0x7F1D2699 vperm 21,21,21,26 vperm 22,22,22,26 vperm 23,23,23,26 vperm 24,24,24,26 .long 0x12867288 .long 0x11EFA0C0 .long 0x12877288 .long 0x1210A0C0 .long 0x12887288 .long 0x1231A0C0 .long 0x12897288 .long 0x1252A0C0 .long 0x12856A88 .long 0x1273A0C0 .long 0xF0B5B057 vspltisb 26,4 vperm 7,21,22,31 .long 0xF115B357 .long 0x12805088 .long 0x11EFA0C0 .long 0x12815088 .long 0x1210A0C0 .long 0x12825088 .long 0x1231A0C0 .long 0x12835088 .long 0x1252A0C0 .long 0x12845088 .long 0x1273A0C0 .long 0xF2B7C057 vperm 22,23,24,31 .long 0xF2F7C357 .long 0x12826088 .long 0x11EFA0C0 .long 0x12836088 .long 0x1210A0C0 .long 0x12846088 .long 0x1231A0C0 .long 0x12805888 .long 0x1252A0C0 lvx 12,27,11 .long 0x12815888 .long 0x1273A0C0 lvx 11,0,11 .long 0x10C5CEC4 .long 
0x10E7D6C4 .long 0x1128DEC4 .long 0x1108E6C4 .long 0x12817088 .long 0x11EFA0C0 .long 0x12827088 .long 0x1210A0C0 .long 0x12837088 .long 0x1231A0C0 .long 0x12847088 .long 0x1252A0C0 lvx 14,29,11 .long 0x12806888 .long 0x1273A0C0 lvx 13,28,11 vand 5,5,29 vand 6,6,29 vand 7,7,29 vand 8,8,29 .long 0x12846088 .long 0x11EFA0C0 .long 0x12805888 .long 0x1210A0C0 .long 0x12815888 .long 0x1231A0C0 .long 0x12825888 .long 0x1252A0C0 .long 0x12835888 .long 0x1273A0C0 .long 0x12D6D6C4 .long 0x1355CEC4 .long 0x1317DEC4 .long 0x12F7E6C4 .long 0x12837088 .long 0x11EFA0C0 .long 0x12847088 .long 0x1210A0C0 .long 0x12806888 .long 0x1231A0C0 .long 0x12816888 .long 0x1252A0C0 .long 0x12826888 .long 0x1273A0C0 vand 21,21,29 vand 26,26,29 vand 22,22,29 vand 23,23,29 vspltisb 20,2 .long 0x1092CEC4 .long 0x102FCEC4 vand 3,18,29 vand 0,15,29 .long 0x108498C0 .long 0x102180C0 .long 0x11384E8C .long 0x10B52E8C .long 0x10DA368C .long 0x10F63E8C .long 0x1117468C vor 9,9,30 .long 0x1264CEC4 .long 0x1201CEC4 vand 4,4,29 vand 1,1,29 .long 0x100098C0 .long 0x105180C0 .long 0x1273A5C4 .long 0x1222CEC4 vand 2,2,29 .long 0x100098C0 .long 0x106388C0 .long 0x11E0CEC4 .long 0x1243CEC4 vand 0,0,29 vand 3,3,29 .long 0x102178C0 .long 0x108490C0 addi 4,4,0x40 bdnz .Loop_vsx neg 5,5 andi. 
5,5,0x30 sub 4,4,5 .long 0x7D5D1E99 .long 0x7D605699 .long 0x7D9B5699 .long 0x7DBC5699 .long 0x7DDD5699 .Last_vsx: .long 0x11E55288 .long 0x12065288 .long 0x12275288 .long 0x12485288 .long 0x12695288 .long 0x12896288 .long 0x11EFA0C0 .long 0x12855A88 .long 0x1210A0C0 .long 0x12865A88 .long 0x1231A0C0 .long 0x12875A88 .long 0x1252A0C0 .long 0x7D9F5699 .long 0x12885A88 .long 0x1273A0C0 .long 0x7D7E5699 .long 0x104238C0 .long 0x100028C0 .long 0x106340C0 .long 0x102130C0 .long 0x108448C0 .long 0x12887288 .long 0x11EFA0C0 .long 0x12897288 .long 0x1210A0C0 .long 0x12856A88 .long 0x1231A0C0 .long 0x12866A88 .long 0x1252A0C0 .long 0x7DC85699 .long 0x12876A88 .long 0x1273A0C0 .long 0x7DA75699 .long 0x12876288 .long 0x11EFA0C0 .long 0x12886288 .long 0x1210A0C0 .long 0x12896288 .long 0x1231A0C0 .long 0x12855A88 .long 0x1252A0C0 .long 0x12865A88 .long 0x1273A0C0 .long 0x12867288 .long 0x11EFA0C0 .long 0x12877288 .long 0x1210A0C0 .long 0x12887288 .long 0x1231A0C0 .long 0x12897288 .long 0x1252A0C0 .long 0x12856A88 .long 0x1273A0C0 .long 0x12805088 .long 0x11EFA0C0 .long 0x12815088 .long 0x1210A0C0 .long 0x12825088 .long 0x1231A0C0 .long 0x12835088 .long 0x1252A0C0 .long 0x12845088 .long 0x1273A0C0 .long 0x12826088 .long 0x11EFA0C0 .long 0x12836088 .long 0x1210A0C0 .long 0x12846088 .long 0x1231A0C0 .long 0x12805888 .long 0x1252A0C0 .long 0x7D9B5699 .long 0x12815888 .long 0x1273A0C0 .long 0x7D605699 .long 0x12817088 .long 0x11EFA0C0 .long 0x12827088 .long 0x1210A0C0 .long 0x12837088 .long 0x1231A0C0 .long 0x12847088 .long 0x1252A0C0 .long 0x7DDD5699 .long 0x12806888 .long 0x1273A0C0 .long 0x7DBC5699 .long 0x12846088 .long 0x11EFA0C0 .long 0x12805888 .long 0x1210A0C0 .long 0x12815888 .long 0x1231A0C0 .long 0x12825888 .long 0x1252A0C0 .long 0x12835888 .long 0x1273A0C0 .long 0x12837088 .long 0x11EFA0C0 .long 0x12847088 .long 0x1210A0C0 .long 0x12806888 .long 0x1231A0C0 .long 0x12816888 .long 0x1252A0C0 .long 0x12826888 .long 0x1273A0C0 .long 0xF00F7A57 .long 0xF0308257 .long 
0xF0518A57 .long 0xF0729257 .long 0xF0939A57 .long 0x11EF00C0 .long 0x121008C0 .long 0x123110C0 .long 0x125218C0 .long 0x127320C0 vspltisb 20,2 .long 0x1092CEC4 .long 0x102FCEC4 vand 3,18,29 vand 0,15,29 .long 0x108498C0 .long 0x102180C0 .long 0x1264CEC4 .long 0x1201CEC4 vand 4,4,29 vand 1,1,29 .long 0x100098C0 .long 0x105180C0 .long 0x1273A5C4 .long 0x1222CEC4 vand 2,2,29 .long 0x100098C0 .long 0x106388C0 .long 0x11E0CEC4 .long 0x1243CEC4 vand 0,0,29 vand 3,3,29 .long 0x102178C0 .long 0x108490C0 beq .Ldone_vsx add 6,12,5 .long 0x7F406699 .long 0x7EA02699 .long 0x7EDB2699 .long 0x7EFC2699 .long 0x7F1D2699 vperm 21,21,21,26 vperm 22,22,22,26 vperm 23,23,23,26 vperm 24,24,24,26 .long 0xF0B5B057 vspltisb 26,4 vperm 7,21,22,31 .long 0xF115B357 .long 0x10C5CEC4 .long 0x10E7D6C4 .long 0x1128DEC4 .long 0x1108E6C4 vand 5,5,29 vand 6,6,29 vand 7,7,29 vand 8,8,29 .long 0xF297C057 vperm 21,23,24,31 .long 0xF2D7C357 .long 0x7DE03699 .long 0x7E1D3699 .long 0x12F4CEC4 .long 0x12B5D6C4 .long 0x1316DEC4 .long 0x12D6E6C4 vand 20,20,29 vand 23,23,29 vand 21,21,29 vand 22,22,29 .long 0x11384E8C .long 0x10B42E8C .long 0x10D7368C .long 0x10F53E8C .long 0x1116468C vor 9,9,30 vperm 0,0,0,15 vand 5,5, 16 vperm 1,1,1,15 vand 6,6, 16 vperm 2,2,2,15 vand 7,7, 16 vperm 3,3,3,15 vand 8,8, 16 vperm 4,4,4,15 vand 9,9, 16 .long 0x10A500C0 vxor 0,0,0 .long 0x10C608C0 vxor 1,1,1 .long 0x10E710C0 vxor 2,2,2 .long 0x110818C0 vxor 3,3,3 .long 0x112920C0 vxor 4,4,4 xor. 
5,5,5 b .Last_vsx .align 4 .Ldone_vsx: - ld 0,448(1) + ld 0,432(1) li 27,4 li 28,8 li 29,12 li 30,16 .long 0x7C001919 .long 0x7C3B1919 .long 0x7C5C1919 .long 0x7C7D1919 .long 0x7C9E1919 - lwz 12,388(1) + lwz 12,372(1) mtlr 0 li 10,191 li 11,207 or 12,12,12 lvx 20,10,1 addi 10,10,32 - lvx 21,10,1 - addi 10,10,32 - lvx 22,11,1 + lvx 21,11,1 addi 11,11,32 - lvx 23,10,1 + lvx 22,10,1 addi 10,10,32 - lvx 24,11,1 + lvx 23,11,1 addi 11,11,32 - lvx 25,10,1 + lvx 24,10,1 addi 10,10,32 - lvx 26,11,1 + lvx 25,11,1 addi 11,11,32 - lvx 27,10,1 + lvx 26,10,1 addi 10,10,32 - lvx 28,11,1 + lvx 27,11,1 addi 11,11,32 - lvx 29,10,1 + lvx 28,10,1 addi 10,10,32 - lvx 30,11,1 - lvx 31,10,1 - ld 27,392(1) - ld 28,400(1) - ld 29,408(1) - ld 30,416(1) - ld 31,424(1) - addi 1,1,432 + lvx 29,11,1 + addi 11,11,32 + lvx 30,10,1 + lvx 31,11,1 + ld 27,376(1) + ld 28,384(1) + ld 29,392(1) + ld 30,400(1) + ld 31,408(1) + addi 1,1,416 blr .long 0 .byte 0,12,0x04,1,0x80,5,4,0 .long 0 .size __poly1305_blocks_vsx,.-__poly1305_blocks_vsx .align 6 .LPICmeup: mflr 0 bcl 20,31,$+4 mflr 12 addi 12,12,56 mtlr 0 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .space 28 .long 0x00000000,0x03ffffff .long 0x00000000,0x03ffffff .long 0x00000000,0x0000001a .long 0x00000000,0x0000001a .long 0x00000000,0x00000028 .long 0x00000000,0x00000028 .long 0x00000000,0x0e0f0001 .long 0x00000000,0x1e1f1011 .long 0x01000000,0x01000000 .long 0x01000000,0x01000000 .long 0x07060504,0x03020100 .long 0x0f0e0d0c,0x0b0a0908 .long 0x00000000,0x00000000 .long 0x00000000,0x04050607 .long 0x04050607,0x00000000 .long 0x00000000,0x00000000 .long 0x00000000,0x00000000 .long 0x04050607,0x00000000 .long 0xffffffff,0x00000000 .long 0xffffffff,0xffffffff .long 0xffffffff,0x00000000 .long 0xffffffff,0x00000000 .long 0x00000000,0x00000000 .long 0xffffffff,0x00000000 .byte 80,111,108,121,49,51,48,53,32,102,111,114,32,80,80,67,44,67,82,89,80,84,79,71,65,77,83,32,98,121,32,64,100,111,116,45,97,115,109,0 .align 2 diff --git 
a/sys/crypto/openssl/powerpc64le/aesp8-ppc.S b/sys/crypto/openssl/powerpc64le/aesp8-ppc.S index 922f45cc0e46..72e5d47335c3 100644 --- a/sys/crypto/openssl/powerpc64le/aesp8-ppc.S +++ b/sys/crypto/openssl/powerpc64le/aesp8-ppc.S @@ -1,3659 +1,3704 @@ /* Do not modify. This file is auto-generated from aesp8-ppc.pl. */ .machine "any" .abiversion 2 .text .align 7 rcon: .byte 0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01,0x00,0x00,0x00,0x01 .byte 0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b,0x00,0x00,0x00,0x1b .byte 0x0c,0x0f,0x0e,0x0d,0x0c,0x0f,0x0e,0x0d,0x0c,0x0f,0x0e,0x0d,0x0c,0x0f,0x0e,0x0d .byte 0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00,0x00 +.long 0x0f102132, 0x43546576, 0x8798a9ba, 0xcbdcedfe .Lconsts: mflr 0 bcl 20,31,$+4 mflr 6 - addi 6,6,-0x48 + addi 6,6,-0x58 mtlr 0 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .byte 65,69,83,32,102,111,114,32,80,111,119,101,114,73,83,65,32,50,46,48,55,44,67,82,89,80,84,79,71,65,77,83,32,98,121,32,60,97,112,112,114,111,64,111,112,101,110,115,115,108,46,111,114,103,62,0 .align 2 .globl aes_p8_set_encrypt_key .type aes_p8_set_encrypt_key,@function .align 5 aes_p8_set_encrypt_key: .localentry aes_p8_set_encrypt_key,0 .Lset_encrypt_key: mflr 11 std 11,16(1) li 6,-1 cmpldi 3,0 beq- .Lenc_key_abort cmpldi 5,0 beq- .Lenc_key_abort li 6,-2 cmpwi 4,128 blt- .Lenc_key_abort cmpwi 4,256 bgt- .Lenc_key_abort andi. 
0,4,0x3f bne- .Lenc_key_abort lis 0,0xfff0 li 12,-1 or 0,0,0 bl .Lconsts mtlr 11 neg 9,3 lvx 1,0,3 addi 3,3,15 lvsr 3,0,9 li 8,0x20 cmpwi 4,192 lvx 2,0,3 vspltisb 5,0x0f lvx 4,0,6 vxor 3,3,5 lvx 5,8,6 addi 6,6,0x10 vperm 1,1,2,3 li 7,8 vxor 0,0,0 mtctr 7 lvsl 8,0,5 vspltisb 9,-1 lvx 10,0,5 vperm 9,9,0,8 blt .Loop128 addi 3,3,8 beq .L192 addi 3,3,8 b .L256 .align 4 .Loop128: vperm 3,1,1,5 vsldoi 6,0,1,12 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vadduwm 4,4,4 vxor 1,1,3 bdnz .Loop128 lvx 4,0,6 vperm 3,1,1,5 vsldoi 6,0,1,12 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vadduwm 4,4,4 vxor 1,1,3 vperm 3,1,1,5 vsldoi 6,0,1,12 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vxor 1,1,3 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 3,5,15 addi 5,5,0x50 li 8,10 b .Ldone .align 4 .L192: lvx 6,0,3 li 7,4 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 5,5,16 vperm 2,2,6,3 vspltisb 3,8 mtctr 7 vsububm 5,5,3 .Loop192: vperm 3,2,2,5 vsldoi 6,0,1,12 .long 0x10632509 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 7,0,2,8 vspltw 6,1,3 vxor 6,6,2 vsldoi 2,0,2,12 vadduwm 4,4,4 vxor 2,2,6 vxor 1,1,3 vxor 2,2,3 vsldoi 7,7,1,8 vperm 3,2,2,5 vsldoi 6,0,1,12 vperm 11,7,7,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vsldoi 7,1,2,8 vxor 1,1,6 vsldoi 6,0,6,12 vperm 11,7,7,8 vsel 7,10,11,9 vor 10,11,11 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 stvx 7,0,5 addi 5,5,16 vspltw 6,1,3 vxor 6,6,2 vsldoi 2,0,2,12 vadduwm 4,4,4 vxor 2,2,6 vxor 1,1,3 vxor 2,2,3 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 3,5,15 addi 5,5,16 bdnz .Loop192 li 8,12 addi 5,5,0x20 b .Ldone .align 4 .L256: lvx 6,0,3 li 7,7 li 8,14 vperm 
11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 5,5,16 vperm 2,2,6,3 mtctr 7 .Loop256: vperm 3,2,2,5 vsldoi 6,0,1,12 vperm 11,2,2,8 vsel 7,10,11,9 vor 10,11,11 .long 0x10632509 stvx 7,0,5 addi 5,5,16 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vsldoi 6,0,6,12 vxor 1,1,6 vadduwm 4,4,4 vxor 1,1,3 vperm 11,1,1,8 vsel 7,10,11,9 vor 10,11,11 stvx 7,0,5 addi 3,5,15 addi 5,5,16 bdz .Ldone vspltw 3,1,3 vsldoi 6,0,2,12 .long 0x106305C8 vxor 2,2,6 vsldoi 6,0,6,12 vxor 2,2,6 vsldoi 6,0,6,12 vxor 2,2,6 vxor 2,2,3 b .Loop256 .align 4 .Ldone: lvx 2,0,3 vsel 2,10,2,9 stvx 2,0,3 li 6,0 or 12,12,12 stw 8,0(5) .Lenc_key_abort: mr 3,6 blr .long 0 .byte 0,12,0x14,1,0,0,3,0 .long 0 .size aes_p8_set_encrypt_key,.-aes_p8_set_encrypt_key .globl aes_p8_set_decrypt_key .type aes_p8_set_decrypt_key,@function .align 5 aes_p8_set_decrypt_key: .localentry aes_p8_set_decrypt_key,0 stdu 1,-64(1) mflr 10 std 10,64+16(1) bl .Lset_encrypt_key mtlr 10 cmpwi 3,0 bne- .Ldec_key_abort slwi 7,8,4 subi 3,5,240 srwi 8,8,1 add 5,3,7 mtctr 8 .Ldeckey: lwz 0, 0(3) lwz 6, 4(3) lwz 7, 8(3) lwz 8, 12(3) addi 3,3,16 lwz 9, 0(5) lwz 10,4(5) lwz 11,8(5) lwz 12,12(5) stw 0, 0(5) stw 6, 4(5) stw 7, 8(5) stw 8, 12(5) subi 5,5,16 stw 9, -16(3) stw 10,-12(3) stw 11,-8(3) stw 12,-4(3) bdnz .Ldeckey xor 3,3,3 .Ldec_key_abort: addi 1,1,64 blr .long 0 .byte 0,12,4,1,0x80,0,3,0 .long 0 .size aes_p8_set_decrypt_key,.-aes_p8_set_decrypt_key .globl aes_p8_encrypt .type aes_p8_encrypt,@function .align 5 aes_p8_encrypt: .localentry aes_p8_encrypt,0 lwz 6,240(5) lis 0,0xfc00 li 12,-1 li 7,15 or 0,0,0 lvx 0,0,3 neg 11,4 lvx 1,7,3 lvsl 2,0,3 vspltisb 4,0x0f lvsr 3,0,11 vxor 2,2,4 li 7,16 vperm 0,0,1,2 lvx 1,0,5 lvsr 5,0,5 srwi 6,6,1 lvx 2,7,5 addi 7,7,16 subi 6,6,1 vperm 1,2,1,5 vxor 0,0,1 lvx 1,7,5 addi 7,7,16 mtctr 6 .Loop_enc: vperm 2,1,2,5 .long 0x10001508 lvx 2,7,5 addi 7,7,16 vperm 1,2,1,5 .long 0x10000D08 lvx 1,7,5 addi 7,7,16 bdnz .Loop_enc vperm 2,1,2,5 .long 0x10001508 lvx 2,7,5 vperm 1,2,1,5 .long 0x10000D09 vspltisb 2,-1 
vxor 1,1,1 li 7,15 vperm 2,2,1,3 vxor 3,3,4 lvx 1,0,4 vperm 0,0,0,3 vsel 1,1,0,2 lvx 4,7,4 stvx 1,0,4 vsel 0,0,4,2 stvx 0,7,4 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,3,0 .long 0 .size aes_p8_encrypt,.-aes_p8_encrypt .globl aes_p8_decrypt .type aes_p8_decrypt,@function .align 5 aes_p8_decrypt: .localentry aes_p8_decrypt,0 lwz 6,240(5) lis 0,0xfc00 li 12,-1 li 7,15 or 0,0,0 lvx 0,0,3 neg 11,4 lvx 1,7,3 lvsl 2,0,3 vspltisb 4,0x0f lvsr 3,0,11 vxor 2,2,4 li 7,16 vperm 0,0,1,2 lvx 1,0,5 lvsr 5,0,5 srwi 6,6,1 lvx 2,7,5 addi 7,7,16 subi 6,6,1 vperm 1,2,1,5 vxor 0,0,1 lvx 1,7,5 addi 7,7,16 mtctr 6 .Loop_dec: vperm 2,1,2,5 .long 0x10001548 lvx 2,7,5 addi 7,7,16 vperm 1,2,1,5 .long 0x10000D48 lvx 1,7,5 addi 7,7,16 bdnz .Loop_dec vperm 2,1,2,5 .long 0x10001548 lvx 2,7,5 vperm 1,2,1,5 .long 0x10000D49 vspltisb 2,-1 vxor 1,1,1 li 7,15 vperm 2,2,1,3 vxor 3,3,4 lvx 1,0,4 vperm 0,0,0,3 vsel 1,1,0,2 lvx 4,7,4 stvx 1,0,4 vsel 0,0,4,2 stvx 0,7,4 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,3,0 .long 0 .size aes_p8_decrypt,.-aes_p8_decrypt .globl aes_p8_cbc_encrypt .type aes_p8_cbc_encrypt,@function .align 5 aes_p8_cbc_encrypt: .localentry aes_p8_cbc_encrypt,0 cmpldi 5,16 .long 0x4dc00020 cmpwi 8,0 lis 0,0xffe0 li 12,-1 or 0,0,0 li 10,15 vxor 0,0,0 vspltisb 3,0x0f lvx 4,0,7 lvsl 6,0,7 lvx 5,10,7 vxor 6,6,3 vperm 4,4,5,6 neg 11,3 lvsr 10,0,6 lwz 9,240(6) lvsr 6,0,11 lvx 5,0,3 addi 3,3,15 vxor 6,6,3 lvsl 8,0,4 vspltisb 9,-1 lvx 7,0,4 vperm 9,9,0,8 vxor 8,8,3 srwi 9,9,1 li 10,16 subi 9,9,1 beq .Lcbc_dec .Lcbc_enc: vor 2,5,5 lvx 5,0,3 addi 3,3,16 mtctr 9 subi 5,5,16 lvx 0,0,6 vperm 2,2,5,6 lvx 1,10,6 addi 10,10,16 vperm 0,1,0,10 vxor 2,2,0 lvx 0,10,6 addi 10,10,16 vxor 2,2,4 .Loop_cbc_enc: vperm 1,0,1,10 .long 0x10420D08 lvx 1,10,6 addi 10,10,16 vperm 0,1,0,10 .long 0x10420508 lvx 0,10,6 addi 10,10,16 bdnz .Loop_cbc_enc vperm 1,0,1,10 .long 0x10420D08 lvx 1,10,6 li 10,16 vperm 0,1,0,10 .long 0x10820509 cmpldi 5,16 vperm 3,4,4,8 vsel 2,7,3,9 vor 7,3,3 stvx 2,0,4 addi 4,4,16 bge 
# --- CBC decrypt path (operand of the preceding bge continues here) --------
# Serial CBC-decrypt loop for < 128 bytes, then the 8-way unrolled core.
# Inverse-cipher opcodes in this region (pattern per pipeline register):
#   .long 0x10420D48 = vncipher     v2,v2,v1
#   .long 0x10420548 = vncipher     v2,v2,v0
#   .long 0x10420549 = vncipherlast v2,v2,v0
# _aesp8_cbc_decrypt8x saves v20-v31 and r26-r31 on a 448-byte frame
# (stw 12,396(1) parks the entry value of r12), copies the aligned round
# keys to the stack at r1+64+15, and pipelines eight blocks per iteration
# (vncipher forms 0x11CE..../0x11EF..../..../0x12B5.... target v14-v21);
# the 0x7C..1E99 / 0x7C..2799 words are lxvd2x / stxvd2x block load/stores.
.Lcbc_enc b .Lcbc_done .align 4 .Lcbc_dec: cmpldi 5,128 bge _aesp8_cbc_decrypt8x vor 3,5,5 lvx 5,0,3 addi 3,3,16 mtctr 9 subi 5,5,16 lvx 0,0,6 vperm 3,3,5,6 lvx 1,10,6 addi 10,10,16 vperm 0,1,0,10 vxor 2,3,0 lvx 0,10,6 addi 10,10,16 .Loop_cbc_dec: vperm 1,0,1,10 .long 0x10420D48 lvx 1,10,6 addi 10,10,16 vperm 0,1,0,10 .long 0x10420548 lvx 0,10,6 addi 10,10,16 bdnz .Loop_cbc_dec vperm 1,0,1,10 .long 0x10420D48 lvx 1,10,6 li 10,16 vperm 0,1,0,10 .long 0x10420549 cmpldi 5,16 vxor 2,2,4 vor 4,3,3 vperm 3,2,2,8 vsel 2,7,3,9 vor 7,3,3 stvx 2,0,4 addi 4,4,16 bge .Lcbc_dec .Lcbc_done: addi 4,4,-1 lvx 2,0,4 vsel 2,7,2,9 stvx 2,0,4 neg 8,7 li 10,15 vxor 0,0,0 vspltisb 9,-1 vspltisb 3,0x0f lvsr 8,0,8 vperm 9,9,0,8 vxor 8,8,3 lvx 7,0,7 vperm 4,4,4,8 vsel 2,7,4,9 lvx 5,10,7 stvx 2,0,7 vsel 2,4,5,9 stvx 2,10,7 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,6,0 .long 0 .align 5 _aesp8_cbc_decrypt8x: stdu 1,-448(1) li 10,207 li 11,223 stvx 20,10,1 addi 10,10,32 stvx 21,11,1 addi 11,11,32 stvx 22,10,1 addi 10,10,32 stvx 23,11,1 addi 11,11,32 stvx 24,10,1 addi 10,10,32 stvx 25,11,1 addi 11,11,32 stvx 26,10,1 addi 10,10,32 stvx 27,11,1 addi 11,11,32 stvx 28,10,1 addi 10,10,32 stvx 29,11,1 addi 11,11,32 stvx 30,10,1 stvx 31,11,1 li 0,-1 stw 12,396(1) li 8,0x10 std 26,400(1) li 26,0x20 std 27,408(1) li 27,0x30 std 28,416(1) li 28,0x40 std 29,424(1) li 29,0x50 std 30,432(1) li 30,0x60 std 31,440(1) li 31,0x70 or 0,0,0 subi 9,9,3 subi 5,5,128 lvx 23,0,6 lvx 30,8,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,30,23,10 addi 11,1,64+15 mtctr 9 .Load_cbc_dec_key: vperm 24,31,30,10 lvx 30,8,6 addi 6,6,0x20 stvx 24,0,11 vperm 25,30,31,10 lvx 31,0,6 stvx 25,8,11 addi 11,11,0x20 bdnz .Load_cbc_dec_key lvx 26,8,6 vperm 24,31,30,10 lvx 27,26,6 stvx 24,0,11 vperm 25,26,31,10 lvx 28,27,6 stvx 25,8,11 addi 11,1,64+15 vperm 26,27,26,10 lvx 29,28,6 vperm 27,28,27,10 lvx 30,29,6 vperm 28,29,28,10 lvx 31,30,6 vperm 29,30,29,10 lvx 14,31,6 vperm 30,31,30,10 lvx 24,0,11 vperm 31,14,31,10 lvx 25,8,11 subi 3,3,15 li
# (li 10,8 continues on the next line; the 0x7C..1E99 lxvd2x words below
#  fetch eight ciphertext blocks which are XORed with the first round key
#  into the v14-v21 pipeline before entering .Loop_cbc_dec8x)
10,8 .long 0x7C001E99 lvsl 6,0,10 vspltisb 3,0x0f .long 0x7C281E99 vxor 6,6,3 .long 0x7C5A1E99 vperm 0,0,0,6 .long 0x7C7B1E99 vperm 1,1,1,6 .long 0x7D5C1E99 vperm 2,2,2,6 vxor 14,0,23 .long 0x7D7D1E99 vperm 3,3,3,6 vxor 15,1,23 .long 0x7D9E1E99 vperm 10,10,10,6 vxor 16,2,23 .long 0x7DBF1E99 addi 3,3,0x80 vperm 11,11,11,6 vxor 17,3,23 vperm 12,12,12,6 vxor 18,10,23 vperm 13,13,13,6 vxor 19,11,23 vxor 20,12,23 vxor 21,13,23 mtctr 9 b .Loop_cbc_dec8x .align 5 .Loop_cbc_dec8x: .long 0x11CEC548 .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 lvx 24,26,11 addi 11,11,0x20 .long 0x11CECD48 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 lvx 25,8,11 bdnz .Loop_cbc_dec8x subic 5,5,128 .long 0x11CEC548 .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 subfe. 0,0,0 .long 0x11CECD48 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 and 0,0,5 .long 0x11CED548 .long 0x11EFD548 .long 0x1210D548 .long 0x1231D548 .long 0x1252D548 .long 0x1273D548 .long 0x1294D548 .long 0x12B5D548 add 3,3,0 .long 0x11CEDD48 .long 0x11EFDD48 .long 0x1210DD48 .long 0x1231DD48 .long 0x1252DD48 .long 0x1273DD48 .long 0x1294DD48 .long 0x12B5DD48 addi 11,1,64+15 .long 0x11CEE548 .long 0x11EFE548 .long 0x1210E548 .long 0x1231E548 .long 0x1252E548 .long 0x1273E548 .long 0x1294E548 .long 0x12B5E548 lvx 24,0,11 .long 0x11CEED48 .long 0x11EFED48 .long 0x1210ED48 .long 0x1231ED48 .long 0x1252ED48 .long 0x1273ED48 .long 0x1294ED48 .long 0x12B5ED48 lvx 25,8,11 .long 0x11CEF548 vxor 4,4,31 .long 0x11EFF548 vxor 0,0,31 .long 0x1210F548 vxor 1,1,31 .long 0x1231F548 vxor 2,2,31 .long 0x1252F548 vxor 3,3,31 .long 0x1273F548 vxor 10,10,31 .long 0x1294F548 vxor 11,11,31 .long 0x12B5F548 vxor 12,12,31 .long 0x11CE2549 .long 0x11EF0549 .long
# (vncipherlast + next-batch load/store phase; 0x7C..1E99 = lxvd2x loads of
#  the next eight blocks, 0x7D../0x7E..2799 = stxvd2x stores of plaintext)
0x7C001E99 .long 0x12100D49 .long 0x7C281E99 .long 0x12311549 vperm 0,0,0,6 .long 0x7C5A1E99 .long 0x12521D49 vperm 1,1,1,6 .long 0x7C7B1E99 .long 0x12735549 vperm 2,2,2,6 .long 0x7D5C1E99 .long 0x12945D49 vperm 3,3,3,6 .long 0x7D7D1E99 .long 0x12B56549 vperm 10,10,10,6 .long 0x7D9E1E99 vor 4,13,13 vperm 11,11,11,6 .long 0x7DBF1E99 addi 3,3,0x80 vperm 14,14,14,6 vperm 15,15,15,6 .long 0x7DC02799 vperm 12,12,12,6 vxor 14,0,23 vperm 16,16,16,6 .long 0x7DE82799 vperm 13,13,13,6 vxor 15,1,23 vperm 17,17,17,6 .long 0x7E1A2799 vxor 16,2,23 vperm 18,18,18,6 .long 0x7E3B2799 vxor 17,3,23 vperm 19,19,19,6 .long 0x7E5C2799 vxor 18,10,23 vperm 20,20,20,6 .long 0x7E7D2799 vxor 19,11,23 vperm 21,21,21,6 .long 0x7E9E2799 vxor 20,12,23 .long 0x7EBF2799 addi 4,4,0x80 vxor 21,13,23 mtctr 9 beq .Loop_cbc_dec8x addic. 5,5,128 beq .Lcbc_dec8x_done nop nop .Loop_cbc_dec8x_tail: .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 lvx 24,26,11 addi 11,11,0x20 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 lvx 25,8,11 bdnz .Loop_cbc_dec8x_tail .long 0x11EFC548 .long 0x1210C548 .long 0x1231C548 .long 0x1252C548 .long 0x1273C548 .long 0x1294C548 .long 0x12B5C548 .long 0x11EFCD48 .long 0x1210CD48 .long 0x1231CD48 .long 0x1252CD48 .long 0x1273CD48 .long 0x1294CD48 .long 0x12B5CD48 .long 0x11EFD548 .long 0x1210D548 .long 0x1231D548 .long 0x1252D548 .long 0x1273D548 .long 0x1294D548 .long 0x12B5D548 .long 0x11EFDD48 .long 0x1210DD48 .long 0x1231DD48 .long 0x1252DD48 .long 0x1273DD48 .long 0x1294DD48 .long 0x12B5DD48 .long 0x11EFE548 .long 0x1210E548 .long 0x1231E548 .long 0x1252E548 .long 0x1273E548 .long 0x1294E548 .long 0x12B5E548 .long 0x11EFED48 .long 0x1210ED48 .long 0x1231ED48 .long 0x1252ED48 .long 0x1273ED48 .long 0x1294ED48 .long 0x12B5ED48 .long 0x11EFF548 vxor 4,4,31 .long 0x1210F548 vxor 1,1,31 .long 0x1231F548 vxor 2,2,31 .long 0x1252F548 vxor
# (tail dispatch: remaining 1..7 blocks selected by cmplwi 5,{32,64,96})
3,3,31 .long 0x1273F548 vxor 10,10,31 .long 0x1294F548 vxor 11,11,31 .long 0x12B5F548 vxor 12,12,31 cmplwi 5,32 blt .Lcbc_dec8x_one nop beq .Lcbc_dec8x_two cmplwi 5,64 blt .Lcbc_dec8x_three nop beq .Lcbc_dec8x_four cmplwi 5,96 blt .Lcbc_dec8x_five nop beq .Lcbc_dec8x_six .Lcbc_dec8x_seven: .long 0x11EF2549 .long 0x12100D49 .long 0x12311549 .long 0x12521D49 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 vperm 18,18,18,6 .long 0x7E3A2799 vperm 19,19,19,6 .long 0x7E5B2799 vperm 20,20,20,6 .long 0x7E7C2799 vperm 21,21,21,6 .long 0x7E9D2799 .long 0x7EBE2799 addi 4,4,0x70 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_six: .long 0x12102549 .long 0x12311549 .long 0x12521D49 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 vperm 16,16,16,6 vperm 17,17,17,6 .long 0x7E002799 vperm 18,18,18,6 .long 0x7E282799 vperm 19,19,19,6 .long 0x7E5A2799 vperm 20,20,20,6 .long 0x7E7B2799 vperm 21,21,21,6 .long 0x7E9C2799 .long 0x7EBD2799 addi 4,4,0x60 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_five: .long 0x12312549 .long 0x12521D49 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 vperm 17,17,17,6 vperm 18,18,18,6 .long 0x7E202799 vperm 19,19,19,6 .long 0x7E482799 vperm 20,20,20,6 .long 0x7E7A2799 vperm 21,21,21,6 .long 0x7E9B2799 .long 0x7EBC2799 addi 4,4,0x50 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_four: .long 0x12522549 .long 0x12735549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 vperm 18,18,18,6 vperm 19,19,19,6 .long 0x7E402799 vperm 20,20,20,6 .long 0x7E682799 vperm 21,21,21,6 .long 0x7E9A2799 .long 0x7EBB2799 addi 4,4,0x40 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_three: .long 0x12732549 .long 0x12945D49 .long 0x12B56549 vor 4,13,13 vperm 19,19,19,6 vperm 20,20,20,6 .long 0x7E602799 vperm 21,21,21,6 .long 0x7E882799 .long 0x7EBA2799 addi 4,4,0x30 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_two: .long 0x12942549 .long 0x12B56549 vor 4,13,13 vperm 20,20,20,6 vperm
# (epilogue: scrub stack key copies with stvx 6, restore v20-v31/r26-r31,
#  write back the final IV via 0x7C803F99 = stxvd2x, then the ctr32 entry)
21,21,21,6 .long 0x7E802799 .long 0x7EA82799 addi 4,4,0x20 b .Lcbc_dec8x_done .align 5 .Lcbc_dec8x_one: .long 0x12B52549 vor 4,13,13 vperm 21,21,21,6 .long 0x7EA02799 addi 4,4,0x10 .Lcbc_dec8x_done: vperm 4,4,4,6 .long 0x7C803F99 li 10,79 li 11,95 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 ld 26,400(1) ld 27,408(1) ld 28,416(1) ld 29,424(1) ld 30,432(1) ld 31,440(1) addi 1,1,448 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_p8_cbc_encrypt,.-aes_p8_cbc_encrypt .globl aes_p8_ctr32_encrypt_blocks .type aes_p8_ctr32_encrypt_blocks,@function .align 5 aes_p8_ctr32_encrypt_blocks: .localentry aes_p8_ctr32_encrypt_blocks,0 cmpldi 5,1 .long 0x4dc00020 lis 0,0xfff0 li 12,-1 or 0,0,0 li 10,15 vxor 0,0,0 vspltisb 3,0x0f lvx 4,0,7 lvsl 6,0,7 lvx 5,10,7 vspltisb 11,1 vxor 6,6,3 vperm 4,4,5,6 vsldoi 11,0,11,1 neg 11,3 lvsr 10,0,6 lwz 9,240(6) lvsr 6,0,11 lvx 5,0,3 addi 3,3,15 vxor 6,6,3 srwi 9,9,1 li 10,16 subi 9,9,1 cmpldi 5,8 bge _aesp8_ctr32_encrypt8x lvsl 8,0,4 vspltisb 9,-1 lvx 7,0,4 vperm 9,9,0,8 vxor 8,8,3 lvx 0,0,6 mtctr 9 lvx 1,10,6 addi 10,10,16 vperm 0,1,0,10 vxor 2,4,0 lvx 0,10,6 addi 10,10,16 b .Loop_ctr32_enc .align 5 .Loop_ctr32_enc: vperm 1,0,1,10 .long 0x10420D08 lvx 1,10,6 addi 10,10,16 vperm 0,1,0,10 .long 0x10420508 lvx 0,10,6 addi 10,10,16 bdnz .Loop_ctr32_enc vadduwm 4,4,11 vor 3,5,5 lvx 5,0,3 addi 3,3,16 subic.
# --- aes_p8_ctr32_encrypt_blocks, serial tail + 8-way core -----------------
# void aes_p8_ctr32_encrypt_blocks(const u8 *in(r3), u8 *out(r4),
#         size_t blocks(r5), const AES_KEY *key(r6), const u8 ivec[16](r7))
# The serial loop encrypts the counter block in v4 (incremented per block
# by vadduwm with the 32-bit "one" vector v11 built earlier) and XORs the
# keystream into the input via vcipherlast (.long 0x10421D09).
# _aesp8_ctr32_encrypt8x keeps eight counter streams (v15-v22) in flight;
# vcipher forms .long 0x11EF..../..../0x12D6.... advance all eight, and
# 0x7C..1E99 / 0x7C..2799 words are lxvd2x / stxvd2x block transfers.
# Frame layout matches _aesp8_cbc_decrypt8x: 448-byte frame, v20-v31 and
# r26-r31 saved, round keys copied to the stack at r1+64+15.
5,5,1 vperm 1,0,1,10 .long 0x10420D08 lvx 1,10,6 vperm 3,3,5,6 li 10,16 vperm 1,1,0,10 lvx 0,0,6 vxor 3,3,1 .long 0x10421D09 lvx 1,10,6 addi 10,10,16 vperm 2,2,2,8 vsel 3,7,2,9 mtctr 9 vperm 0,1,0,10 vor 7,2,2 vxor 2,4,0 lvx 0,10,6 addi 10,10,16 stvx 3,0,4 addi 4,4,16 bne .Loop_ctr32_enc addi 4,4,-1 lvx 2,0,4 vsel 2,7,2,9 stvx 2,0,4 or 12,12,12 blr .long 0 .byte 0,12,0x14,0,0,0,6,0 .long 0 .align 5 _aesp8_ctr32_encrypt8x: stdu 1,-448(1) li 10,207 li 11,223 stvx 20,10,1 addi 10,10,32 stvx 21,11,1 addi 11,11,32 stvx 22,10,1 addi 10,10,32 stvx 23,11,1 addi 11,11,32 stvx 24,10,1 addi 10,10,32 stvx 25,11,1 addi 11,11,32 stvx 26,10,1 addi 10,10,32 stvx 27,11,1 addi 11,11,32 stvx 28,10,1 addi 10,10,32 stvx 29,11,1 addi 11,11,32 stvx 30,10,1 stvx 31,11,1 li 0,-1 stw 12,396(1) li 8,0x10 std 26,400(1) li 26,0x20 std 27,408(1) li 27,0x30 std 28,416(1) li 28,0x40 std 29,424(1) li 29,0x50 std 30,432(1) li 30,0x60 std 31,440(1) li 31,0x70 or 0,0,0 subi 9,9,3 lvx 23,0,6 lvx 30,8,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,30,23,10 addi 11,1,64+15 mtctr 9 .Load_ctr32_enc_key: vperm 24,31,30,10 lvx 30,8,6 addi 6,6,0x20 stvx 24,0,11 vperm 25,30,31,10 lvx 31,0,6 stvx 25,8,11 addi 11,11,0x20 bdnz .Load_ctr32_enc_key lvx 26,8,6 vperm 24,31,30,10 lvx 27,26,6 stvx 24,0,11 vperm 25,26,31,10 lvx 28,27,6 stvx 25,8,11 addi 11,1,64+15 vperm 26,27,26,10 lvx 29,28,6 vperm 27,28,27,10 lvx 30,29,6 vperm 28,29,28,10 lvx 31,30,6 vperm 29,30,29,10 lvx 15,31,6 vperm 30,31,30,10 lvx 24,0,11 vperm 31,15,31,10 lvx 25,8,11 vadduwm 7,11,11 subi 3,3,15 sldi 5,5,4 vadduwm 16,4,11 vadduwm 17,4,7 vxor 15,4,23 li 10,8 vadduwm 18,16,7 vxor 16,16,23 lvsl 6,0,10 vadduwm 19,17,7 vxor 17,17,23 vspltisb 3,0x0f vadduwm 20,18,7 vxor 18,18,23 vxor 6,6,3 vadduwm 21,19,7 vxor 19,19,23 vadduwm 22,20,7 vxor 20,20,23 vadduwm 4,21,7 vxor 21,21,23 vxor 22,22,23 mtctr 9 b .Loop_ctr32_enc8x .align 5 .Loop_ctr32_enc8x: .long 0x11EFC508 .long 0x1210C508 .long 0x1231C508 .long 0x1252C508 .long 0x1273C508 .long 0x1294C508 .long 0x12B5C508
# (8-way round pipeline continues; remaining-length bookkeeping is folded
#  between rounds via subic/subfe carry tricks on r0/r5/r11)
.long 0x12D6C508 .Loop_ctr32_enc8x_middle: lvx 24,26,11 addi 11,11,0x20 .long 0x11EFCD08 .long 0x1210CD08 .long 0x1231CD08 .long 0x1252CD08 .long 0x1273CD08 .long 0x1294CD08 .long 0x12B5CD08 .long 0x12D6CD08 lvx 25,8,11 bdnz .Loop_ctr32_enc8x subic 11,5,256 .long 0x11EFC508 .long 0x1210C508 .long 0x1231C508 .long 0x1252C508 .long 0x1273C508 .long 0x1294C508 .long 0x12B5C508 .long 0x12D6C508 subfe 0,0,0 .long 0x11EFCD08 .long 0x1210CD08 .long 0x1231CD08 .long 0x1252CD08 .long 0x1273CD08 .long 0x1294CD08 .long 0x12B5CD08 .long 0x12D6CD08 and 0,0,11 addi 11,1,64+15 .long 0x11EFD508 .long 0x1210D508 .long 0x1231D508 .long 0x1252D508 .long 0x1273D508 .long 0x1294D508 .long 0x12B5D508 .long 0x12D6D508 lvx 24,0,11 subic 5,5,129 .long 0x11EFDD08 addi 5,5,1 .long 0x1210DD08 .long 0x1231DD08 .long 0x1252DD08 .long 0x1273DD08 .long 0x1294DD08 .long 0x12B5DD08 .long 0x12D6DD08 lvx 25,8,11 .long 0x11EFE508 .long 0x7C001E99 .long 0x1210E508 .long 0x7C281E99 .long 0x1231E508 .long 0x7C5A1E99 .long 0x1252E508 .long 0x7C7B1E99 .long 0x1273E508 .long 0x7D5C1E99 .long 0x1294E508 .long 0x7D9D1E99 .long 0x12B5E508 .long 0x7DBE1E99 .long 0x12D6E508 .long 0x7DDF1E99 addi 3,3,0x80 .long 0x11EFED08 vperm 0,0,0,6 .long 0x1210ED08 vperm 1,1,1,6 .long 0x1231ED08 vperm 2,2,2,6 .long 0x1252ED08 vperm 3,3,3,6 .long 0x1273ED08 vperm 10,10,10,6 .long 0x1294ED08 vperm 12,12,12,6 .long 0x12B5ED08 vperm 13,13,13,6 .long 0x12D6ED08 vperm 14,14,14,6 add 3,3,0 subfe.
# (final rounds: vcipherlast XORs keystream with input; counters are
#  re-primed for the next batch before the stores retire)
0,0,0 .long 0x11EFF508 vxor 0,0,31 .long 0x1210F508 vxor 1,1,31 .long 0x1231F508 vxor 2,2,31 .long 0x1252F508 vxor 3,3,31 .long 0x1273F508 vxor 10,10,31 .long 0x1294F508 vxor 12,12,31 .long 0x12B5F508 vxor 13,13,31 .long 0x12D6F508 vxor 14,14,31 bne .Lctr32_enc8x_break .long 0x100F0509 .long 0x10300D09 vadduwm 16,4,11 .long 0x10511509 vadduwm 17,4,7 vxor 15,4,23 .long 0x10721D09 vadduwm 18,16,7 vxor 16,16,23 .long 0x11535509 vadduwm 19,17,7 vxor 17,17,23 .long 0x11946509 vadduwm 20,18,7 vxor 18,18,23 .long 0x11B56D09 vadduwm 21,19,7 vxor 19,19,23 .long 0x11D67509 vadduwm 22,20,7 vxor 20,20,23 vperm 0,0,0,6 vadduwm 4,21,7 vxor 21,21,23 vperm 1,1,1,6 vxor 22,22,23 mtctr 9 .long 0x11EFC508 .long 0x7C002799 vperm 2,2,2,6 .long 0x1210C508 .long 0x7C282799 vperm 3,3,3,6 .long 0x1231C508 .long 0x7C5A2799 vperm 10,10,10,6 .long 0x1252C508 .long 0x7C7B2799 vperm 12,12,12,6 .long 0x1273C508 .long 0x7D5C2799 vperm 13,13,13,6 .long 0x1294C508 .long 0x7D9D2799 vperm 14,14,14,6 .long 0x12B5C508 .long 0x7DBE2799 .long 0x12D6C508 .long 0x7DDF2799 addi 4,4,0x80 b .Loop_ctr32_enc8x_middle .align 5 .Lctr32_enc8x_break: cmpwi 5,-0x60 blt .Lctr32_enc8x_one nop beq .Lctr32_enc8x_two cmpwi 5,-0x40 blt .Lctr32_enc8x_three nop beq .Lctr32_enc8x_four cmpwi 5,-0x20 blt .Lctr32_enc8x_five nop beq .Lctr32_enc8x_six cmpwi 5,0x00 blt .Lctr32_enc8x_seven .Lctr32_enc8x_eight: .long 0x11EF0509 .long 0x12100D09 .long 0x12311509 .long 0x12521D09 .long 0x12735509 .long 0x12946509 .long 0x12B56D09 .long 0x12D67509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 vperm 18,18,18,6 .long 0x7E3A2799 vperm 19,19,19,6 .long 0x7E5B2799 vperm 20,20,20,6 .long 0x7E7C2799 vperm 21,21,21,6 .long 0x7E9D2799 vperm 22,22,22,6 .long 0x7EBE2799 .long 0x7EDF2799 addi 4,4,0x80 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_seven: .long 0x11EF0D09 .long 0x12101509 .long 0x12311D09 .long 0x12525509 .long 0x12736509 .long 0x12946D09 .long 0x12B57509 vperm 15,15,15,6 vperm 16,16,16,6 .long
# (tail dispatch for the final 1..8 blocks, then epilogue: scrub stack key
#  copies with stvx 6, restore v20-v31/r26-r31, pop the 448-byte frame)
0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 vperm 18,18,18,6 .long 0x7E3A2799 vperm 19,19,19,6 .long 0x7E5B2799 vperm 20,20,20,6 .long 0x7E7C2799 vperm 21,21,21,6 .long 0x7E9D2799 .long 0x7EBE2799 addi 4,4,0x70 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_six: .long 0x11EF1509 .long 0x12101D09 .long 0x12315509 .long 0x12526509 .long 0x12736D09 .long 0x12947509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 vperm 18,18,18,6 .long 0x7E3A2799 vperm 19,19,19,6 .long 0x7E5B2799 vperm 20,20,20,6 .long 0x7E7C2799 .long 0x7E9D2799 addi 4,4,0x60 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_five: .long 0x11EF1D09 .long 0x12105509 .long 0x12316509 .long 0x12526D09 .long 0x12737509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 vperm 18,18,18,6 .long 0x7E3A2799 vperm 19,19,19,6 .long 0x7E5B2799 .long 0x7E7C2799 addi 4,4,0x50 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_four: .long 0x11EF5509 .long 0x12106509 .long 0x12316D09 .long 0x12527509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 vperm 18,18,18,6 .long 0x7E3A2799 .long 0x7E5B2799 addi 4,4,0x40 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_three: .long 0x11EF6509 .long 0x12106D09 .long 0x12317509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 vperm 17,17,17,6 .long 0x7E082799 .long 0x7E3A2799 addi 4,4,0x30 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_two: .long 0x11EF6D09 .long 0x12107509 vperm 15,15,15,6 vperm 16,16,16,6 .long 0x7DE02799 .long 0x7E082799 addi 4,4,0x20 b .Lctr32_enc8x_done .align 5 .Lctr32_enc8x_one: .long 0x11EF7509 vperm 15,15,15,6 .long 0x7DE02799 addi 4,4,0x10 .Lctr32_enc8x_done: li 10,79 li 11,95 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 stvx 6,10,1 addi 10,10,32 stvx 6,11,1 addi 11,11,32 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi
# (register restore continues, then aes_p8_xts_encrypt entry: tweak is
#  computed by encrypting i2 (r8) under key2 (r7) in .Ltweak_xts_enc using
#  vcipher/vcipherlast on v8 — .long 0x11080D08 / 0x11080508 / 0x11080509)
10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 ld 26,400(1) ld 27,408(1) ld 28,416(1) ld 29,424(1) ld 30,432(1) ld 31,440(1) addi 1,1,448 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_p8_ctr32_encrypt_blocks,.-aes_p8_ctr32_encrypt_blocks .globl aes_p8_xts_encrypt .type aes_p8_xts_encrypt,@function .align 5 aes_p8_xts_encrypt: .localentry aes_p8_xts_encrypt,0 mr 10,3 li 3,-1 cmpldi 5,16 .long 0x4dc00020 lis 0,0xfff0 li 12,-1 li 11,0 or 0,0,0 vspltisb 9,0x07 lvsl 6,11,11 vspltisb 11,0x0f vxor 6,6,9 li 3,15 lvx 8,0,8 lvsl 5,0,8 lvx 4,3,8 vxor 5,5,11 vperm 8,8,4,5 neg 11,10 lvsr 5,0,11 lvx 2,0,10 addi 10,10,15 vxor 5,5,11 cmpldi 7,0 beq .Lxts_enc_no_key2 lvsr 7,0,7 lwz 9,240(7) srwi 9,9,1 subi 9,9,1 li 3,16 lvx 0,0,7 lvx 1,3,7 addi 3,3,16 vperm 0,1,0,7 vxor 8,8,0 lvx 0,3,7 addi 3,3,16 mtctr 9 .Ltweak_xts_enc: vperm 1,0,1,7 .long 0x11080D08 lvx 1,3,7 addi 3,3,16 vperm 0,1,0,7 .long 0x11080508 lvx 0,3,7 addi 3,3,16 bdnz .Ltweak_xts_enc vperm 1,0,1,7 .long 0x11080D08 lvx 1,3,7 vperm 0,1,0,7 .long 0x11080509 li 8,0 b .Lxts_enc .Lxts_enc_no_key2: li 3,-16 and 5,5,3 .Lxts_enc: lvx 4,0,10 addi 10,10,16 lvsr 7,0,6 lwz 9,240(6) srwi 9,9,1 subi 9,9,1 li 3,16 vslb 10,9,9 vor 10,10,9 vspltisb 11,1 vsldoi 10,10,11,15 cmpldi 5,96 bge _aesp8_xts_encrypt6x andi. 7,5,15 subic 0,5,32 subi 7,7,16 subfe 0,0,0 and 0,0,7 add 10,10,0 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vperm 2,2,4,5 vperm 0,1,0,7 vxor 2,2,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 b .Loop_xts_enc .align 5 .Loop_xts_enc: vperm 1,0,1,7 .long 0x10420D08 lvx 1,3,6 addi 3,3,16 vperm 0,1,0,7 .long 0x10420508 lvx 0,3,6 addi 3,3,16 bdnz .Loop_xts_enc vperm 1,0,1,7 .long 0x10420D08 lvx 1,3,6 li 3,16 vperm 0,1,0,7 vxor 0,0,8 .long 0x10620509 vperm 11,3,3,6 .long 0x7D602799 addi 4,4,16 subic.
# --- aes_p8_xts_encrypt tail + aes_p8_xts_decrypt --------------------------
# Tweak update between blocks is the GF(2^128) multiply-by-alpha:
#   vsrab 11,8,9 / vaddubm 8,8,8 / vsldoi 11,11,11,15 / vand 11,11,10 /
#   vxor 8,8,11  (v9 = 0x07 splat, v10 = carry-reduction mask).
# .Loop_xts_enc_steal / .Loop_xts_dec_steal implement ciphertext stealing
# for a trailing partial block by byte-copying through r11/r0 (lbzu/stb).
# aes_p8_xts_decrypt mirrors the encrypt side with the inverse cipher:
#   .long 0x10420D48/0x10420548 = vncipher v2,v2,{v1,v0}
#   .long 0x10620549            = vncipherlast v3,v2,v0
# and .long 0x7D004799 = stxvd2x writing the final tweak back when r8 != 0.
# Lengths >= 96 bytes branch to the 6x-unrolled cores (below this block).
5,5,16 beq .Lxts_enc_done vor 2,4,4 lvx 4,0,10 addi 10,10,16 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 subic 0,5,32 subfe 0,0,0 and 0,0,7 add 10,10,0 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 vperm 2,2,4,5 vperm 0,1,0,7 vxor 2,2,8 vxor 3,3,0 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 cmpldi 5,16 bge .Loop_xts_enc vxor 3,3,8 lvsr 5,0,5 vxor 4,4,4 vspltisb 11,-1 vperm 4,4,11,5 vsel 2,2,3,4 subi 11,4,17 subi 4,4,16 mtctr 5 li 5,16 .Loop_xts_enc_steal: lbzu 0,1(11) stb 0,16(11) bdnz .Loop_xts_enc_steal mtctr 9 b .Loop_xts_enc .Lxts_enc_done: cmpldi 8,0 beq .Lxts_enc_ret vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 vperm 8,8,8,6 .long 0x7D004799 .Lxts_enc_ret: or 12,12,12 li 3,0 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_p8_xts_encrypt,.-aes_p8_xts_encrypt .globl aes_p8_xts_decrypt .type aes_p8_xts_decrypt,@function .align 5 aes_p8_xts_decrypt: .localentry aes_p8_xts_decrypt,0 mr 10,3 li 3,-1 cmpldi 5,16 .long 0x4dc00020 lis 0,0xfff8 li 12,-1 li 11,0 or 0,0,0 andi. 0,5,15 neg 0,0 andi. 0,0,16 sub 5,5,0 vspltisb 9,0x07 lvsl 6,11,11 vspltisb 11,0x0f vxor 6,6,9 li 3,15 lvx 8,0,8 lvsl 5,0,8 lvx 4,3,8 vxor 5,5,11 vperm 8,8,4,5 neg 11,10 lvsr 5,0,11 lvx 2,0,10 addi 10,10,15 vxor 5,5,11 cmpldi 7,0 beq .Lxts_dec_no_key2 lvsr 7,0,7 lwz 9,240(7) srwi 9,9,1 subi 9,9,1 li 3,16 lvx 0,0,7 lvx 1,3,7 addi 3,3,16 vperm 0,1,0,7 vxor 8,8,0 lvx 0,3,7 addi 3,3,16 mtctr 9 .Ltweak_xts_dec: vperm 1,0,1,7 .long 0x11080D08 lvx 1,3,7 addi 3,3,16 vperm 0,1,0,7 .long 0x11080508 lvx 0,3,7 addi 3,3,16 bdnz .Ltweak_xts_dec vperm 1,0,1,7 .long 0x11080D08 lvx 1,3,7 vperm 0,1,0,7 .long 0x11080509 li 8,0 b .Lxts_dec .Lxts_dec_no_key2: neg 3,5 andi.
# (decrypt main/short loops; .Ltail_xts_dec pre-computes the NEXT tweak in
#  v12 so the last full block and the stolen partial block use the right
#  tweak order; ends at the _aesp8_xts_encrypt6x prologue which continues
#  on the following line)
3,3,15 add 5,5,3 .Lxts_dec: lvx 4,0,10 addi 10,10,16 lvsr 7,0,6 lwz 9,240(6) srwi 9,9,1 subi 9,9,1 li 3,16 vslb 10,9,9 vor 10,10,9 vspltisb 11,1 vsldoi 10,10,11,15 cmpldi 5,96 bge _aesp8_xts_decrypt6x lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vperm 2,2,4,5 vperm 0,1,0,7 vxor 2,2,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 cmpldi 5,16 blt .Ltail_xts_dec .align 5 .Loop_xts_dec: vperm 1,0,1,7 .long 0x10420D48 lvx 1,3,6 addi 3,3,16 vperm 0,1,0,7 .long 0x10420548 lvx 0,3,6 addi 3,3,16 bdnz .Loop_xts_dec vperm 1,0,1,7 .long 0x10420D48 lvx 1,3,6 li 3,16 vperm 0,1,0,7 vxor 0,0,8 .long 0x10620549 vperm 11,3,3,6 .long 0x7D602799 addi 4,4,16 subic. 5,5,16 beq .Lxts_dec_done vor 2,4,4 lvx 4,0,10 addi 10,10,16 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 vperm 2,2,4,5 vperm 0,1,0,7 vxor 2,2,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 mtctr 9 cmpldi 5,16 bge .Loop_xts_dec .Ltail_xts_dec: vsrab 11,8,9 vaddubm 12,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 12,12,11 subi 10,10,16 add 10,10,5 vxor 2,2,8 vxor 2,2,12 .Loop_xts_dec_short: vperm 1,0,1,7 .long 0x10420D48 lvx 1,3,6 addi 3,3,16 vperm 0,1,0,7 .long 0x10420548 lvx 0,3,6 addi 3,3,16 bdnz .Loop_xts_dec_short vperm 1,0,1,7 .long 0x10420D48 lvx 1,3,6 li 3,16 vperm 0,1,0,7 vxor 0,0,12 .long 0x10620549 vperm 11,3,3,6 .long 0x7D602799 vor 2,4,4 lvx 4,0,10 lvx 0,0,6 lvx 1,3,6 addi 3,3,16 vperm 2,2,4,5 vperm 0,1,0,7 lvsr 5,0,5 vxor 4,4,4 vspltisb 11,-1 vperm 4,4,11,5 vsel 2,2,3,4 vxor 0,0,8 vxor 2,2,0 lvx 0,3,6 addi 3,3,16 subi 11,4,1 mtctr 5 li 5,16 .Loop_xts_dec_steal: lbzu 0,1(11) stb 0,16(11) bdnz .Loop_xts_dec_steal mtctr 9 b .Loop_xts_dec .Lxts_dec_done: cmpldi 8,0 beq .Lxts_dec_ret vsrab 11,8,9 vaddubm 8,8,8 vsldoi 11,11,11,15 vand 11,11,10 vxor 8,8,11 vperm 8,8,8,6 .long 0x7D004799 .Lxts_dec_ret: or 12,12,12 li 3,0 blr .long 0 .byte 0,12,0x04,0,0x80,6,6,0 .long 0 .size aes_p8_xts_decrypt,.-aes_p8_xts_decrypt .align 5 _aesp8_xts_encrypt6x: stdu 1,-448(1) mflr 11 li 7,207 li 3,223 std
11,464(1) stvx 20,7,1 addi 7,7,32 stvx 21,3,1 addi 3,3,32 stvx 22,7,1 addi 7,7,32 stvx 23,3,1 addi 3,3,32 stvx 24,7,1 addi 7,7,32 stvx 25,3,1 addi 3,3,32 stvx 26,7,1 addi 7,7,32 stvx 27,3,1 addi 3,3,32 stvx 28,7,1 addi 7,7,32 stvx 29,3,1 addi 3,3,32 stvx 30,7,1 stvx 31,3,1 li 0,-1 stw 12,396(1) li 3,0x10 std 26,400(1) li 26,0x20 std 27,408(1) li 27,0x30 std 28,416(1) li 28,0x40 std 29,424(1) li 29,0x50 std 30,432(1) li 30,0x60 std 31,440(1) li 31,0x70 or 0,0,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl .Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 lvx 30,3,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,30,23,7 addi 7,1,64+15 mtctr 9 .Load_xts_enc_key: vperm 24,31,30,7 lvx 30,3,6 addi 6,6,0x20 stvx 24,0,7 vperm 25,30,31,7 lvx 31,0,6 stvx 25,3,7 addi 7,7,0x20 bdnz .Load_xts_enc_key lvx 26,3,6 vperm 24,31,30,7 lvx 27,26,6 stvx 24,0,7 vperm 25,26,31,7 lvx 28,27,6 stvx 25,3,7 addi 7,1,64+15 vperm 26,27,26,7 lvx 29,28,6 vperm 27,28,27,7 lvx 30,29,6 vperm 28,29,28,7 lvx 31,30,6 vperm 29,30,29,7 lvx 22,31,6 vperm 30,31,30,7 lvx 24,0,7 vperm 31,22,31,7 lvx 25,3,7 + + + + + + + + vperm 0,2,4,5 subi 10,10,31 vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 1,1,1,6 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x7C5A5699 andi. 
31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 2,2,2,6 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 3,3,3,6 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 4,4,4,6 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 5,5,5,6 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 b .Loop_xts_enc6x .align 5 .Loop_xts_enc6x: .long 0x10E7C508 .long 0x118CC508 .long 0x11ADC508 .long 0x11CEC508 .long 0x11EFC508 .long 0x1210C508 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD08 .long 0x118CCD08 .long 0x11ADCD08 .long 0x11CECD08 .long 0x11EFCD08 .long 0x1210CD08 lvx 25,3,7 bdnz .Loop_xts_enc6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C508 .long 0x118CC508 vsrab 11,8,9 vxor 17,8,23 vaddubm 8,8,8 .long 0x11ADC508 .long 0x11CEC508 - vsldoi 11,11,11,15 .long 0x11EFC508 .long 0x1210C508 subfe. 
0,0,0 vand 11,11,10 .long 0x10E7CD08 .long 0x118CCD08 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD08 .long 0x11CECD08 vxor 1,18,31 vsrab 11,8,9 vxor 18,8,23 .long 0x11EFCD08 .long 0x1210CD08 and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D508 .long 0x118CD508 vand 11,11,10 .long 0x11ADD508 .long 0x11CED508 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD508 .long 0x1210D508 add 10,10,0 vxor 2,19,31 vsrab 11,8,9 vxor 19,8,23 vaddubm 8,8,8 .long 0x10E7DD08 .long 0x118CDD08 - vsldoi 11,11,11,15 .long 0x11ADDD08 .long 0x11CEDD08 vand 11,11,10 .long 0x11EFDD08 .long 0x1210DD08 addi 7,1,64+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E508 .long 0x118CE508 vxor 3,20,31 vsrab 11,8,9 vxor 20,8,23 .long 0x11ADE508 .long 0x11CEE508 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE508 .long 0x1210E508 lvx 24,0,7 vand 11,11,10 .long 0x10E7ED08 .long 0x118CED08 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED08 .long 0x11CEED08 vxor 4,21,31 vsrab 11,8,9 vxor 21,8,23 .long 0x11EFED08 .long 0x1210ED08 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F508 .long 0x118CF508 vand 11,11,10 .long 0x11ADF508 .long 0x11CEF508 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF508 .long 0x1210F508 vxor 5,22,31 vsrab 11,8,9 vxor 22,8,23 .long 0x10E70509 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D09 .long 0x7C235699 .long 0x11AD1509 vperm 0,0,0,6 .long 0x7C5A5699 vand 11,11,10 .long 0x11CE1D09 vperm 1,1,1,6 .long 0x7C7B5699 .long 0x11EF2509 vperm 2,2,2,6 .long 0x7C9C5699 - vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x11702D09 vperm 3,3,3,6 .long 0x7CBD5699 addi 10,10,0x60 vperm 4,4,4,6 vperm 5,5,5,6 vperm 7,7,7,6 vperm 12,12,12,6 .long 0x7CE02799 vxor 7,0,17 vperm 13,13,13,6 .long 0x7D832799 vxor 12,1,18 vperm 14,14,14,6 .long 0x7DBA2799 vxor 13,2,19 vperm 15,15,15,6 .long 
0x7DDB2799 vxor 14,3,20 vperm 16,11,11,6 .long 0x7DFC2799 vxor 15,4,21 .long 0x7E1D2799 vxor 16,5,22 addi 4,4,0x60 mtctr 9 beq .Loop_xts_enc6x + xxlor 32+10, 2, 2 + addic. 5,5,0x60 beq .Lxts_enc6x_zero cmpwi 5,0x20 blt .Lxts_enc6x_one nop beq .Lxts_enc6x_two cmpwi 5,0x40 blt .Lxts_enc6x_three nop beq .Lxts_enc6x_four .Lxts_enc6x_five: vxor 7,1,17 vxor 12,2,18 vxor 13,3,19 vxor 14,4,20 vxor 15,5,21 bl _aesp8_xts_enc5x vperm 7,7,7,6 vor 17,22,22 vperm 12,12,12,6 .long 0x7CE02799 vperm 13,13,13,6 .long 0x7D832799 vperm 14,14,14,6 .long 0x7DBA2799 vxor 11,15,22 vperm 15,15,15,6 .long 0x7DDB2799 .long 0x7DFC2799 addi 4,4,0x50 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_four: vxor 7,2,17 vxor 12,3,18 vxor 13,4,19 vxor 14,5,20 vxor 15,15,15 bl _aesp8_xts_enc5x vperm 7,7,7,6 vor 17,21,21 vperm 12,12,12,6 .long 0x7CE02799 vperm 13,13,13,6 .long 0x7D832799 vxor 11,14,21 vperm 14,14,14,6 .long 0x7DBA2799 .long 0x7DDB2799 addi 4,4,0x40 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_three: vxor 7,3,17 vxor 12,4,18 vxor 13,5,19 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_enc5x vperm 7,7,7,6 vor 17,20,20 vperm 12,12,12,6 .long 0x7CE02799 vxor 11,13,20 vperm 13,13,13,6 .long 0x7D832799 .long 0x7DBA2799 addi 4,4,0x30 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_two: vxor 7,4,17 vxor 12,5,18 vxor 13,13,13 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_enc5x vperm 7,7,7,6 vor 17,19,19 vxor 11,12,19 vperm 12,12,12,6 .long 0x7CE02799 .long 0x7D832799 addi 4,4,0x20 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_one: vxor 7,5,17 nop .Loop_xts_enc1x: .long 0x10E7C508 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD08 lvx 25,3,7 bdnz .Loop_xts_enc1x add 10,10,31 cmpwi 31,0 .long 0x10E7C508 subi 10,10,16 .long 0x10E7CD08 lvsr 5,0,31 .long 0x10E7D508 .long 0x7C005699 .long 0x10E7DD08 addi 7,1,64+15 .long 0x10E7E508 lvx 24,0,7 .long 0x10E7ED08 lvx 25,3,7 vxor 17,17,31 vperm 0,0,0,6 .long 0x10E7F508 vperm 0,0,0,5 .long 0x10E78D09 vor 17,18,18 
vxor 11,7,18 vperm 7,7,7,6 .long 0x7CE02799 addi 4,4,0x10 bne .Lxts_enc6x_steal b .Lxts_enc6x_done .align 4 .Lxts_enc6x_zero: cmpwi 31,0 beq .Lxts_enc6x_done add 10,10,31 subi 10,10,16 .long 0x7C005699 lvsr 5,0,31 vperm 0,0,0,6 vperm 0,0,0,5 vxor 11,11,17 .Lxts_enc6x_steal: vxor 0,0,17 vxor 7,7,7 vspltisb 12,-1 vperm 7,7,12,5 vsel 7,0,11,7 subi 30,4,17 subi 4,4,16 mtctr 31 .Loop_xts_enc6x_steal: lbzu 0,1(30) stb 0,16(30) bdnz .Loop_xts_enc6x_steal li 31,0 mtctr 9 b .Loop_xts_enc1x .align 4 .Lxts_enc6x_done: cmpldi 8,0 beq .Lxts_enc6x_ret vxor 8,17,23 vperm 8,8,8,6 .long 0x7D004799 .Lxts_enc6x_ret: mtlr 11 li 10,79 li 11,95 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 ld 26,400(1) ld 27,408(1) ld 28,416(1) ld 29,424(1) ld 30,432(1) ld 31,440(1) addi 1,1,448 blr .long 0 .byte 0,12,0x04,1,0x80,6,6,0 .long 0 .align 5 _aesp8_xts_enc5x: .long 0x10E7C508 .long 0x118CC508 .long 0x11ADC508 .long 0x11CEC508 .long 0x11EFC508 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD08 .long 0x118CCD08 .long 0x11ADCD08 .long 0x11CECD08 .long 0x11EFCD08 lvx 25,3,7 bdnz _aesp8_xts_enc5x add 10,10,31 cmpwi 31,0 .long 0x10E7C508 .long 0x118CC508 .long 0x11ADC508 .long 0x11CEC508 .long 0x11EFC508 subi 10,10,16 .long 0x10E7CD08 .long 0x118CCD08 .long 0x11ADCD08 .long 0x11CECD08 .long 0x11EFCD08 vxor 17,17,31 .long 0x10E7D508 lvsr 5,0,31 .long 0x118CD508 .long 0x11ADD508 .long 0x11CED508 .long 0x11EFD508 vxor 1,18,31 .long 0x10E7DD08 .long 0x7C005699 .long 0x118CDD08 .long 0x11ADDD08 .long 0x11CEDD08 .long 0x11EFDD08 vxor 2,19,31 addi 7,1,64+15 .long 
0x10E7E508 .long 0x118CE508 .long 0x11ADE508 .long 0x11CEE508 .long 0x11EFE508 lvx 24,0,7 vxor 3,20,31 .long 0x10E7ED08 vperm 0,0,0,6 .long 0x118CED08 .long 0x11ADED08 .long 0x11CEED08 .long 0x11EFED08 lvx 25,3,7 vxor 4,21,31 .long 0x10E7F508 vperm 0,0,0,5 .long 0x118CF508 .long 0x11ADF508 .long 0x11CEF508 .long 0x11EFF508 .long 0x10E78D09 .long 0x118C0D09 .long 0x11AD1509 .long 0x11CE1D09 .long 0x11EF2509 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .align 5 _aesp8_xts_decrypt6x: stdu 1,-448(1) mflr 11 li 7,207 li 3,223 std 11,464(1) stvx 20,7,1 addi 7,7,32 stvx 21,3,1 addi 3,3,32 stvx 22,7,1 addi 7,7,32 stvx 23,3,1 addi 3,3,32 stvx 24,7,1 addi 7,7,32 stvx 25,3,1 addi 3,3,32 stvx 26,7,1 addi 7,7,32 stvx 27,3,1 addi 3,3,32 stvx 28,7,1 addi 7,7,32 stvx 29,3,1 addi 3,3,32 stvx 30,7,1 stvx 31,3,1 li 0,-1 stw 12,396(1) li 3,0x10 std 26,400(1) li 26,0x20 std 27,408(1) li 27,0x30 std 28,416(1) li 28,0x40 std 29,424(1) li 29,0x50 std 30,432(1) li 30,0x60 std 31,440(1) li 31,0x70 or 0,0,0 + + xxlor 2, 32+10, 32+10 + vsldoi 10,11,10,1 + xxlor 1, 32+10, 32+10 + + + mr 31, 6 + bl .Lconsts + lxvw4x 0, 28, 6 + mr 6, 31 + li 31,0x70 + subi 9,9,3 lvx 23,0,6 lvx 30,3,6 addi 6,6,0x20 lvx 31,0,6 vperm 23,30,23,7 addi 7,1,64+15 mtctr 9 .Load_xts_dec_key: vperm 24,31,30,7 lvx 30,3,6 addi 6,6,0x20 stvx 24,0,7 vperm 25,30,31,7 lvx 31,0,6 stvx 25,3,7 addi 7,7,0x20 bdnz .Load_xts_dec_key lvx 26,3,6 vperm 24,31,30,7 lvx 27,26,6 stvx 24,0,7 vperm 25,26,31,7 lvx 28,27,6 stvx 25,3,7 addi 7,1,64+15 vperm 26,27,26,7 lvx 29,28,6 vperm 27,28,27,7 lvx 30,29,6 vperm 28,29,28,7 lvx 31,30,6 vperm 29,30,29,7 lvx 22,31,6 vperm 30,31,30,7 lvx 24,0,7 vperm 31,22,31,7 lvx 25,3,7 vperm 0,2,4,5 subi 10,10,31 vxor 17,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vand 11,11,10 vxor 7,0,17 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x7C235699 vxor 18,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 1,1,1,6 vand 11,11,10 vxor 12,1,18 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 
11, 2 .long 0x7C5A5699 andi. 31,5,15 vxor 19,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 2,2,2,6 vand 11,11,10 vxor 13,2,19 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x7C7B5699 sub 5,5,31 vxor 20,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 3,3,3,6 vand 11,11,10 vxor 14,3,20 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x7C9C5699 subi 5,5,0x60 vxor 21,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 4,4,4,6 vand 11,11,10 vxor 15,4,21 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x7CBD5699 addi 10,10,0x60 vxor 22,8,23 vsrab 11,8,9 vaddubm 8,8,8 - vsldoi 11,11,11,15 vperm 5,5,5,6 vand 11,11,10 vxor 16,5,22 - vxor 8,8,11 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 vxor 31,31,23 mtctr 9 b .Loop_xts_dec6x .align 5 .Loop_xts_dec6x: .long 0x10E7C548 .long 0x118CC548 .long 0x11ADC548 .long 0x11CEC548 .long 0x11EFC548 .long 0x1210C548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 .long 0x118CCD48 .long 0x11ADCD48 .long 0x11CECD48 .long 0x11EFCD48 .long 0x1210CD48 lvx 25,3,7 bdnz .Loop_xts_dec6x + xxlor 32+10, 1, 1 + subic 5,5,96 vxor 0,17,31 .long 0x10E7C548 .long 0x118CC548 vsrab 11,8,9 vxor 17,8,23 vaddubm 8,8,8 .long 0x11ADC548 .long 0x11CEC548 - vsldoi 11,11,11,15 .long 0x11EFC548 .long 0x1210C548 subfe. 
0,0,0 vand 11,11,10 .long 0x10E7CD48 .long 0x118CCD48 - vxor 8,8,11 + xxlor 32+1, 0, 0 + vpermxor 8, 8, 11, 1 .long 0x11ADCD48 .long 0x11CECD48 vxor 1,18,31 vsrab 11,8,9 vxor 18,8,23 .long 0x11EFCD48 .long 0x1210CD48 and 0,0,5 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7D548 .long 0x118CD548 vand 11,11,10 .long 0x11ADD548 .long 0x11CED548 - vxor 8,8,11 + xxlor 32+2, 0, 0 + vpermxor 8, 8, 11, 2 .long 0x11EFD548 .long 0x1210D548 add 10,10,0 vxor 2,19,31 vsrab 11,8,9 vxor 19,8,23 vaddubm 8,8,8 .long 0x10E7DD48 .long 0x118CDD48 - vsldoi 11,11,11,15 .long 0x11ADDD48 .long 0x11CEDD48 vand 11,11,10 .long 0x11EFDD48 .long 0x1210DD48 addi 7,1,64+15 - vxor 8,8,11 + xxlor 32+3, 0, 0 + vpermxor 8, 8, 11, 3 .long 0x10E7E548 .long 0x118CE548 vxor 3,20,31 vsrab 11,8,9 vxor 20,8,23 .long 0x11ADE548 .long 0x11CEE548 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x11EFE548 .long 0x1210E548 lvx 24,0,7 vand 11,11,10 .long 0x10E7ED48 .long 0x118CED48 - vxor 8,8,11 + xxlor 32+4, 0, 0 + vpermxor 8, 8, 11, 4 .long 0x11ADED48 .long 0x11CEED48 vxor 4,21,31 vsrab 11,8,9 vxor 21,8,23 .long 0x11EFED48 .long 0x1210ED48 lvx 25,3,7 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x10E7F548 .long 0x118CF548 vand 11,11,10 .long 0x11ADF548 .long 0x11CEF548 - vxor 8,8,11 + xxlor 32+5, 0, 0 + vpermxor 8, 8, 11, 5 .long 0x11EFF548 .long 0x1210F548 vxor 5,22,31 vsrab 11,8,9 vxor 22,8,23 .long 0x10E70549 .long 0x7C005699 vaddubm 8,8,8 - vsldoi 11,11,11,15 .long 0x118C0D49 .long 0x7C235699 .long 0x11AD1549 vperm 0,0,0,6 .long 0x7C5A5699 vand 11,11,10 .long 0x11CE1D49 vperm 1,1,1,6 .long 0x7C7B5699 .long 0x11EF2549 vperm 2,2,2,6 .long 0x7C9C5699 - vxor 8,8,11 + xxlor 10, 32+0, 32+0 + xxlor 32+0, 0, 0 + vpermxor 8, 8, 11, 0 + xxlor 32+0, 10, 10 .long 0x12102D49 vperm 3,3,3,6 .long 0x7CBD5699 addi 10,10,0x60 vperm 4,4,4,6 vperm 5,5,5,6 vperm 7,7,7,6 vperm 12,12,12,6 .long 0x7CE02799 vxor 7,0,17 vperm 13,13,13,6 .long 0x7D832799 vxor 12,1,18 vperm 14,14,14,6 .long 0x7DBA2799 vxor 13,2,19 vperm 15,15,15,6 .long 
0x7DDB2799 vxor 14,3,20 vperm 16,16,16,6 .long 0x7DFC2799 vxor 15,4,21 .long 0x7E1D2799 vxor 16,5,22 addi 4,4,0x60 mtctr 9 beq .Loop_xts_dec6x + xxlor 32+10, 2, 2 + addic. 5,5,0x60 beq .Lxts_dec6x_zero cmpwi 5,0x20 blt .Lxts_dec6x_one nop beq .Lxts_dec6x_two cmpwi 5,0x40 blt .Lxts_dec6x_three nop beq .Lxts_dec6x_four .Lxts_dec6x_five: vxor 7,1,17 vxor 12,2,18 vxor 13,3,19 vxor 14,4,20 vxor 15,5,21 bl _aesp8_xts_dec5x vperm 7,7,7,6 vor 17,22,22 vxor 18,8,23 vperm 12,12,12,6 .long 0x7CE02799 vxor 7,0,18 vperm 13,13,13,6 .long 0x7D832799 vperm 14,14,14,6 .long 0x7DBA2799 vperm 15,15,15,6 .long 0x7DDB2799 .long 0x7DFC2799 addi 4,4,0x50 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_four: vxor 7,2,17 vxor 12,3,18 vxor 13,4,19 vxor 14,5,20 vxor 15,15,15 bl _aesp8_xts_dec5x vperm 7,7,7,6 vor 17,21,21 vor 18,22,22 vperm 12,12,12,6 .long 0x7CE02799 vxor 7,0,22 vperm 13,13,13,6 .long 0x7D832799 vperm 14,14,14,6 .long 0x7DBA2799 .long 0x7DDB2799 addi 4,4,0x40 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_three: vxor 7,3,17 vxor 12,4,18 vxor 13,5,19 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_dec5x vperm 7,7,7,6 vor 17,20,20 vor 18,21,21 vperm 12,12,12,6 .long 0x7CE02799 vxor 7,0,21 vperm 13,13,13,6 .long 0x7D832799 .long 0x7DBA2799 addi 4,4,0x30 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_two: vxor 7,4,17 vxor 12,5,18 vxor 13,13,13 vxor 14,14,14 vxor 15,15,15 bl _aesp8_xts_dec5x vperm 7,7,7,6 vor 17,19,19 vor 18,20,20 vperm 12,12,12,6 .long 0x7CE02799 vxor 7,0,20 .long 0x7D832799 addi 4,4,0x20 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_one: vxor 7,5,17 nop .Loop_xts_dec1x: .long 0x10E7C548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 lvx 25,3,7 bdnz .Loop_xts_dec1x subi 0,31,1 .long 0x10E7C548 andi. 
0,0,16 cmpwi 31,0 .long 0x10E7CD48 sub 10,10,0 .long 0x10E7D548 .long 0x7C005699 .long 0x10E7DD48 addi 7,1,64+15 .long 0x10E7E548 lvx 24,0,7 .long 0x10E7ED48 lvx 25,3,7 vxor 17,17,31 vperm 0,0,0,6 .long 0x10E7F548 mtctr 9 .long 0x10E78D49 vor 17,18,18 vor 18,19,19 vperm 7,7,7,6 .long 0x7CE02799 addi 4,4,0x10 vxor 7,0,19 bne .Lxts_dec6x_steal b .Lxts_dec6x_done .align 4 .Lxts_dec6x_zero: cmpwi 31,0 beq .Lxts_dec6x_done .long 0x7C005699 vperm 0,0,0,6 vxor 7,0,18 .Lxts_dec6x_steal: .long 0x10E7C548 lvx 24,26,7 addi 7,7,0x20 .long 0x10E7CD48 lvx 25,3,7 bdnz .Lxts_dec6x_steal add 10,10,31 .long 0x10E7C548 cmpwi 31,0 .long 0x10E7CD48 .long 0x7C005699 .long 0x10E7D548 lvsr 5,0,31 .long 0x10E7DD48 addi 7,1,64+15 .long 0x10E7E548 lvx 24,0,7 .long 0x10E7ED48 lvx 25,3,7 vxor 18,18,31 vperm 0,0,0,6 .long 0x10E7F548 vperm 0,0,0,5 .long 0x11679549 vperm 7,11,11,6 .long 0x7CE02799 vxor 7,7,7 vspltisb 12,-1 vperm 7,7,12,5 vsel 7,0,11,7 vxor 7,7,17 subi 30,4,1 mtctr 31 .Loop_xts_dec6x_steal: lbzu 0,1(30) stb 0,16(30) bdnz .Loop_xts_dec6x_steal li 31,0 mtctr 9 b .Loop_xts_dec1x .align 4 .Lxts_dec6x_done: cmpldi 8,0 beq .Lxts_dec6x_ret vxor 8,17,23 vperm 8,8,8,6 .long 0x7D004799 .Lxts_dec6x_ret: mtlr 11 li 10,79 li 11,95 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 stvx 9,10,1 addi 10,10,32 stvx 9,11,1 addi 11,11,32 or 12,12,12 lvx 20,10,1 addi 10,10,32 lvx 21,11,1 addi 11,11,32 lvx 22,10,1 addi 10,10,32 lvx 23,11,1 addi 11,11,32 lvx 24,10,1 addi 10,10,32 lvx 25,11,1 addi 11,11,32 lvx 26,10,1 addi 10,10,32 lvx 27,11,1 addi 11,11,32 lvx 28,10,1 addi 10,10,32 lvx 29,11,1 addi 11,11,32 lvx 30,10,1 lvx 31,11,1 ld 26,400(1) ld 27,408(1) ld 28,416(1) ld 29,424(1) ld 30,432(1) ld 31,440(1) addi 1,1,448 blr .long 0 .byte 0,12,0x04,1,0x80,6,6,0 .long 0 .align 5 _aesp8_xts_dec5x: .long 0x10E7C548 .long 0x118CC548 .long 0x11ADC548 .long 0x11CEC548 .long 0x11EFC548 lvx 24,26,7 addi 
7,7,0x20 .long 0x10E7CD48 .long 0x118CCD48 .long 0x11ADCD48 .long 0x11CECD48 .long 0x11EFCD48 lvx 25,3,7 bdnz _aesp8_xts_dec5x subi 0,31,1 .long 0x10E7C548 .long 0x118CC548 .long 0x11ADC548 .long 0x11CEC548 .long 0x11EFC548 andi. 0,0,16 cmpwi 31,0 .long 0x10E7CD48 .long 0x118CCD48 .long 0x11ADCD48 .long 0x11CECD48 .long 0x11EFCD48 vxor 17,17,31 sub 10,10,0 .long 0x10E7D548 .long 0x118CD548 .long 0x11ADD548 .long 0x11CED548 .long 0x11EFD548 vxor 1,18,31 .long 0x10E7DD48 .long 0x7C005699 .long 0x118CDD48 .long 0x11ADDD48 .long 0x11CEDD48 .long 0x11EFDD48 vxor 2,19,31 addi 7,1,64+15 .long 0x10E7E548 .long 0x118CE548 .long 0x11ADE548 .long 0x11CEE548 .long 0x11EFE548 lvx 24,0,7 vxor 3,20,31 .long 0x10E7ED48 vperm 0,0,0,6 .long 0x118CED48 .long 0x11ADED48 .long 0x11CEED48 .long 0x11EFED48 lvx 25,3,7 vxor 4,21,31 .long 0x10E7F548 .long 0x118CF548 .long 0x11ADF548 .long 0x11CEF548 .long 0x11EFF548 .long 0x10E78D49 .long 0x118C0D49 .long 0x11AD1549 .long 0x11CE1D49 .long 0x11EF2549 mtctr 9 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 diff --git a/sys/crypto/openssl/powerpc64le/poly1305-ppc.S b/sys/crypto/openssl/powerpc64le/poly1305-ppc.S index 69862b94b2c3..e70d34053cc8 100644 --- a/sys/crypto/openssl/powerpc64le/poly1305-ppc.S +++ b/sys/crypto/openssl/powerpc64le/poly1305-ppc.S @@ -1,1128 +1,1128 @@ /* Do not modify. This file is auto-generated from poly1305-ppc.pl. */ .machine "any" .abiversion 2 .text .globl poly1305_init_int .type poly1305_init_int,@function .align 4 poly1305_init_int: .localentry poly1305_init_int,0 xor 0,0,0 std 0,0(3) std 0,8(3) std 0,16(3) stw 0,24(3) cmpld 4,0 beq- .Lno_key ld 10,0(4) ld 11,8(4) lis 8,0xfff ori 8,8,0xfffc insrdi 8,8,32,0 ori 7,8,3 and 10,10,7 and 11,11,8 std 10,32(3) std 11,40(3) .Lno_key: xor 3,3,3 blr .long 0 .byte 0,12,0x14,0,0,0,2,0 .size poly1305_init_int,.-poly1305_init_int .globl poly1305_blocks .type poly1305_blocks,@function .align 4 poly1305_blocks: .localentry poly1305_blocks,0 .Lpoly1305_blocks: srdi. 
5,5,4 beq- .Labort stdu 1,-192(1) mflr 0 std 27,152(1) std 28,160(1) std 29,168(1) std 30,176(1) std 31,184(1) std 0,208(1) ld 27,32(3) ld 28,40(3) ld 7,0(3) ld 8,8(3) ld 9,16(3) srdi 29,28,2 mtctr 5 add 29,29,28 li 0,3 b .Loop .align 4 .Loop: ld 30,0(4) ld 31,8(4) addi 4,4,16 addc 7,7,30 adde 8,8,31 mulld 10,7,27 mulhdu 11,7,27 adde 9,9,6 mulld 30,8,29 mulhdu 31,8,29 addc 10,10,30 adde 11,11,31 mulld 30,7,28 mulhdu 12,7,28 addc 11,11,30 addze 12,12 mulld 30,8,27 mulhdu 31,8,27 addc 11,11,30 adde 12,12,31 mulld 30,9,29 mulld 31,9,27 addc 11,11,30 adde 12,12,31 andc 30,12,0 and 9,12,0 srdi 31,30,2 add 30,30,31 addc 7,10,30 addze 8,11 addze 9,9 bdnz .Loop std 7,0(3) std 8,8(3) std 9,16(3) ld 27,152(1) ld 28,160(1) ld 29,168(1) ld 30,176(1) ld 31,184(1) addi 1,1,192 .Labort: blr .long 0 .byte 0,12,4,1,0x80,5,4,0 .size poly1305_blocks,.-poly1305_blocks .globl poly1305_emit .type poly1305_emit,@function .align 5 poly1305_emit: .localentry poly1305_emit,0 lwz 7,0(3) lwz 8,4(3) lwz 9,8(3) lwz 10,12(3) lwz 11,16(3) lwz 0,24(3) sldi 8,8,26 sldi 12,9,52 srdi 9,9,12 sldi 10,10,14 add 7,7,8 addc 7,7,12 sldi 12,11,40 srdi 11,11,24 adde 8,9,10 addc 8,8,12 addze 9,11 ld 10,0(3) ld 11,8(3) ld 12,16(3) neg 0,0 xor 7,7,10 xor 8,8,11 xor 9,9,12 and 7,7,0 and 8,8,0 and 9,9,0 xor 7,7,10 xor 8,8,11 xor 9,9,12 addic 10,7,5 addze 11,8 addze 12,9 srdi 12,12,2 neg 12,12 andc 7,7,12 and 10,10,12 andc 8,8,12 and 11,11,12 or 7,7,10 or 8,8,11 lwz 12,4(5) lwz 9,12(5) lwz 10,0(5) lwz 11,8(5) insrdi 10,12,32,0 insrdi 11,9,32,0 addc 7,7,10 adde 8,8,11 addi 3,4,-1 addi 4,4,7 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) srdi 7,7,8 stbu 8,1(4) srdi 8,8,8 stbu 7,1(3) stbu 8,1(4) blr .long 0 .byte 0,12,0x14,0,0,0,3,0 .size poly1305_emit,.-poly1305_emit .globl 
poly1305_blocks_vsx .type poly1305_blocks_vsx,@function .align 5 poly1305_blocks_vsx: .localentry poly1305_blocks_vsx,0 lwz 7,24(3) cmpldi 5,128 bge __poly1305_blocks_vsx neg 0,7 lwz 7,0(3) lwz 8,4(3) lwz 9,8(3) lwz 10,12(3) lwz 11,16(3) sldi 8,8,26 sldi 12,9,52 add 7,7,8 srdi 9,9,12 sldi 10,10,14 addc 7,7,12 sldi 8,11,40 adde 9,9,10 srdi 11,11,24 addc 9,9,8 addze 11,11 ld 8,0(3) ld 10,8(3) ld 12,16(3) xor 7,7,8 xor 9,9,10 xor 11,11,12 and 7,7,0 and 9,9,0 and 11,11,0 xor 7,7,8 xor 9,9,10 xor 11,11,12 li 0,0 std 7,0(3) std 9,8(3) std 11,16(3) stw 0,24(3) b .Lpoly1305_blocks .long 0 .byte 0,12,0x14,0,0,0,4,0 .size poly1305_blocks_vsx,.-poly1305_blocks_vsx .align 5 __poly1305_mul: mulld 9,6,27 mulhdu 10,6,27 mulld 30,7,29 mulhdu 31,7,29 addc 9,9,30 adde 10,10,31 mulld 30,6,28 mulhdu 11,6,28 addc 10,10,30 addze 11,11 mulld 30,7,27 mulhdu 31,7,27 addc 10,10,30 adde 11,11,31 mulld 30,8,29 mulld 31,8,27 addc 10,10,30 adde 11,11,31 andc 30,11,0 and 8,11,0 srdi 31,30,2 add 30,30,31 addc 6,9,30 addze 7,10 addze 8,8 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .size __poly1305_mul,.-__poly1305_mul .align 5 __poly1305_splat: rldicl 9,6,0,38 rldicl 10,6,38,38 stw 9,0x00(31) rldicl 11,6,12,52 slwi 9,10,2 stw 10,0x10(31) add 9,9,10 stw 9,0x20(31) insrdi 11,7,14,38 slwi 9,11,2 stw 11,0x30(31) add 9,9,11 stw 9,0x40(31) rldicl 10,7,50,38 rldicl 11,7,24,40 slwi 9,10,2 stw 10,0x50(31) add 9,9,10 stw 9,0x60(31) insrdi 11,8,3,37 slwi 9,11,2 stw 11,0x70(31) add 9,9,11 stw 9,0x80(31) blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .size __poly1305_splat,.-__poly1305_splat .align 5 __poly1305_blocks_vsx: - stdu 1,-432(1) + stdu 1,-416(1) mflr 0 li 10,191 li 11,207 li 12,-1 stvx 20,10,1 addi 10,10,32 stvx 21,11,1 addi 11,11,32 stvx 22,10,1 addi 10,10,32 - stvx 23,10,1 - addi 10,10,32 - stvx 24,11,1 + stvx 23,11,1 addi 11,11,32 - stvx 25,10,1 + stvx 24,10,1 addi 10,10,32 + stvx 25,11,1 + addi 11,11,32 stvx 26,10,1 addi 10,10,32 stvx 27,11,1 addi 11,11,32 stvx 28,10,1 addi 10,10,32 stvx 29,11,1 addi 11,11,32 
stvx 30,10,1 stvx 31,11,1 - stw 12,388(1) + stw 12,372(1) li 12,-1 or 12,12,12 - std 27,392(1) - std 28,400(1) - std 29,408(1) - std 30,416(1) - std 31,424(1) - std 0,448(1) + std 27,376(1) + std 28,384(1) + std 29,392(1) + std 30,400(1) + std 31,408(1) + std 0,432(1) bl .LPICmeup li 27,0x10 li 28,0x20 li 29,0x30 li 30,0x40 li 31,0x50 .long 0x7FA06699 .long 0x7F3B6699 .long 0x7F7C6699 .long 0x7FFD6699 .long 0x7FDE6699 cmplwi 7,0 bne .Lskip_init_vsx ld 27,32(3) ld 28,40(3) srdi 29,28,2 li 0,3 add 29,29,28 mr 6,27 mr 7,28 li 8,0 addi 31,3,60 bl __poly1305_splat bl __poly1305_mul addi 31,3,52 bl __poly1305_splat bl __poly1305_mul addi 31,3,56 bl __poly1305_splat bl __poly1305_mul addi 31,3,48 bl __poly1305_splat ld 6,0(3) ld 7,8(3) ld 8,16(3) rldicl 9,6,0,38 rldicl 10,6,38,38 rldicl 11,6,12,52 .long 0x7C0901E7 insrdi 11,7,14,38 .long 0x7C2A01E7 rldicl 10,7,50,38 .long 0x7C4B01E7 rldicl 11,7,24,40 .long 0x7C6A01E7 insrdi 11,8,3,37 .long 0x7C8B01E7 li 0,1 stw 0,24(3) b .Loaded_vsx .align 4 .Lskip_init_vsx: li 27,4 li 28,8 li 29,12 li 30,16 .long 0x7C001819 .long 0x7C3B1819 .long 0x7C5C1819 .long 0x7C7D1819 .long 0x7C9E1819 .Loaded_vsx: li 27,0x10 li 28,0x20 li 29,0x30 li 30,0x40 li 31,0x50 li 7,0x60 li 8,0x70 addi 10,3,64 addi 11,1,63 vxor 20,20,20 .long 0xF000A057 .long 0xF021A057 .long 0xF042A057 .long 0xF063A057 .long 0xF084A057 .long 0x7EA02699 .long 0x7EDB2699 .long 0x7EFC2699 .long 0x7F1D2699 .long 0xF0B5B057 vspltisb 26,4 vperm 7,21,22,31 vspltisb 28,14 .long 0xF115B357 .long 0x10C5CEC4 .long 0x10E7D6C4 .long 0x1128DEC4 .long 0x1108E6C4 vand 5,5,29 vand 6,6,29 vand 7,7,29 vand 8,8,29 .long 0xF2B7C057 vperm 22,23,24,31 .long 0xF2F7C357 .long 0x1295CEC4 .long 0x12D6D6C4 .long 0x1317DEC4 .long 0x12F7E6C4 vand 21,21,29 vand 20,20,29 vand 22,22,29 vand 23,23,29 .long 0x11384E8C .long 0x10B52E8C .long 0x10D4368C .long 0x10F63E8C .long 0x1117468C vor 9,9,30 .long 0x7D5D1A99 .long 0x7D605299 .long 0x7D9B5299 .long 0x7DBC5299 .long 0x7DDD5299 .long 0x7EBE5299 .long 
0x7EDF5299 .long 0x7EE75299 .long 0x7F085299 stvx 11,0,11 stvx 12,27,11 stvx 13,28,11 stvx 14,29,11 stvx 21,30,11 stvx 22,31,11 stvx 23,7,11 stvx 24,8,11 addi 4,4,0x40 addi 12,12,0x50 addi 0,5,-64 srdi 0,0,6 mtctr 0 b .Loop_vsx .align 4 .Loop_vsx: .long 0x11E55288 .long 0x12055A88 .long 0x12256A88 .long 0x12466A88 .long 0x12865288 .long 0x1210A0C0 .long 0x12865A88 .long 0x1231A0C0 .long 0x12676A88 .long 0x12896288 .long 0x11EFA0C0 .long 0x12875A88 .long 0x1252A0C0 lvx 12,31,11 .long 0x12885A88 .long 0x1273A0C0 lvx 11,30,11 .long 0x104238C0 .long 0x100028C0 .long 0x106340C0 .long 0x102130C0 .long 0x108448C0 .long 0x12887288 .long 0x11EFA0C0 .long 0x12897288 .long 0x1210A0C0 .long 0x12875288 .long 0x1231A0C0 .long 0x12885288 .long 0x1252A0C0 lvx 14,8,11 .long 0x12895288 .long 0x1273A0C0 lvx 13,7,11 .long 0x12876288 .long 0x11EFA0C0 .long 0x12886288 .long 0x1210A0C0 .long 0x12896288 .long 0x1231A0C0 .long 0x12855A88 .long 0x1252A0C0 .long 0x12865A88 .long 0x1273A0C0 .long 0x7EA02699 .long 0x7EDB2699 .long 0x7EFC2699 .long 0x7F1D2699 .long 0x12867288 .long 0x11EFA0C0 .long 0x12877288 .long 0x1210A0C0 .long 0x12887288 .long 0x1231A0C0 .long 0x12897288 .long 0x1252A0C0 .long 0x12856A88 .long 0x1273A0C0 .long 0xF0B5B057 vspltisb 26,4 vperm 7,21,22,31 .long 0xF115B357 .long 0x12805088 .long 0x11EFA0C0 .long 0x12815088 .long 0x1210A0C0 .long 0x12825088 .long 0x1231A0C0 .long 0x12835088 .long 0x1252A0C0 .long 0x12845088 .long 0x1273A0C0 .long 0xF2B7C057 vperm 22,23,24,31 .long 0xF2F7C357 .long 0x12826088 .long 0x11EFA0C0 .long 0x12836088 .long 0x1210A0C0 .long 0x12846088 .long 0x1231A0C0 .long 0x12805888 .long 0x1252A0C0 lvx 12,27,11 .long 0x12815888 .long 0x1273A0C0 lvx 11,0,11 .long 0x10C5CEC4 .long 0x10E7D6C4 .long 0x1128DEC4 .long 0x1108E6C4 .long 0x12817088 .long 0x11EFA0C0 .long 0x12827088 .long 0x1210A0C0 .long 0x12837088 .long 0x1231A0C0 .long 0x12847088 .long 0x1252A0C0 lvx 14,29,11 .long 0x12806888 .long 0x1273A0C0 lvx 13,28,11 vand 5,5,29 vand 6,6,29 vand 7,7,29 
vand 8,8,29 .long 0x12846088 .long 0x11EFA0C0 .long 0x12805888 .long 0x1210A0C0 .long 0x12815888 .long 0x1231A0C0 .long 0x12825888 .long 0x1252A0C0 .long 0x12835888 .long 0x1273A0C0 .long 0x12D6D6C4 .long 0x1355CEC4 .long 0x1317DEC4 .long 0x12F7E6C4 .long 0x12837088 .long 0x11EFA0C0 .long 0x12847088 .long 0x1210A0C0 .long 0x12806888 .long 0x1231A0C0 .long 0x12816888 .long 0x1252A0C0 .long 0x12826888 .long 0x1273A0C0 vand 21,21,29 vand 26,26,29 vand 22,22,29 vand 23,23,29 vspltisb 20,2 .long 0x1092CEC4 .long 0x102FCEC4 vand 3,18,29 vand 0,15,29 .long 0x108498C0 .long 0x102180C0 .long 0x11384E8C .long 0x10B52E8C .long 0x10DA368C .long 0x10F63E8C .long 0x1117468C vor 9,9,30 .long 0x1264CEC4 .long 0x1201CEC4 vand 4,4,29 vand 1,1,29 .long 0x100098C0 .long 0x105180C0 .long 0x1273A5C4 .long 0x1222CEC4 vand 2,2,29 .long 0x100098C0 .long 0x106388C0 .long 0x11E0CEC4 .long 0x1243CEC4 vand 0,0,29 vand 3,3,29 .long 0x102178C0 .long 0x108490C0 addi 4,4,0x40 bdnz .Loop_vsx neg 5,5 andi. 5,5,0x30 sub 4,4,5 .long 0x7D5D1E99 .long 0x7D605699 .long 0x7D9B5699 .long 0x7DBC5699 .long 0x7DDD5699 .Last_vsx: .long 0x11E55288 .long 0x12065288 .long 0x12275288 .long 0x12485288 .long 0x12695288 .long 0x12896288 .long 0x11EFA0C0 .long 0x12855A88 .long 0x1210A0C0 .long 0x12865A88 .long 0x1231A0C0 .long 0x12875A88 .long 0x1252A0C0 .long 0x7D9F5699 .long 0x12885A88 .long 0x1273A0C0 .long 0x7D7E5699 .long 0x104238C0 .long 0x100028C0 .long 0x106340C0 .long 0x102130C0 .long 0x108448C0 .long 0x12887288 .long 0x11EFA0C0 .long 0x12897288 .long 0x1210A0C0 .long 0x12856A88 .long 0x1231A0C0 .long 0x12866A88 .long 0x1252A0C0 .long 0x7DC85699 .long 0x12876A88 .long 0x1273A0C0 .long 0x7DA75699 .long 0x12876288 .long 0x11EFA0C0 .long 0x12886288 .long 0x1210A0C0 .long 0x12896288 .long 0x1231A0C0 .long 0x12855A88 .long 0x1252A0C0 .long 0x12865A88 .long 0x1273A0C0 .long 0x12867288 .long 0x11EFA0C0 .long 0x12877288 .long 0x1210A0C0 .long 0x12887288 .long 0x1231A0C0 .long 0x12897288 .long 0x1252A0C0 .long 
0x12856A88 .long 0x1273A0C0 .long 0x12805088 .long 0x11EFA0C0 .long 0x12815088 .long 0x1210A0C0 .long 0x12825088 .long 0x1231A0C0 .long 0x12835088 .long 0x1252A0C0 .long 0x12845088 .long 0x1273A0C0 .long 0x12826088 .long 0x11EFA0C0 .long 0x12836088 .long 0x1210A0C0 .long 0x12846088 .long 0x1231A0C0 .long 0x12805888 .long 0x1252A0C0 .long 0x7D9B5699 .long 0x12815888 .long 0x1273A0C0 .long 0x7D605699 .long 0x12817088 .long 0x11EFA0C0 .long 0x12827088 .long 0x1210A0C0 .long 0x12837088 .long 0x1231A0C0 .long 0x12847088 .long 0x1252A0C0 .long 0x7DDD5699 .long 0x12806888 .long 0x1273A0C0 .long 0x7DBC5699 .long 0x12846088 .long 0x11EFA0C0 .long 0x12805888 .long 0x1210A0C0 .long 0x12815888 .long 0x1231A0C0 .long 0x12825888 .long 0x1252A0C0 .long 0x12835888 .long 0x1273A0C0 .long 0x12837088 .long 0x11EFA0C0 .long 0x12847088 .long 0x1210A0C0 .long 0x12806888 .long 0x1231A0C0 .long 0x12816888 .long 0x1252A0C0 .long 0x12826888 .long 0x1273A0C0 .long 0xF00F7A57 .long 0xF0308257 .long 0xF0518A57 .long 0xF0729257 .long 0xF0939A57 .long 0x11EF00C0 .long 0x121008C0 .long 0x123110C0 .long 0x125218C0 .long 0x127320C0 vspltisb 20,2 .long 0x1092CEC4 .long 0x102FCEC4 vand 3,18,29 vand 0,15,29 .long 0x108498C0 .long 0x102180C0 .long 0x1264CEC4 .long 0x1201CEC4 vand 4,4,29 vand 1,1,29 .long 0x100098C0 .long 0x105180C0 .long 0x1273A5C4 .long 0x1222CEC4 vand 2,2,29 .long 0x100098C0 .long 0x106388C0 .long 0x11E0CEC4 .long 0x1243CEC4 vand 0,0,29 vand 3,3,29 .long 0x102178C0 .long 0x108490C0 beq .Ldone_vsx add 6,12,5 .long 0x7EA02699 .long 0x7EDB2699 .long 0x7EFC2699 .long 0x7F1D2699 .long 0xF0B5B057 vspltisb 26,4 vperm 7,21,22,31 .long 0xF115B357 .long 0x10C5CEC4 .long 0x10E7D6C4 .long 0x1128DEC4 .long 0x1108E6C4 vand 5,5,29 vand 6,6,29 vand 7,7,29 vand 8,8,29 .long 0xF297C057 vperm 21,23,24,31 .long 0xF2D7C357 .long 0x7DE03699 .long 0x7E1D3699 .long 0x12F4CEC4 .long 0x12B5D6C4 .long 0x1316DEC4 .long 0x12D6E6C4 vand 20,20,29 vand 23,23,29 vand 21,21,29 vand 22,22,29 .long 0x11384E8C .long 
0x10B42E8C .long 0x10D7368C .long 0x10F53E8C .long 0x1116468C vor 9,9,30 vperm 0,0,0,15 vand 5,5, 16 vperm 1,1,1,15 vand 6,6, 16 vperm 2,2,2,15 vand 7,7, 16 vperm 3,3,3,15 vand 8,8, 16 vperm 4,4,4,15 vand 9,9, 16 .long 0x10A500C0 vxor 0,0,0 .long 0x10C608C0 vxor 1,1,1 .long 0x10E710C0 vxor 2,2,2 .long 0x110818C0 vxor 3,3,3 .long 0x112920C0 vxor 4,4,4 xor. 5,5,5 b .Last_vsx .align 4 .Ldone_vsx: - ld 0,448(1) + ld 0,432(1) li 27,4 li 28,8 li 29,12 li 30,16 .long 0x7C001919 .long 0x7C3B1919 .long 0x7C5C1919 .long 0x7C7D1919 .long 0x7C9E1919 - lwz 12,388(1) + lwz 12,372(1) mtlr 0 li 10,191 li 11,207 or 12,12,12 lvx 20,10,1 addi 10,10,32 - lvx 21,10,1 - addi 10,10,32 - lvx 22,11,1 + lvx 21,11,1 addi 11,11,32 - lvx 23,10,1 + lvx 22,10,1 addi 10,10,32 - lvx 24,11,1 + lvx 23,11,1 addi 11,11,32 - lvx 25,10,1 + lvx 24,10,1 addi 10,10,32 - lvx 26,11,1 + lvx 25,11,1 addi 11,11,32 - lvx 27,10,1 + lvx 26,10,1 addi 10,10,32 - lvx 28,11,1 + lvx 27,11,1 addi 11,11,32 - lvx 29,10,1 + lvx 28,10,1 addi 10,10,32 - lvx 30,11,1 - lvx 31,10,1 - ld 27,392(1) - ld 28,400(1) - ld 29,408(1) - ld 30,416(1) - ld 31,424(1) - addi 1,1,432 + lvx 29,11,1 + addi 11,11,32 + lvx 30,10,1 + lvx 31,11,1 + ld 27,376(1) + ld 28,384(1) + ld 29,392(1) + ld 30,400(1) + ld 31,408(1) + addi 1,1,416 blr .long 0 .byte 0,12,0x04,1,0x80,5,4,0 .long 0 .size __poly1305_blocks_vsx,.-__poly1305_blocks_vsx .align 6 .LPICmeup: mflr 0 bcl 20,31,$+4 mflr 12 addi 12,12,56 mtlr 0 blr .long 0 .byte 0,12,0x14,0,0,0,0,0 .space 28 .long 0x03ffffff,0x00000000 .long 0x03ffffff,0x00000000 .long 0x0000001a,0x00000000 .long 0x0000001a,0x00000000 .long 0x00000028,0x00000000 .long 0x00000028,0x00000000 .long 0x0e0f0001,0x00000000 .long 0x1e1f1011,0x00000000 .long 0x01000000,0x01000000 .long 0x01000000,0x01000000 .long 0x03020100,0x07060504 .long 0x0b0a0908,0x0f0e0d0c .long 0x00000000,0x00000000 .long 0x04050607,0x00000000 .long 0x00000000,0x04050607 .long 0x00000000,0x00000000 .long 0x00000000,0x00000000 .long 0x00000000,0x04050607 
.long 0x00000000,0xffffffff .long 0xffffffff,0xffffffff .long 0x00000000,0xffffffff .long 0x00000000,0xffffffff .long 0x00000000,0x00000000 .long 0x00000000,0xffffffff .byte 80,111,108,121,49,51,48,53,32,102,111,114,32,80,80,67,44,67,82,89,80,84,79,71,65,77,83,32,98,121,32,64,100,111,116,45,97,115,109,0 .align 2