;; x64dll.asm (from the Crypto++ library, https://github.com/weidai11/cryptopp)
;;
;; https://docs.microsoft.com/en-us/cpp/build/x64-calling-convention
;; The first four integer arguments are passed in registers.
;; Integer values are passed in left-to-right order in RCX,
;; RDX, R8, and R9, respectively. Arguments five and higher
;; are passed on the stack.
;; The registers RAX, RCX, RDX, R8, R9, R10, R11, XMM0-5,
;; and the upper portions of YMM0-15 and ZMM0-15 are
;; considered volatile and must be considered destroyed on
;; function calls.
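;;
;; As a concrete illustration for the routines below (the C++-side prototype is
;; an assumption, not something declared in this file): Baseline_Add is
;; presumably declared roughly as
;;     word Baseline_Add(size_t N, word *C, const word *A, const word *B);
;; so N arrives in RCX, C in RDX, A in R8 and B in R9, and the carry out is
;; returned in RAX. That register assignment is exactly what the code below
;; relies on.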
include ksamd64.inc
EXTERNDEF ?Te@rdtable@CryptoPP@@3PA_KA:FAR
EXTERNDEF ?g_cacheLineSize@CryptoPP@@3IA:FAR
EXTERNDEF ?SHA256_K@CryptoPP@@3QBIB:FAR
.CODE
ALIGN 8
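;; Baseline_Add: add two N-word (64-bit words) integers, A + B -> C, returning
;; the final carry in RAX. The pointers are pre-biased by 8*N and RCX is negated
;; so a negative index counts up toward zero; the loop is unrolled two words per
;; iteration so the carry chain is never interrupted.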
Baseline_Add PROC
lea rdx, [rdx+8*rcx]
lea r8, [r8+8*rcx]
lea r9, [r9+8*rcx]
neg rcx ; rcx is negative index
jz $1@Baseline_Add
mov rax,[r8+8*rcx]
add rax,[r9+8*rcx]
mov [rdx+8*rcx],rax
$0@Baseline_Add:
mov rax,[r8+8*rcx+8]
adc rax,[r9+8*rcx+8]
mov [rdx+8*rcx+8],rax
lea rcx,[rcx+2] ; advance index by 2 with lea, which leaves the carry flag intact (add would clobber CF, and inc causes a partial-flag slowdown on Intel Core 2)
jrcxz $1@Baseline_Add ; exit when the negative index reaches zero; jrcxz tests RCX without reading or writing flags, so the carry chain survives the branch
mov rax,[r8+8*rcx]
adc rax,[r9+8*rcx]
mov [rdx+8*rcx],rax
jmp $0@Baseline_Add
$1@Baseline_Add:
mov rax, 0 ; mov rather than xor, so the final carry flag is preserved
adc rax, rax ; store carry into rax (return result register)
ret
Baseline_Add ENDP
ALIGN 8
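;; Baseline_Sub: same structure as Baseline_Add, but computes A - B -> C with
;; sbb and returns the final borrow in RAX.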
Baseline_Sub PROC
lea rdx, [rdx+8*rcx]
lea r8, [r8+8*rcx]
lea r9, [r9+8*rcx]
neg rcx ; rcx is negative index
jz $1@Baseline_Sub
mov rax,[r8+8*rcx]
sub rax,[r9+8*rcx]
mov [rdx+8*rcx],rax
$0@Baseline_Sub:
mov rax,[r8+8*rcx+8]
sbb rax,[r9+8*rcx+8]
mov [rdx+8*rcx+8],rax
lea rcx,[rcx+2] ; advance index by 2 with lea, which leaves the carry (borrow) flag intact (add would clobber CF, and inc causes a partial-flag slowdown on Intel Core 2)
jrcxz $1@Baseline_Sub ; exit when the negative index reaches zero; jrcxz tests RCX without reading or writing flags, so the borrow chain survives the branch
mov rax,[r8+8*rcx]
sbb rax,[r9+8*rcx]
mov [rdx+8*rcx],rax
jmp $0@Baseline_Sub
$1@Baseline_Sub:
mov rax, 0 ; mov rather than xor, so the final borrow (carry flag) is preserved
adc rax, rax ; store the borrow into rax (return result register)
ret
Baseline_Sub ENDP
ALIGN 8
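;; Rijndael_Enc_AdvancedProcessBlocks_SSE2: table-driven AES encryption used by
;; AdvancedProcessBlocks. On entry RCX (copied to R8) points to a caller-built
;; workspace/state block, RDX appears to point to the key schedule, R11 is set
;; to the base of the Te lookup table, and g_cacheLineSize is read into EDI for
;; the cache-warming pass below. The workspace offsets (16*12, 16*14, 16*18,
;; 16*19, ...) follow the layout established by the C++ caller.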
Rijndael_Enc_AdvancedProcessBlocks_SSE2 PROC FRAME
rex_push_reg rsi
push_reg rdi
push_reg rbx
push_reg r12
.endprolog
mov r8, rcx
mov r11, ?Te@rdtable@CryptoPP@@3PA_KA
mov edi, DWORD PTR [?g_cacheLineSize@CryptoPP@@3IA]
mov rsi, [(r8+16*19)]
mov rax, 16
and rax, rsi
movdqa xmm3, XMMWORD PTR [rdx+16+rax]
movdqa [(r8+16*12)], xmm3
lea rax, [rdx+rax+2*16]
sub rax, rsi
label0:
movdqa xmm0, [rax+rsi]
movdqa XMMWORD PTR [(r8+0)+rsi], xmm0
add rsi, 16
cmp rsi, 16*12
jl label0
movdqa xmm4, [rax+rsi]
movdqa xmm1, [rdx]
mov r12d, [rdx+4*4]
mov ebx, [rdx+5*4]
mov ecx, [rdx+6*4]
mov edx, [rdx+7*4]
xor rax, rax
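;; Touch one word of the Te table per cache line (stride g_cacheLineSize, 2 KB
;; total) so the table is resident in cache before any key-dependent lookups,
;; then serialize with lfence. This appears to be a cache-timing countermeasure.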
label9:
mov esi, [r11+rax]
add rax, rdi
mov esi, [r11+rax]
add rax, rdi
mov esi, [r11+rax]
add rax, rdi
mov esi, [r11+rax]
add rax, rdi
cmp rax, 2048
jl label9
lfence
test DWORD PTR [(r8+16*18+8)], 1
jz label8
mov rsi, [(r8+16*14)]
movdqu xmm2, [rsi]
pxor xmm2, xmm1
psrldq xmm1, 14
movd eax, xmm1
mov al, BYTE PTR [rsi+15]
mov r10d, eax
movd eax, xmm2
psrldq xmm2, 4
movd edi, xmm2
psrldq xmm2, 4
movzx esi, al
xor r12d, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, ah
xor edx, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
shr eax, 16
movzx esi, al
xor ecx, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
movzx esi, ah
xor ebx, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
mov eax, edi
movd edi, xmm2
psrldq xmm2, 4
movzx esi, al
xor ebx, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, ah
xor r12d, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
shr eax, 16
movzx esi, al
xor edx, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
movzx esi, ah
xor ecx, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
mov eax, edi
movd edi, xmm2
movzx esi, al
xor ecx, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, ah
xor ebx, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
shr eax, 16
movzx esi, al
xor r12d, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
movzx esi, ah
xor edx, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
mov eax, edi
movzx esi, al
xor edx, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, ah
xor ecx, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
shr eax, 16
movzx esi, al
xor ebx, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
psrldq xmm2, 3
mov eax, [(r8+16*12)+0*4]
mov edi, [(r8+16*12)+2*4]
mov r9d, [(r8+16*12)+3*4]
movzx esi, cl
xor r9d, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
movzx esi, bl
xor edi, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
movzx esi, bh
xor r9d, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
shr ebx, 16
movzx esi, bl
xor eax, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
movzx esi, bh
mov ebx, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
xor ebx, [(r8+16*12)+1*4]
movzx esi, ch
xor eax, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
shr ecx, 16
movzx esi, dl
xor eax, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
movzx esi, dh
xor ebx, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
shr edx, 16
movzx esi, ch
xor edi, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, cl
xor ebx, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
movzx esi, dl
xor edi, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
movzx esi, dh
xor r9d, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movd ecx, xmm2
mov edx, r12d
mov [(r8+0)+3*4], r9d
mov [(r8+0)+0*4], eax
mov [(r8+0)+1*4], ebx
mov [(r8+0)+2*4], edi
jmp label5
label3:
mov r12d, [(r8+16*12)+0*4]
mov ebx, [(r8+16*12)+1*4]
mov ecx, [(r8+16*12)+2*4]
mov edx, [(r8+16*12)+3*4]
label8:
mov rax, [(r8+16*14)]
movdqu xmm2, [rax]
mov rsi, [(r8+16*14)+8]
movdqu xmm5, [rsi]
pxor xmm2, xmm1
pxor xmm2, xmm5
movd eax, xmm2
psrldq xmm2, 4
movd edi, xmm2
psrldq xmm2, 4
movzx esi, al
xor r12d, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, ah
xor edx, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
shr eax, 16
movzx esi, al
xor ecx, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
movzx esi, ah
xor ebx, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
mov eax, edi
movd edi, xmm2
psrldq xmm2, 4
movzx esi, al
xor ebx, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, ah
xor r12d, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
shr eax, 16
movzx esi, al
xor edx, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
movzx esi, ah
xor ecx, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
mov eax, edi
movd edi, xmm2
movzx esi, al
xor ecx, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, ah
xor ebx, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
shr eax, 16
movzx esi, al
xor r12d, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
movzx esi, ah
xor edx, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
mov eax, edi
movzx esi, al
xor edx, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, ah
xor ecx, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
shr eax, 16
movzx esi, al
xor ebx, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
movzx esi, ah
xor r12d, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
mov eax, r12d
add r8, [(r8+16*19)]
add r8, 4*16
jmp label2
label1:
mov ecx, r10d
mov edx, r12d
mov eax, [(r8+0)+0*4]
mov ebx, [(r8+0)+1*4]
xor cl, ch
and rcx, 255
label5:
add r10d, 1
xor edx, DWORD PTR [r11+rcx*8+3]
movzx esi, dl
xor ebx, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
movzx esi, dh
mov ecx, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
shr edx, 16
xor ecx, [(r8+0)+2*4]
movzx esi, dh
xor eax, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, dl
mov edx, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
xor edx, [(r8+0)+3*4]
add r8, [(r8+16*19)]
add r8, 3*16
jmp label4
label2:
mov r9d, [(r8+0)-4*16+3*4]
mov edi, [(r8+0)-4*16+2*4]
movzx esi, cl
xor r9d, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
mov cl, al
movzx esi, ah
xor edi, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
shr eax, 16
movzx esi, bl
xor edi, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
movzx esi, bh
xor r9d, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
shr ebx, 16
movzx esi, al
xor r9d, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
movzx esi, ah
mov eax, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, bl
xor eax, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
movzx esi, bh
mov ebx, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, ch
xor eax, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
movzx esi, cl
xor ebx, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
shr ecx, 16
movzx esi, dl
xor eax, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
movzx esi, dh
xor ebx, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
shr edx, 16
movzx esi, ch
xor edi, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, cl
xor ebx, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
movzx esi, dl
xor edi, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
movzx esi, dh
xor r9d, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
mov ecx, edi
xor eax, [(r8+0)-4*16+0*4]
xor ebx, [(r8+0)-4*16+1*4]
mov edx, r9d
label4:
mov r9d, [(r8+0)-4*16+7*4]
mov edi, [(r8+0)-4*16+6*4]
movzx esi, cl
xor r9d, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
mov cl, al
movzx esi, ah
xor edi, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
shr eax, 16
movzx esi, bl
xor edi, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
movzx esi, bh
xor r9d, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
shr ebx, 16
movzx esi, al
xor r9d, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
movzx esi, ah
mov eax, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, bl
xor eax, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
movzx esi, bh
mov ebx, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, ch
xor eax, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
movzx esi, cl
xor ebx, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
shr ecx, 16
movzx esi, dl
xor eax, DWORD PTR [r11+8*rsi+(((3+3) MOD (4))+1)]
movzx esi, dh
xor ebx, DWORD PTR [r11+8*rsi+(((2+3) MOD (4))+1)]
shr edx, 16
movzx esi, ch
xor edi, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
movzx esi, cl
xor ebx, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
movzx esi, dl
xor edi, DWORD PTR [r11+8*rsi+(((1+3) MOD (4))+1)]
movzx esi, dh
xor r9d, DWORD PTR [r11+8*rsi+(((0+3) MOD (4))+1)]
mov ecx, edi
xor eax, [(r8+0)-4*16+4*4]
xor ebx, [(r8+0)-4*16+5*4]
mov edx, r9d
add r8, 32
test r8, 255
jnz label2
sub r8, 16*16
movzx esi, ch
movzx edi, BYTE PTR [r11+rsi*8+1]
movzx esi, dl
xor edi, DWORD PTR [r11+rsi*8+0]
mov WORD PTR [(r8+16*13)+2], di
movzx esi, dh
movzx edi, BYTE PTR [r11+rsi*8+1]
movzx esi, al
xor edi, DWORD PTR [r11+rsi*8+0]
mov WORD PTR [(r8+16*13)+6], di
shr edx, 16
movzx esi, ah
movzx edi, BYTE PTR [r11+rsi*8+1]
movzx esi, bl
xor edi, DWORD PTR [r11+rsi*8+0]
mov WORD PTR [(r8+16*13)+10], di
shr eax, 16
movzx esi, bh
movzx edi, BYTE PTR [r11+rsi*8+1]
movzx esi, cl
xor edi, DWORD PTR [r11+rsi*8+0]
mov WORD PTR [(r8+16*13)+14], di
shr ebx, 16
movzx esi, dh
movzx edi, BYTE PTR [r11+rsi*8+1]
movzx esi, al
xor edi, DWORD PTR [r11+rsi*8+0]
mov WORD PTR [(r8+16*13)+12], di
shr ecx, 16
movzx esi, ah
movzx edi, BYTE PTR [r11+rsi*8+1]
movzx esi, bl
xor edi, DWORD PTR [r11+rsi*8+0]
mov WORD PTR [(r8+16*13)+0], di
movzx esi, bh
movzx edi, BYTE PTR [r11+rsi*8+1]
movzx esi, cl
xor edi, DWORD PTR [r11+rsi*8+0]
mov WORD PTR [(r8+16*13)+4], di
movzx esi, ch
movzx edi, BYTE PTR [r11+rsi*8+1]
movzx esi, dl
xor edi, DWORD PTR [r11+rsi*8+0]
mov WORD PTR [(r8+16*13)+8], di
mov rax, [(r8+16*14)+16]
mov rbx, [(r8+16*14)+24]
mov rcx, [(r8+16*18+8)]
sub rcx, 16
movdqu xmm2, [rax]
pxor xmm2, xmm4
movdqa xmm0, [(r8+16*16)+16]
paddq xmm0, [(r8+16*14)+16]
movdqa [(r8+16*14)+16], xmm0
pxor xmm2, [(r8+16*13)]
movdqu [rbx], xmm2
jle label7
mov [(r8+16*18+8)], rcx
test rcx, 1
jnz label1
movdqa xmm0, [(r8+16*16)]
paddq xmm0, [(r8+16*14)]
movdqa [(r8+16*14)], xmm0
jmp label3
label7:
xorps xmm0, xmm0
lea rax, [(r8+0)+7*16]
movaps [rax-7*16], xmm0
movaps [rax-6*16], xmm0
movaps [rax-5*16], xmm0
movaps [rax-4*16], xmm0
movaps [rax-3*16], xmm0
movaps [rax-2*16], xmm0
movaps [rax-1*16], xmm0
movaps [rax+0*16], xmm0
movaps [rax+1*16], xmm0
movaps [rax+2*16], xmm0
movaps [rax+3*16], xmm0
movaps [rax+4*16], xmm0
movaps [rax+5*16], xmm0
movaps [rax+6*16], xmm0
pop r12
pop rbx
pop rdi
pop rsi
ret
Rijndael_Enc_AdvancedProcessBlocks_SSE2 ENDP
ALIGN 8
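;; GCM_AuthenticateBlocks_2K_SSE2: GHASH over a sequence of 16-byte blocks using
;; the 2 KB key-dependent table layout (4-bit nibble lookups with 256-byte table
;; strides starting at [R8+32]). On entry RCX points to the data, RDX holds the
;; block count, R8 points to the hash buffer (running hash in its first 16
;; bytes, tables after it) and R9 points to a 16-bit reduction table.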
GCM_AuthenticateBlocks_2K_SSE2 PROC FRAME
rex_push_reg rsi
push_reg rdi
push_reg rbx
.endprolog
mov rsi, r8
mov r11, r9
movdqa xmm0, [rsi]
label0:
movdqu xmm4, [rcx]
pxor xmm0, xmm4
movd ebx, xmm0
mov eax, 0f0f0f0f0h
and eax, ebx
shl ebx, 4
and ebx, 0f0f0f0f0h
movzx edi, ah
movdqa xmm5, XMMWORD PTR [rsi + 32 + 1024 + rdi]
movzx edi, al
movdqa xmm4, XMMWORD PTR [rsi + 32 + 1024 + rdi]
shr eax, 16
movzx edi, ah
movdqa xmm3, XMMWORD PTR [rsi + 32 + 1024 + rdi]
movzx edi, al
movdqa xmm2, XMMWORD PTR [rsi + 32 + 1024 + rdi]
psrldq xmm0, 4
movd eax, xmm0
and eax, 0f0f0f0f0h
movzx edi, bh
pxor xmm5, XMMWORD PTR [rsi + 32 + (1-1)*256 + rdi]
movzx edi, bl
pxor xmm4, XMMWORD PTR [rsi + 32 + (1-1)*256 + rdi]
shr ebx, 16
movzx edi, bh
pxor xmm3, XMMWORD PTR [rsi + 32 + (1-1)*256 + rdi]
movzx edi, bl
pxor xmm2, XMMWORD PTR [rsi + 32 + (1-1)*256 + rdi]
movd ebx, xmm0
shl ebx, 4
and ebx, 0f0f0f0f0h
movzx edi, ah
pxor xmm5, XMMWORD PTR [rsi + 32 + 1024 + 1*256 + rdi]
movzx edi, al
pxor xmm4, XMMWORD PTR [rsi + 32 + 1024 + 1*256 + rdi]
shr eax, 16
movzx edi, ah
pxor xmm3, XMMWORD PTR [rsi + 32 + 1024 + 1*256 + rdi]
movzx edi, al
pxor xmm2, XMMWORD PTR [rsi + 32 + 1024 + 1*256 + rdi]
psrldq xmm0, 4
movd eax, xmm0
and eax, 0f0f0f0f0h
movzx edi, bh
pxor xmm5, XMMWORD PTR [rsi + 32 + (2-1)*256 + rdi]
movzx edi, bl
pxor xmm4, XMMWORD PTR [rsi + 32 + (2-1)*256 + rdi]
shr ebx, 16
movzx edi, bh
pxor xmm3, XMMWORD PTR [rsi + 32 + (2-1)*256 + rdi]
movzx edi, bl
pxor xmm2, XMMWORD PTR [rsi + 32 + (2-1)*256 + rdi]
movd ebx, xmm0
shl ebx, 4
and ebx, 0f0f0f0f0h
movzx edi, ah
pxor xmm5, XMMWORD PTR [rsi + 32 + 1024 + 2*256 + rdi]
movzx edi, al
pxor xmm4, XMMWORD PTR [rsi + 32 + 1024 + 2*256 + rdi]
shr eax, 16
movzx edi, ah
pxor xmm3, XMMWORD PTR [rsi + 32 + 1024 + 2*256 + rdi]
movzx edi, al
pxor xmm2, XMMWORD PTR [rsi + 32 + 1024 + 2*256 + rdi]
psrldq xmm0, 4
movd eax, xmm0
and eax, 0f0f0f0f0h
movzx edi, bh
pxor xmm5, XMMWORD PTR [rsi + 32 + (3-1)*256 + rdi]
movzx edi, bl
pxor xmm4, XMMWORD PTR [rsi + 32 + (3-1)*256 + rdi]
shr ebx, 16
movzx edi, bh
pxor xmm3, XMMWORD PTR [rsi + 32 + (3-1)*256 + rdi]
movzx edi, bl
pxor xmm2, XMMWORD PTR [rsi + 32 + (3-1)*256 + rdi]
movd ebx, xmm0
shl ebx, 4
and ebx, 0f0f0f0f0h
movzx edi, ah
pxor xmm5, XMMWORD PTR [rsi + 32 + 1024 + 3*256 + rdi]
movzx edi, al
pxor xmm4, XMMWORD PTR [rsi + 32 + 1024 + 3*256 + rdi]
shr eax, 16
movzx edi, ah
pxor xmm3, XMMWORD PTR [rsi + 32 + 1024 + 3*256 + rdi]
movzx edi, al
pxor xmm2, XMMWORD PTR [rsi + 32 + 1024 + 3*256 + rdi]
movzx edi, bh
pxor xmm5, XMMWORD PTR [rsi + 32 + 3*256 + rdi]
movzx edi, bl
pxor xmm4, XMMWORD PTR [rsi + 32 + 3*256 + rdi]
shr ebx, 16
movzx edi, bh
pxor xmm3, XMMWORD PTR [rsi + 32 + 3*256 + rdi]
movzx edi, bl
pxor xmm2, XMMWORD PTR [rsi + 32 + 3*256 + rdi]
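;; Combine the partial products: the accumulators are shifted together one byte
;; at a time, and each byte shifted off the top is folded back in modulo the
;; GHASH polynomial via the 16-bit reduction table in R11.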
movdqa xmm0, xmm3
pslldq xmm3, 1
pxor xmm2, xmm3
movdqa xmm1, xmm2
pslldq xmm2, 1
pxor xmm5, xmm2
psrldq xmm0, 15
movd rdi, xmm0
movzx eax, WORD PTR [r11 + rdi*2]
shl eax, 8
movdqa xmm0, xmm5
pslldq xmm5, 1
pxor xmm4, xmm5
psrldq xmm1, 15
movd rdi, xmm1
xor ax, WORD PTR [r11 + rdi*2]
shl eax, 8
psrldq xmm0, 15
movd rdi, xmm0
xor ax, WORD PTR [r11 + rdi*2]
movd xmm0, eax
pxor xmm0, xmm4
add rcx, 16
sub rdx, 1
jnz label0
movdqa [rsi], xmm0
pop rbx
pop rdi
pop rsi
ret
GCM_AuthenticateBlocks_2K_SSE2 ENDP
ALIGN 8
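;; GCM_AuthenticateBlocks_64K_SSE2: the same GHASH operation as the 2K variant,
;; but with the 64 KB table layout (sixteen 256-entry tables of 16 bytes each,
;; indexed at [RSI + 32 + table*256*16]), so no separate reduction table is
;; needed.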
GCM_AuthenticateBlocks_64K_SSE2 PROC FRAME
rex_push_reg rsi
push_reg rdi
.endprolog
mov rsi, r8
movdqa xmm0, [rsi]
label1:
movdqu xmm1, [rcx]
pxor xmm1, xmm0
pxor xmm0, xmm0
movd eax, xmm1
psrldq xmm1, 4
movzx edi, al
add rdi, rdi
pxor xmm0, [rsi + 32 + (0*4+0)*256*16 + rdi*8]
movzx edi, ah
add rdi, rdi
pxor xmm0, [rsi + 32 + (0*4+1)*256*16 + rdi*8]
shr eax, 16
movzx edi, al
add rdi, rdi
pxor xmm0, [rsi + 32 + (0*4+2)*256*16 + rdi*8]
movzx edi, ah
add rdi, rdi
pxor xmm0, [rsi + 32 + (0*4+3)*256*16 + rdi*8]
movd eax, xmm1
psrldq xmm1, 4
movzx edi, al
add rdi, rdi
pxor xmm0, [rsi + 32 + (1*4+0)*256*16 + rdi*8]
movzx edi, ah
add rdi, rdi
pxor xmm0, [rsi + 32 + (1*4+1)*256*16 + rdi*8]
shr eax, 16
movzx edi, al
add rdi, rdi
pxor xmm0, [rsi + 32 + (1*4+2)*256*16 + rdi*8]
movzx edi, ah
add rdi, rdi
pxor xmm0, [rsi + 32 + (1*4+3)*256*16 + rdi*8]
movd eax, xmm1
psrldq xmm1, 4
movzx edi, al
add rdi, rdi
pxor xmm0, [rsi + 32 + (2*4+0)*256*16 + rdi*8]
movzx edi, ah
add rdi, rdi
pxor xmm0, [rsi + 32 + (2*4+1)*256*16 + rdi*8]
shr eax, 16
movzx edi, al
add rdi, rdi
pxor xmm0, [rsi + 32 + (2*4+2)*256*16 + rdi*8]
movzx edi, ah
add rdi, rdi
pxor xmm0, [rsi + 32 + (2*4+3)*256*16 + rdi*8]
movd eax, xmm1
psrldq xmm1, 4
movzx edi, al
add rdi, rdi
pxor xmm0, [rsi + 32 + (3*4+0)*256*16 + rdi*8]
movzx edi, ah
add rdi, rdi
pxor xmm0, [rsi + 32 + (3*4+1)*256*16 + rdi*8]
shr eax, 16
movzx edi, al
add rdi, rdi
pxor xmm0, [rsi + 32 + (3*4+2)*256*16 + rdi*8]
movzx edi, ah
add rdi, rdi
pxor xmm0, [rsi + 32 + (3*4+3)*256*16 + rdi*8]
add rcx, 16
sub rdx, 1
jnz label1
movdqa [rsi], xmm0
pop rdi
pop rsi
ret
GCM_AuthenticateBlocks_64K_SSE2 ENDP
ALIGN 8
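;; SHA256_HashMultipleBlocks_SSE2: SHA-256 compression over one or more 64-byte
;; blocks. On entry RCX points to the 8-word hash state, RDX to the message data
;; and R8 holds the byte count; RSI walks the SHA256_K round constants. The
;; working variables and the 16-entry message schedule live on the stack, and
;; the rounds are heavily unrolled (16 rounds per pass) below.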
SHA256_HashMultipleBlocks_SSE2 PROC FRAME
rex_push_reg rsi
push_reg rdi
push_reg rbx
push_reg rbp
alloc_stack(8*4 + 16*4 + 4*8 + 8)
.endprolog
mov rdi, r8
lea rsi, [?SHA256_K@CryptoPP@@3QBIB + 48*4]
mov [rsp+8*4+16*4+1*8], rcx
mov [rsp+8*4+16*4+2*8], rdx
add rdi, rdx
mov [rsp+8*4+16*4+3*8], rdi
movdqa xmm0, XMMWORD PTR [rcx+0*16]
movdqa xmm1, XMMWORD PTR [rcx+1*16]
mov [rsp+8*4+16*4+0*8], rsi
label0:
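;; Per block: copy the current hash state into the 8-word working area on the
;; stack, load the 64-byte message with bswap (big-endian words) into the
;; stack-resident schedule W[0..15], then run rounds 0-15 with constants
;; K[0..15].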
sub rsi, 48*4
movdqa [rsp+((1024+7-(0+3)) MOD (8))*4], xmm1
movdqa [rsp+((1024+7-(0+7)) MOD (8))*4], xmm0
mov rbx, [rdx+0*8]
bswap rbx
mov [rsp+8*4+((1024+15-(0*(1+1)+1)) MOD (16))*4], rbx
mov rbx, [rdx+1*8]
bswap rbx
mov [rsp+8*4+((1024+15-(1*(1+1)+1)) MOD (16))*4], rbx
mov rbx, [rdx+2*8]
bswap rbx
mov [rsp+8*4+((1024+15-(2*(1+1)+1)) MOD (16))*4], rbx
mov rbx, [rdx+3*8]
bswap rbx
mov [rsp+8*4+((1024+15-(3*(1+1)+1)) MOD (16))*4], rbx
mov rbx, [rdx+4*8]
bswap rbx
mov [rsp+8*4+((1024+15-(4*(1+1)+1)) MOD (16))*4], rbx
mov rbx, [rdx+5*8]
bswap rbx
mov [rsp+8*4+((1024+15-(5*(1+1)+1)) MOD (16))*4], rbx
mov rbx, [rdx+6*8]
bswap rbx
mov [rsp+8*4+((1024+15-(6*(1+1)+1)) MOD (16))*4], rbx
mov rbx, [rdx+7*8]
bswap rbx
mov [rsp+8*4+((1024+15-(7*(1+1)+1)) MOD (16))*4], rbx
mov edi, [rsp+((1024+7-(0+3)) MOD (8))*4]
mov eax, [rsp+((1024+7-(0+6)) MOD (8))*4]
xor eax, [rsp+((1024+7-(0+5)) MOD (8))*4]
mov ecx, [rsp+((1024+7-(0+7)) MOD (8))*4]
mov edx, [rsp+((1024+7-(0+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(0+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(0+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
add edx, [rsi+(0)*4]
add edx, [rsp+8*4+((1024+15-(0)) MOD (16))*4]
add edx, [rsp+((1024+7-(0)) MOD (8))*4]
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(0+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(0+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(0+4)) MOD (8))*4]
mov [rsp+((1024+7-(0+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(0)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(1+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(1+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(1+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
add edi, [rsi+(1)*4]
add edi, [rsp+8*4+((1024+15-(1)) MOD (16))*4]
add edi, [rsp+((1024+7-(1)) MOD (8))*4]
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(1+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(1+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(1+4)) MOD (8))*4]
mov [rsp+((1024+7-(1+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(1)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(2+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(2+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(2+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
add edx, [rsi+(2)*4]
add edx, [rsp+8*4+((1024+15-(2)) MOD (16))*4]
add edx, [rsp+((1024+7-(2)) MOD (8))*4]
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(2+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(2+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(2+4)) MOD (8))*4]
mov [rsp+((1024+7-(2+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(2)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(3+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(3+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(3+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
add edi, [rsi+(3)*4]
add edi, [rsp+8*4+((1024+15-(3)) MOD (16))*4]
add edi, [rsp+((1024+7-(3)) MOD (8))*4]
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(3+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(3+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(3+4)) MOD (8))*4]
mov [rsp+((1024+7-(3+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(3)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(4+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(4+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(4+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
add edx, [rsi+(4)*4]
add edx, [rsp+8*4+((1024+15-(4)) MOD (16))*4]
add edx, [rsp+((1024+7-(4)) MOD (8))*4]
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(4+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(4+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(4+4)) MOD (8))*4]
mov [rsp+((1024+7-(4+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(4)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(5+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(5+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(5+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
add edi, [rsi+(5)*4]
add edi, [rsp+8*4+((1024+15-(5)) MOD (16))*4]
add edi, [rsp+((1024+7-(5)) MOD (8))*4]
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(5+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(5+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(5+4)) MOD (8))*4]
mov [rsp+((1024+7-(5+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(5)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(6+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(6+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(6+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
add edx, [rsi+(6)*4]
add edx, [rsp+8*4+((1024+15-(6)) MOD (16))*4]
add edx, [rsp+((1024+7-(6)) MOD (8))*4]
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(6+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(6+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(6+4)) MOD (8))*4]
mov [rsp+((1024+7-(6+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(6)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(7+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(7+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(7+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
add edi, [rsi+(7)*4]
add edi, [rsp+8*4+((1024+15-(7)) MOD (16))*4]
add edi, [rsp+((1024+7-(7)) MOD (8))*4]
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(7+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(7+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(7+4)) MOD (8))*4]
mov [rsp+((1024+7-(7+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(7)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(8+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(8+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(8+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
add edx, [rsi+(8)*4]
add edx, [rsp+8*4+((1024+15-(8)) MOD (16))*4]
add edx, [rsp+((1024+7-(8)) MOD (8))*4]
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(8+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(8+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(8+4)) MOD (8))*4]
mov [rsp+((1024+7-(8+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(8)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(9+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(9+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(9+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
add edi, [rsi+(9)*4]
add edi, [rsp+8*4+((1024+15-(9)) MOD (16))*4]
add edi, [rsp+((1024+7-(9)) MOD (8))*4]
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(9+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(9+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(9+4)) MOD (8))*4]
mov [rsp+((1024+7-(9+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(9)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(10+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(10+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(10+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
add edx, [rsi+(10)*4]
add edx, [rsp+8*4+((1024+15-(10)) MOD (16))*4]
add edx, [rsp+((1024+7-(10)) MOD (8))*4]
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(10+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(10+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(10+4)) MOD (8))*4]
mov [rsp+((1024+7-(10+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(10)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(11+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(11+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(11+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
add edi, [rsi+(11)*4]
add edi, [rsp+8*4+((1024+15-(11)) MOD (16))*4]
add edi, [rsp+((1024+7-(11)) MOD (8))*4]
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(11+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(11+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(11+4)) MOD (8))*4]
mov [rsp+((1024+7-(11+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(11)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(12+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(12+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(12+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
add edx, [rsi+(12)*4]
add edx, [rsp+8*4+((1024+15-(12)) MOD (16))*4]
add edx, [rsp+((1024+7-(12)) MOD (8))*4]
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(12+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(12+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(12+4)) MOD (8))*4]
mov [rsp+((1024+7-(12+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(12)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(13+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(13+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(13+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
add edi, [rsi+(13)*4]
add edi, [rsp+8*4+((1024+15-(13)) MOD (16))*4]
add edi, [rsp+((1024+7-(13)) MOD (8))*4]
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(13+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(13+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(13+4)) MOD (8))*4]
mov [rsp+((1024+7-(13+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(13)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(14+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(14+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(14+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
add edx, [rsi+(14)*4]
add edx, [rsp+8*4+((1024+15-(14)) MOD (16))*4]
add edx, [rsp+((1024+7-(14)) MOD (8))*4]
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(14+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(14+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(14+4)) MOD (8))*4]
mov [rsp+((1024+7-(14+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(14)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(15+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(15+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(15+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
add edi, [rsi+(15)*4]
add edi, [rsp+8*4+((1024+15-(15)) MOD (16))*4]
add edi, [rsp+((1024+7-(15)) MOD (8))*4]
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(15+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(15+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(15+4)) MOD (8))*4]
mov [rsp+((1024+7-(15+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(15)) MOD (8))*4], ecx
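;; label1: rounds 16 and up. Each pass extends the message schedule in place,
;; W[t] = sigma1(W[t-2]) + W[t-7] + sigma0(W[t-15]) + W[t-16], while applying
;; the round function; one pass covers 16 rounds, and the loop runs three times
;; for rounds 16-63 before the state is folded back with paddd.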
label1:
add rsi, 4*16
mov edx, [rsp+((1024+7-(0+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(0+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(0+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebp, [rsp+8*4+((1024+15-((0)-2)) MOD (16))*4]
mov edi, [rsp+8*4+((1024+15-((0)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((0)-7)) MOD (16))*4]
mov ebp, edi
shr ebp, 3
ror edi, 7
add ebx, [rsp+8*4+((1024+15-(0)) MOD (16))*4]
xor ebp, edi
add edx, [rsi+(0)*4]
ror edi, 11
add edx, [rsp+((1024+7-(0)) MOD (8))*4]
xor ebp, edi
add ebp, ebx
mov [rsp+8*4+((1024+15-(0)) MOD (16))*4], ebp
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(0+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(0+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(0+4)) MOD (8))*4]
mov [rsp+((1024+7-(0+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(0)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(1+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(1+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(1+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebp, [rsp+8*4+((1024+15-((1)-2)) MOD (16))*4]
mov edx, [rsp+8*4+((1024+15-((1)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((1)-7)) MOD (16))*4]
mov ebp, edx
shr ebp, 3
ror edx, 7
add ebx, [rsp+8*4+((1024+15-(1)) MOD (16))*4]
xor ebp, edx
add edi, [rsi+(1)*4]
ror edx, 11
add edi, [rsp+((1024+7-(1)) MOD (8))*4]
xor ebp, edx
add ebp, ebx
mov [rsp+8*4+((1024+15-(1)) MOD (16))*4], ebp
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(1+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(1+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(1+4)) MOD (8))*4]
mov [rsp+((1024+7-(1+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(1)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(2+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(2+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(2+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebp, [rsp+8*4+((1024+15-((2)-2)) MOD (16))*4]
mov edi, [rsp+8*4+((1024+15-((2)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((2)-7)) MOD (16))*4]
mov ebp, edi
shr ebp, 3
ror edi, 7
add ebx, [rsp+8*4+((1024+15-(2)) MOD (16))*4]
xor ebp, edi
add edx, [rsi+(2)*4]
ror edi, 11
add edx, [rsp+((1024+7-(2)) MOD (8))*4]
xor ebp, edi
add ebp, ebx
mov [rsp+8*4+((1024+15-(2)) MOD (16))*4], ebp
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(2+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(2+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(2+4)) MOD (8))*4]
mov [rsp+((1024+7-(2+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(2)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(3+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(3+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(3+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebp, [rsp+8*4+((1024+15-((3)-2)) MOD (16))*4]
mov edx, [rsp+8*4+((1024+15-((3)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((3)-7)) MOD (16))*4]
mov ebp, edx
shr ebp, 3
ror edx, 7
add ebx, [rsp+8*4+((1024+15-(3)) MOD (16))*4]
xor ebp, edx
add edi, [rsi+(3)*4]
ror edx, 11
add edi, [rsp+((1024+7-(3)) MOD (8))*4]
xor ebp, edx
add ebp, ebx
mov [rsp+8*4+((1024+15-(3)) MOD (16))*4], ebp
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(3+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(3+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(3+4)) MOD (8))*4]
mov [rsp+((1024+7-(3+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(3)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(4+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(4+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(4+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebp, [rsp+8*4+((1024+15-((4)-2)) MOD (16))*4]
mov edi, [rsp+8*4+((1024+15-((4)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((4)-7)) MOD (16))*4]
mov ebp, edi
shr ebp, 3
ror edi, 7
add ebx, [rsp+8*4+((1024+15-(4)) MOD (16))*4]
xor ebp, edi
add edx, [rsi+(4)*4]
ror edi, 11
add edx, [rsp+((1024+7-(4)) MOD (8))*4]
xor ebp, edi
add ebp, ebx
mov [rsp+8*4+((1024+15-(4)) MOD (16))*4], ebp
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(4+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(4+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(4+4)) MOD (8))*4]
mov [rsp+((1024+7-(4+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(4)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(5+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(5+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(5+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebp, [rsp+8*4+((1024+15-((5)-2)) MOD (16))*4]
mov edx, [rsp+8*4+((1024+15-((5)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((5)-7)) MOD (16))*4]
mov ebp, edx
shr ebp, 3
ror edx, 7
add ebx, [rsp+8*4+((1024+15-(5)) MOD (16))*4]
xor ebp, edx
add edi, [rsi+(5)*4]
ror edx, 11
add edi, [rsp+((1024+7-(5)) MOD (8))*4]
xor ebp, edx
add ebp, ebx
mov [rsp+8*4+((1024+15-(5)) MOD (16))*4], ebp
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(5+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(5+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(5+4)) MOD (8))*4]
mov [rsp+((1024+7-(5+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(5)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(6+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(6+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(6+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebp, [rsp+8*4+((1024+15-((6)-2)) MOD (16))*4]
mov edi, [rsp+8*4+((1024+15-((6)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((6)-7)) MOD (16))*4]
mov ebp, edi
shr ebp, 3
ror edi, 7
add ebx, [rsp+8*4+((1024+15-(6)) MOD (16))*4]
xor ebp, edi
add edx, [rsi+(6)*4]
ror edi, 11
add edx, [rsp+((1024+7-(6)) MOD (8))*4]
xor ebp, edi
add ebp, ebx
mov [rsp+8*4+((1024+15-(6)) MOD (16))*4], ebp
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(6+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(6+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(6+4)) MOD (8))*4]
mov [rsp+((1024+7-(6+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(6)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(7+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(7+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(7+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebp, [rsp+8*4+((1024+15-((7)-2)) MOD (16))*4]
mov edx, [rsp+8*4+((1024+15-((7)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((7)-7)) MOD (16))*4]
mov ebp, edx
shr ebp, 3
ror edx, 7
add ebx, [rsp+8*4+((1024+15-(7)) MOD (16))*4]
xor ebp, edx
add edi, [rsi+(7)*4]
ror edx, 11
add edi, [rsp+((1024+7-(7)) MOD (8))*4]
xor ebp, edx
add ebp, ebx
mov [rsp+8*4+((1024+15-(7)) MOD (16))*4], ebp
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(7+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(7+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(7+4)) MOD (8))*4]
mov [rsp+((1024+7-(7+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(7)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(8+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(8+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(8+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebp, [rsp+8*4+((1024+15-((8)-2)) MOD (16))*4]
mov edi, [rsp+8*4+((1024+15-((8)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((8)-7)) MOD (16))*4]
mov ebp, edi
shr ebp, 3
ror edi, 7
add ebx, [rsp+8*4+((1024+15-(8)) MOD (16))*4]
xor ebp, edi
add edx, [rsi+(8)*4]
ror edi, 11
add edx, [rsp+((1024+7-(8)) MOD (8))*4]
xor ebp, edi
add ebp, ebx
mov [rsp+8*4+((1024+15-(8)) MOD (16))*4], ebp
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(8+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(8+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(8+4)) MOD (8))*4]
mov [rsp+((1024+7-(8+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(8)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(9+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(9+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(9+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebp, [rsp+8*4+((1024+15-((9)-2)) MOD (16))*4]
mov edx, [rsp+8*4+((1024+15-((9)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((9)-7)) MOD (16))*4]
mov ebp, edx
shr ebp, 3
ror edx, 7
add ebx, [rsp+8*4+((1024+15-(9)) MOD (16))*4]
xor ebp, edx
add edi, [rsi+(9)*4]
ror edx, 11
add edi, [rsp+((1024+7-(9)) MOD (8))*4]
xor ebp, edx
add ebp, ebx
mov [rsp+8*4+((1024+15-(9)) MOD (16))*4], ebp
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(9+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(9+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(9+4)) MOD (8))*4]
mov [rsp+((1024+7-(9+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(9)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(10+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(10+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(10+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebp, [rsp+8*4+((1024+15-((10)-2)) MOD (16))*4]
mov edi, [rsp+8*4+((1024+15-((10)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((10)-7)) MOD (16))*4]
mov ebp, edi
shr ebp, 3
ror edi, 7
add ebx, [rsp+8*4+((1024+15-(10)) MOD (16))*4]
xor ebp, edi
add edx, [rsi+(10)*4]
ror edi, 11
add edx, [rsp+((1024+7-(10)) MOD (8))*4]
xor ebp, edi
add ebp, ebx
mov [rsp+8*4+((1024+15-(10)) MOD (16))*4], ebp
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(10+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(10+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(10+4)) MOD (8))*4]
mov [rsp+((1024+7-(10+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(10)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(11+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(11+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(11+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebp, [rsp+8*4+((1024+15-((11)-2)) MOD (16))*4]
mov edx, [rsp+8*4+((1024+15-((11)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((11)-7)) MOD (16))*4]
mov ebp, edx
shr ebp, 3
ror edx, 7
add ebx, [rsp+8*4+((1024+15-(11)) MOD (16))*4]
xor ebp, edx
add edi, [rsi+(11)*4]
ror edx, 11
add edi, [rsp+((1024+7-(11)) MOD (8))*4]
xor ebp, edx
add ebp, ebx
mov [rsp+8*4+((1024+15-(11)) MOD (16))*4], ebp
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(11+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(11+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(11+4)) MOD (8))*4]
mov [rsp+((1024+7-(11+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(11)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(12+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(12+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(12+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebp, [rsp+8*4+((1024+15-((12)-2)) MOD (16))*4]
mov edi, [rsp+8*4+((1024+15-((12)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((12)-7)) MOD (16))*4]
mov ebp, edi
shr ebp, 3
ror edi, 7
add ebx, [rsp+8*4+((1024+15-(12)) MOD (16))*4]
xor ebp, edi
add edx, [rsi+(12)*4]
ror edi, 11
add edx, [rsp+((1024+7-(12)) MOD (8))*4]
xor ebp, edi
add ebp, ebx
mov [rsp+8*4+((1024+15-(12)) MOD (16))*4], ebp
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(12+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(12+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(12+4)) MOD (8))*4]
mov [rsp+((1024+7-(12+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(12)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(13+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(13+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(13+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebp, [rsp+8*4+((1024+15-((13)-2)) MOD (16))*4]
mov edx, [rsp+8*4+((1024+15-((13)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((13)-7)) MOD (16))*4]
mov ebp, edx
shr ebp, 3
ror edx, 7
add ebx, [rsp+8*4+((1024+15-(13)) MOD (16))*4]
xor ebp, edx
add edi, [rsi+(13)*4]
ror edx, 11
add edi, [rsp+((1024+7-(13)) MOD (8))*4]
xor ebp, edx
add ebp, ebx
mov [rsp+8*4+((1024+15-(13)) MOD (16))*4], ebp
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(13+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(13+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(13+4)) MOD (8))*4]
mov [rsp+((1024+7-(13+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(13)) MOD (8))*4], ecx
mov edx, [rsp+((1024+7-(14+2)) MOD (8))*4]
xor edx, [rsp+((1024+7-(14+1)) MOD (8))*4]
and edx, edi
xor edx, [rsp+((1024+7-(14+1)) MOD (8))*4]
mov ebp, edi
ror edi, 6
ror ebp, 25
xor ebp, edi
ror edi, 5
xor ebp, edi
add edx, ebp
mov ebp, [rsp+8*4+((1024+15-((14)-2)) MOD (16))*4]
mov edi, [rsp+8*4+((1024+15-((14)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((14)-7)) MOD (16))*4]
mov ebp, edi
shr ebp, 3
ror edi, 7
add ebx, [rsp+8*4+((1024+15-(14)) MOD (16))*4]
xor ebp, edi
add edx, [rsi+(14)*4]
ror edi, 11
add edx, [rsp+((1024+7-(14)) MOD (8))*4]
xor ebp, edi
add ebp, ebx
mov [rsp+8*4+((1024+15-(14)) MOD (16))*4], ebp
add edx, ebp
mov ebx, ecx
xor ecx, [rsp+((1024+7-(14+6)) MOD (8))*4]
and eax, ecx
xor eax, [rsp+((1024+7-(14+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add eax, edx
add edx, [rsp+((1024+7-(14+4)) MOD (8))*4]
mov [rsp+((1024+7-(14+4)) MOD (8))*4], edx
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add eax, ebp
mov [rsp+((1024+7-(14)) MOD (8))*4], eax
mov edi, [rsp+((1024+7-(15+2)) MOD (8))*4]
xor edi, [rsp+((1024+7-(15+1)) MOD (8))*4]
and edi, edx
xor edi, [rsp+((1024+7-(15+1)) MOD (8))*4]
mov ebp, edx
ror edx, 6
ror ebp, 25
xor ebp, edx
ror edx, 5
xor ebp, edx
add edi, ebp
mov ebp, [rsp+8*4+((1024+15-((15)-2)) MOD (16))*4]
mov edx, [rsp+8*4+((1024+15-((15)-15)) MOD (16))*4]
mov ebx, ebp
shr ebp, 10
ror ebx, 17
xor ebp, ebx
ror ebx, 2
xor ebx, ebp
add ebx, [rsp+8*4+((1024+15-((15)-7)) MOD (16))*4]
mov ebp, edx
shr ebp, 3
ror edx, 7
add ebx, [rsp+8*4+((1024+15-(15)) MOD (16))*4]
xor ebp, edx
add edi, [rsi+(15)*4]
ror edx, 11
add edi, [rsp+((1024+7-(15)) MOD (8))*4]
xor ebp, edx
add ebp, ebx
mov [rsp+8*4+((1024+15-(15)) MOD (16))*4], ebp
add edi, ebp
mov ebx, eax
xor eax, [rsp+((1024+7-(15+6)) MOD (8))*4]
and ecx, eax
xor ecx, [rsp+((1024+7-(15+6)) MOD (8))*4]
mov ebp, ebx
ror ebx, 2
add ecx, edi
add edi, [rsp+((1024+7-(15+4)) MOD (8))*4]
mov [rsp+((1024+7-(15+4)) MOD (8))*4], edi
ror ebp, 22
xor ebp, ebx
ror ebx, 11
xor ebp, ebx
add ecx, ebp
mov [rsp+((1024+7-(15)) MOD (8))*4], ecx
cmp rsi, [rsp+8*4+16*4+0*8]
jne label1
mov rcx, [rsp+8*4+16*4+1*8]
movdqa xmm1, XMMWORD PTR [rcx+1*16]
movdqa xmm0, XMMWORD PTR [rcx+0*16]
paddd xmm1, [rsp+((1024+7-(0+3)) MOD (8))*4]
paddd xmm0, [rsp+((1024+7-(0+7)) MOD (8))*4]
movdqa [rcx+1*16], xmm1
movdqa [rcx+0*16], xmm0
mov rdx, [rsp+8*4+16*4+2*8]
add rdx, 64
mov [rsp+8*4+16*4+2*8], rdx
cmp rdx, [rsp+8*4+16*4+3*8]
jne label0
add rsp, 8*4 + 16*4 + 4*8 + 8
pop rbp
pop rbx
pop rdi
pop rsi
ret
SHA256_HashMultipleBlocks_SSE2 ENDP
_TEXT ENDS
END