Loop Id: 16803 | Module: libgromacs_mpi.so.9.0.0 | Source: bonded.cpp:2179-2256 [...] | Coverage: 0.03% |
---|
0xbc8230 VMOVDQA64 0xfc0(%RSP),%ZMM0 |
0xbc8238 VMOVDQA64 0xbc0(%RSP),%ZMM11 |
0xbc8240 VPMULLD %ZMM11,%ZMM0,%ZMM1 |
0xbc8246 KXNORW %K0,%K0,%K1 |
0xbc824a VXORPS %XMM2,%XMM2,%XMM2 |
0xbc824e MOV 0x60(%RSP),%RAX |
0xbc8253 VGATHERDPS (%RAX,%ZMM1,4),%ZMM2{%K1} |
0xbc825a KXNORW %K0,%K0,%K1 |
0xbc825e VXORPS %XMM3,%XMM3,%XMM3 |
0xbc8262 MOV 0x70(%RSP),%RCX |
0xbc8267 VGATHERDPS (%RCX,%ZMM1,4),%ZMM3{%K1} |
0xbc826e KXNORW %K0,%K0,%K1 |
0xbc8272 VXORPS %XMM4,%XMM4,%XMM4 |
0xbc8276 MOV 0x68(%RSP),%RDX |
0xbc827b VGATHERDPS (%RDX,%ZMM1,4),%ZMM4{%K1} |
0xbc8282 VPMULLD 0x100(%RSP),%ZMM11,%ZMM1 |
0xbc828a KXNORW %K0,%K0,%K1 |
0xbc828e VXORPS %XMM5,%XMM5,%XMM5 |
0xbc8292 VGATHERDPS (%RAX,%ZMM1,4),%ZMM5{%K1} |
0xbc8299 KXNORW %K0,%K0,%K1 |
0xbc829d VXORPS %XMM6,%XMM6,%XMM6 |
0xbc82a1 VGATHERDPS (%RCX,%ZMM1,4),%ZMM6{%K1} |
0xbc82a8 KXNORW %K0,%K0,%K1 |
0xbc82ac VXORPS %XMM7,%XMM7,%XMM7 |
0xbc82b0 VGATHERDPS (%RDX,%ZMM1,4),%ZMM7{%K1} |
0xbc82b7 VPMULLD 0xc0(%RSP),%ZMM11,%ZMM1 |
0xbc82bf KXNORW %K0,%K0,%K1 |
0xbc82c3 VXORPS %XMM8,%XMM8,%XMM8 |
0xbc82c8 VGATHERDPS (%RAX,%ZMM1,4),%ZMM8{%K1} |
0xbc82cf KXNORW %K0,%K0,%K1 |
0xbc82d3 VXORPS %XMM9,%XMM9,%XMM9 |
0xbc82d8 VGATHERDPS (%RCX,%ZMM1,4),%ZMM9{%K1} |
0xbc82df KXNORW %K0,%K0,%K1 |
0xbc82e3 VXORPS %XMM10,%XMM10,%XMM10 |
0xbc82e8 VGATHERDPS (%RDX,%ZMM1,4),%ZMM10{%K1} |
0xbc82ef VPMULLD 0x80(%RSP),%ZMM11,%ZMM1 |
0xbc82f7 KXNORW %K0,%K0,%K1 |
0xbc82fb VPXOR %XMM11,%XMM11,%XMM11 |
0xbc8300 VGATHERDPS (%RAX,%ZMM1,4),%ZMM11{%K1} |
0xbc8307 KXNORW %K0,%K0,%K1 |
0xbc830b VXORPS %XMM12,%XMM12,%XMM12 |
0xbc8310 VGATHERDPS (%RCX,%ZMM1,4),%ZMM12{%K1} |
0xbc8317 KXNORW %K0,%K0,%K1 |
0xbc831b VXORPS %XMM13,%XMM13,%XMM13 |
0xbc8320 VGATHERDPS (%RDX,%ZMM1,4),%ZMM13{%K1} |
0xbc8327 VSUBPS %ZMM5,%ZMM2,%ZMM1 |
0xbc832d VSUBPS %ZMM6,%ZMM3,%ZMM2 |
0xbc8333 VSUBPS %ZMM7,%ZMM4,%ZMM3 |
0xbc8339 VSUBPS %ZMM5,%ZMM8,%ZMM4 |
0xbc833f VSUBPS %ZMM6,%ZMM9,%ZMM5 |
0xbc8345 VSUBPS %ZMM7,%ZMM10,%ZMM6 |
0xbc834b VSUBPS %ZMM11,%ZMM8,%ZMM8 |
0xbc8351 VSUBPS %ZMM12,%ZMM9,%ZMM9 |
0xbc8357 VSUBPS %ZMM13,%ZMM10,%ZMM10 |
0xbc835d VMOVAPS 0xe00(%RSP),%ZMM12 |
0xbc8365 VMULPS %ZMM12,%ZMM3,%ZMM7 |
0xbc836b VRNDSCALEPS $0,%ZMM7,%ZMM7 |
0xbc8372 VMOVAPS 0xdc0(%RSP),%ZMM13 |
0xbc837a VMULPS %ZMM13,%ZMM7,%ZMM11 |
0xbc8380 VSUBPS %ZMM11,%ZMM1,%ZMM11 |
0xbc8386 VMOVAPS 0xd80(%RSP),%ZMM14 |
0xbc838e VMULPS %ZMM14,%ZMM7,%ZMM1 |
0xbc8394 VSUBPS %ZMM1,%ZMM2,%ZMM2 |
0xbc839a VMOVAPS 0xd40(%RSP),%ZMM15 |
0xbc83a2 VMULPS %ZMM15,%ZMM7,%ZMM1 |
0xbc83a8 VSUBPS %ZMM1,%ZMM3,%ZMM1 |
0xbc83ae VMOVAPS 0xd00(%RSP),%ZMM16 |
0xbc83b6 VMULPS %ZMM16,%ZMM2,%ZMM3 |
0xbc83bc VRNDSCALEPS $0,%ZMM3,%ZMM3 |
0xbc83c3 VMOVAPS 0xcc0(%RSP),%ZMM17 |
0xbc83cb VMULPS %ZMM17,%ZMM3,%ZMM7 |
0xbc83d1 VSUBPS %ZMM7,%ZMM11,%ZMM7 |
0xbc83d7 VMOVAPS 0xc80(%RSP),%ZMM18 |
0xbc83df VMULPS %ZMM18,%ZMM3,%ZMM3 |
0xbc83e5 VMOVAPS 0xc40(%RSP),%ZMM19 |
0xbc83ed VMULPS %ZMM19,%ZMM7,%ZMM11 |
0xbc83f3 VRNDSCALEPS $0,%ZMM11,%ZMM11 |
0xbc83fa VSUBPS %ZMM3,%ZMM2,%ZMM3 |
0xbc8400 VMOVAPS 0xc00(%RSP),%ZMM20 |
0xbc8408 VMULPS %ZMM20,%ZMM11,%ZMM2 |
0xbc840e VSUBPS %ZMM2,%ZMM7,%ZMM2 |
0xbc8414 VMULPS %ZMM12,%ZMM6,%ZMM7 |
0xbc841a VRNDSCALEPS $0,%ZMM7,%ZMM7 |
0xbc8421 VMULPS %ZMM7,%ZMM13,%ZMM11 |
0xbc8427 VSUBPS %ZMM11,%ZMM4,%ZMM11 |
0xbc842d VMULPS %ZMM7,%ZMM14,%ZMM4 |
0xbc8433 VSUBPS %ZMM4,%ZMM5,%ZMM5 |
0xbc8439 VMULPS %ZMM7,%ZMM15,%ZMM4 |
0xbc843f VSUBPS %ZMM4,%ZMM6,%ZMM4 |
0xbc8445 VMULPS %ZMM5,%ZMM16,%ZMM6 |
0xbc844b VRNDSCALEPS $0,%ZMM6,%ZMM6 |
0xbc8452 VMULPS %ZMM6,%ZMM17,%ZMM7 |
0xbc8458 VSUBPS %ZMM7,%ZMM11,%ZMM11 |
0xbc845e VMULPS %ZMM6,%ZMM18,%ZMM6 |
0xbc8464 VSUBPS %ZMM6,%ZMM5,%ZMM7 |
0xbc846a VMULPS %ZMM11,%ZMM19,%ZMM5 |
0xbc8470 VRNDSCALEPS $0,%ZMM5,%ZMM5 |
0xbc8477 VMULPS %ZMM5,%ZMM20,%ZMM5 |
0xbc847d VSUBPS %ZMM5,%ZMM11,%ZMM5 |
0xbc8483 VMULPS %ZMM12,%ZMM10,%ZMM6 |
0xbc8489 VRNDSCALEPS $0,%ZMM6,%ZMM6 |
0xbc8490 VMULPS %ZMM6,%ZMM13,%ZMM11 |
0xbc8496 VSUBPS %ZMM11,%ZMM8,%ZMM8 |
0xbc849c VMULPS %ZMM6,%ZMM14,%ZMM11 |
0xbc84a2 VSUBPS %ZMM11,%ZMM9,%ZMM9 |
0xbc84a8 VMULPS %ZMM6,%ZMM15,%ZMM6 |
0xbc84ae VSUBPS %ZMM6,%ZMM10,%ZMM10 |
0xbc84b4 VMULPS %ZMM9,%ZMM16,%ZMM6 |
0xbc84ba VRNDSCALEPS $0,%ZMM6,%ZMM6 |
0xbc84c1 VMULPS %ZMM6,%ZMM17,%ZMM11 |
0xbc84c7 VSUBPS %ZMM11,%ZMM8,%ZMM8 |
0xbc84cd VMULPS %ZMM6,%ZMM18,%ZMM6 |
0xbc84d3 VMULPS %ZMM8,%ZMM19,%ZMM11 |
0xbc84d9 VRNDSCALEPS $0,%ZMM11,%ZMM11 |
0xbc84e0 VSUBPS %ZMM6,%ZMM9,%ZMM15 |
0xbc84e6 VMULPS %ZMM11,%ZMM20,%ZMM6 |
0xbc84ec VSUBPS %ZMM6,%ZMM8,%ZMM14 |
0xbc84f2 VMULPS %ZMM4,%ZMM3,%ZMM6 |
0xbc84f8 VFNMADD231PS %ZMM7,%ZMM1,%ZMM6 |
0xbc84fe VMULPS %ZMM5,%ZMM1,%ZMM8 |
0xbc8504 VFNMADD231PS %ZMM4,%ZMM2,%ZMM8 |
0xbc850a VMULPS %ZMM7,%ZMM2,%ZMM9 |
0xbc8510 VFNMADD231PS %ZMM5,%ZMM3,%ZMM9 |
0xbc8516 VMULPS %ZMM10,%ZMM7,%ZMM11 |
0xbc851c VFNMADD231PS %ZMM15,%ZMM4,%ZMM11 |
0xbc8522 VMULPS %ZMM14,%ZMM4,%ZMM12 |
0xbc8528 VFNMADD231PS %ZMM10,%ZMM5,%ZMM12 |
0xbc852e VMULPS %ZMM15,%ZMM5,%ZMM13 |
0xbc8534 VFNMADD231PS %ZMM14,%ZMM7,%ZMM13 |
0xbc853a VMULPS %ZMM13,%ZMM8,%ZMM16 |
0xbc8540 VFNMADD231PS %ZMM12,%ZMM9,%ZMM16 |
0xbc8546 VMULPS %ZMM11,%ZMM9,%ZMM17 |
0xbc854c VFNMADD231PS %ZMM13,%ZMM6,%ZMM17 |
0xbc8552 VMULPS %ZMM12,%ZMM6,%ZMM18 |
0xbc8558 VFNMADD231PS %ZMM11,%ZMM8,%ZMM18 |
0xbc855e VMULPS %ZMM16,%ZMM16,%ZMM16 |
0xbc8564 VMULPS %ZMM17,%ZMM17,%ZMM17 |
0xbc856a VADDPS %ZMM16,%ZMM17,%ZMM16 |
0xbc8570 VMULPS %ZMM18,%ZMM18,%ZMM17 |
0xbc8576 VADDPS %ZMM16,%ZMM17,%ZMM16 |
0xbc857c VXORPS %XMM30,%XMM30,%XMM30 |
0xbc8582 VCMPPS $0x1,%ZMM16,%ZMM30,%K1 |
0xbc8589 VRSQRT14PS %ZMM16,%ZMM17{%K1}{z} |
0xbc858f VMULPS %ZMM16,%ZMM17,%ZMM18 |
0xbc8595 VMOVAPS 0xb80(%RSP),%ZMM25 |
0xbc859d VMULPS %ZMM25,%ZMM17,%ZMM19 |
0xbc85a3 VMOVAPS 0xb40(%RSP),%ZMM26 |
0xbc85ab VFMADD213PS %ZMM26,%ZMM17,%ZMM18 |
0xbc85b1 VMULPS %ZMM18,%ZMM19,%ZMM17 |
0xbc85b7 VMULPS %ZMM17,%ZMM16,%ZMM16 |
0xbc85bd VMULPS %ZMM11,%ZMM6,%ZMM17 |
0xbc85c3 VMULPS %ZMM12,%ZMM8,%ZMM18 |
0xbc85c9 VADDPS %ZMM18,%ZMM17,%ZMM17 |
0xbc85cf VMULPS %ZMM13,%ZMM9,%ZMM18 |
0xbc85d5 VADDPS %ZMM17,%ZMM18,%ZMM17 |
0xbc85db VCMPPS $0xc,%ZMM30,%ZMM17,%K1 |
0xbc85e2 VCMPPS $0xc,%ZMM30,%ZMM16,%K2 |
0xbc85e9 VCMPPS $0x1,%ZMM30,%ZMM17,%K3 |
0xbc85f0 VMOVAPS 0xb00(%RSP),%ZMM23 |
0xbc85f8 VBLENDMPS %ZMM30,%ZMM23,%ZMM18{%K1} |
0xbc85fe VMOVAPS %ZMM18,%ZMM18{%K2}{z} |
0xbc8604 VBROADCASTSS -0x86c4ae(%RIP),%ZMM18{%K3} |
0xbc860e VCMPPS $0x1,%ZMM30,%ZMM16,%K3 |
0xbc8615 VRCP14PS %ZMM17,%ZMM19{%K1}{z} |
0xbc861b VMOVAPS 0xa80(%RSP),%ZMM27 |
0xbc8623 VFNMADD213PS %ZMM27,%ZMM19,%ZMM17 |
0xbc8629 VMULPS %ZMM17,%ZMM19,%ZMM17 |
0xbc862f VMULPS %ZMM16,%ZMM17,%ZMM16 |
0xbc8635 VANDPS 0xa40(%RSP),%ZMM16,%ZMM17 |
0xbc863d VMOVAPS 0xa00(%RSP),%ZMM28 |
0xbc8645 VCMPPS $0x1,%ZMM17,%ZMM28,%K2 |
0xbc864c VRCP14PS %ZMM17,%ZMM19{%K2}{z} |
0xbc8652 VMOVAPS 0xac0(%RSP),%ZMM24 |
0xbc865a VORPS %ZMM24,%ZMM18,%ZMM18{%K3} |
0xbc8660 VCMPPS $0x1,%ZMM30,%ZMM16,%K1 |
0xbc8667 VMOVAPS %ZMM17,%ZMM16 |
0xbc866d VFNMADD213PS %ZMM27,%ZMM19,%ZMM16 |
0xbc8673 VMULPS %ZMM16,%ZMM19,%ZMM17{%K2} |
0xbc8679 VMULPS %ZMM17,%ZMM17,%ZMM16 |
0xbc867f VMULPS %ZMM16,%ZMM17,%ZMM19 |
0xbc8685 VMULPS %ZMM16,%ZMM16,%ZMM20 |
0xbc868b VMOVAPS 0x9c0(%RSP),%ZMM21 |
0xbc8693 VFMADD213PS 0x980(%RSP),%ZMM20,%ZMM21 |
0xbc869b VMOVAPS 0x940(%RSP),%ZMM22 |
0xbc86a3 VFMADD213PS 0x900(%RSP),%ZMM20,%ZMM22 |
0xbc86ab VFMADD213PS 0x8c0(%RSP),%ZMM20,%ZMM21 |
0xbc86b3 VFMADD213PS 0x880(%RSP),%ZMM20,%ZMM22 |
0xbc86bb VFMADD213PS 0x840(%RSP),%ZMM20,%ZMM21 |
0xbc86c3 VFMADD213PS 0x800(%RSP),%ZMM20,%ZMM22 |
0xbc86cb VFMADD231PS %ZMM21,%ZMM16,%ZMM22 |
0xbc86d1 VFMADD213PS %ZMM17,%ZMM19,%ZMM22 |
0xbc86d7 VSUBPS %ZMM22,%ZMM23,%ZMM22{%K2} |
0xbc86dd VXORPS %ZMM24,%ZMM22,%ZMM22{%K1} |
0xbc86e3 VADDPS %ZMM22,%ZMM18,%ZMM16 |
0xbc86e9 VMULPS %ZMM11,%ZMM2,%ZMM17 |
0xbc86ef VMULPS %ZMM12,%ZMM3,%ZMM18 |
0xbc86f5 VADDPS %ZMM18,%ZMM17,%ZMM17 |
0xbc86fb VMULPS %ZMM13,%ZMM1,%ZMM18 |
0xbc8701 VADDPS %ZMM17,%ZMM18,%ZMM18 |
0xbc8707 VPTERNLOGD $-0x1c,0x740(%RSP),%ZMM16,%ZMM18 |
0xbc8710 VMULPS %ZMM6,%ZMM6,%ZMM16 |
0xbc8716 VMULPS %ZMM8,%ZMM8,%ZMM17 |
0xbc871c VADDPS %ZMM17,%ZMM16,%ZMM16 |
0xbc8722 VMULPS %ZMM9,%ZMM9,%ZMM17 |
0xbc8728 VADDPS %ZMM16,%ZMM17,%ZMM16 |
0xbc872e VMULPS %ZMM11,%ZMM11,%ZMM17 |
0xbc8734 VMULPS %ZMM12,%ZMM12,%ZMM19 |
0xbc873a VADDPS %ZMM19,%ZMM17,%ZMM17 |
0xbc8740 VMULPS %ZMM13,%ZMM13,%ZMM19 |
0xbc8746 VADDPS %ZMM17,%ZMM19,%ZMM17 |
0xbc874c VADDPS 0x700(%RSP),%ZMM18,%ZMM18 |
0xbc8754 VMULPS 0x6c0(%RSP),%ZMM18,%ZMM19 |
0xbc875c VCVTPS2DQ %ZMM19,%ZMM20 |
0xbc8762 VRNDSCALEPS $0,%ZMM19,%ZMM19 |
0xbc8769 VPTESTNMD 0x680(%RSP),%ZMM20,%K1 |
0xbc8771 VPSUBD -0x844c7b(%RIP),%ZMM20,%ZMM21 |
0xbc877b VMOVDQA64 0x640(%RSP),%ZMM29 |
0xbc8783 VPTESTMD %ZMM29,%ZMM21,%K2 |
0xbc8789 VMOVAPS 0x600(%RSP),%ZMM31 |
0xbc8791 VMOVAPS %ZMM31,%ZMM21{%K2}{z} |
0xbc8797 VFMADD231PS 0x5c0(%RSP),%ZMM19,%ZMM18 |
0xbc879f VFMADD231PS 0x580(%RSP),%ZMM19,%ZMM18 |
0xbc87a7 VFMADD231PS 0x540(%RSP),%ZMM19,%ZMM18 |
0xbc87af VFMADD231PS 0x500(%RSP),%ZMM19,%ZMM18 |
0xbc87b7 VMULPS %ZMM18,%ZMM18,%ZMM19 |
0xbc87bd VMOVAPS 0x4c0(%RSP),%ZMM22 |
0xbc87c5 VFMADD213PS 0x480(%RSP),%ZMM19,%ZMM22 |
0xbc87cd VFMADD213PS 0x440(%RSP),%ZMM19,%ZMM22 |
0xbc87d5 VMULPS %ZMM19,%ZMM18,%ZMM23 |
0xbc87db VFMADD213PS %ZMM18,%ZMM22,%ZMM23 |
0xbc87e1 VMOVAPS 0x400(%RSP),%ZMM18 |
0xbc87e9 VFMADD213PS 0x3c0(%RSP),%ZMM19,%ZMM18 |
0xbc87f1 VFMADD213PS 0x380(%RSP),%ZMM19,%ZMM18 |
0xbc87f9 VFMADD213PS %ZMM25,%ZMM19,%ZMM18 |
0xbc87ff VFMADD213PS %ZMM28,%ZMM19,%ZMM18 |
0xbc8805 VBLENDMPS %ZMM18,%ZMM23,%ZMM19{%K1} |
0xbc880b VADDPS 0x180(%RSP),%ZMM30,%ZMM22 |
0xbc8813 VXORPS %ZMM21,%ZMM19,%ZMM19 |
0xbc8819 VMOVAPS 0x1c0(%RSP),%ZMM21 |
0xbc8821 VADDPS %ZMM21,%ZMM21,%ZMM21 |
0xbc8827 VFMADD213PS %ZMM22,%ZMM19,%ZMM21 |
0xbc882d VMULPS %ZMM19,%ZMM19,%ZMM22 |
0xbc8833 VMOVAPS 0x340(%RSP),%ZMM24 |
0xbc883b VMULPS 0x200(%RSP),%ZMM24,%ZMM24 |
0xbc8843 VFMADD213PS %ZMM21,%ZMM22,%ZMM24 |
0xbc8849 VMULPS %ZMM19,%ZMM22,%ZMM21 |
0xbc884f VMOVAPS 0x300(%RSP),%ZMM22 |
0xbc8857 VMULPS 0x240(%RSP),%ZMM22,%ZMM22 |
0xbc885f VFMADD213PS %ZMM24,%ZMM21,%ZMM22 |
0xbc8865 VMULPS %ZMM19,%ZMM21,%ZMM19 |
0xbc886b VMOVAPS 0x2c0(%RSP),%ZMM21 |
0xbc8873 VMULPS 0x280(%RSP),%ZMM21,%ZMM21 |
0xbc887b VFMADD213PS %ZMM22,%ZMM19,%ZMM21 |
0xbc8881 VMULPS %ZMM5,%ZMM5,%ZMM19 |
0xbc8887 VMULPS %ZMM7,%ZMM7,%ZMM22 |
0xbc888d VADDPS %ZMM19,%ZMM22,%ZMM19 |
0xbc8893 VMULPS %ZMM4,%ZMM4,%ZMM22 |
0xbc8899 VADDPS %ZMM19,%ZMM22,%ZMM19 |
0xbc889f VMAXPS 0x7c0(%RSP),%ZMM19,%ZMM19 |
0xbc88a7 VMULPS 0x780(%RSP),%ZMM19,%ZMM22 |
0xbc88af VMAXPS %ZMM22,%ZMM16,%ZMM16 |
0xbc88b5 VMAXPS %ZMM22,%ZMM17,%ZMM17 |
0xbc88bb VRSQRT14PS %ZMM19,%ZMM22 |
0xbc88c1 VMULPS %ZMM7,%ZMM3,%ZMM3 |
0xbc88c7 VMULPS %ZMM15,%ZMM7,%ZMM7 |
0xbc88cd VMULPS %ZMM22,%ZMM19,%ZMM15 |
0xbc88d3 VFMADD213PS %ZMM26,%ZMM22,%ZMM15 |
0xbc88d9 VMULPS %ZMM25,%ZMM22,%ZMM22 |
0xbc88df VMULPS %ZMM15,%ZMM22,%ZMM15 |
0xbc88e5 VMULPS %ZMM5,%ZMM2,%ZMM2 |
0xbc88eb VADDPS %ZMM2,%ZMM3,%ZMM2 |
0xbc88f1 VMULPS %ZMM4,%ZMM1,%ZMM1 |
0xbc88f7 VADDPS %ZMM2,%ZMM1,%ZMM1 |
0xbc88fd VRCP14PS %ZMM16,%ZMM2 |
0xbc8903 VMULPS %ZMM10,%ZMM4,%ZMM3 |
0xbc8909 VFNMADD213PS %ZMM27,%ZMM2,%ZMM16 |
0xbc890f VPTESTMD %ZMM29,%ZMM20,%K2 |
0xbc8915 VRCP14PS %ZMM17,%ZMM4 |
0xbc891b VMULPS %ZMM16,%ZMM2,%ZMM2 |
0xbc8921 VFNMADD213PS %ZMM27,%ZMM4,%ZMM17 |
0xbc8927 VMULPS %ZMM17,%ZMM4,%ZMM4 |
0xbc892d VMULPS %ZMM14,%ZMM5,%ZMM5 |
0xbc8933 VADDPS %ZMM5,%ZMM7,%ZMM5 |
0xbc8939 VADDPS %ZMM5,%ZMM3,%ZMM3 |
0xbc893f VMULPS %ZMM15,%ZMM19,%ZMM5 |
0xbc8945 VMULPS %ZMM2,%ZMM5,%ZMM2 |
0xbc894b VMULPS %ZMM4,%ZMM5,%ZMM7 |
0xbc8951 VMULPS %ZMM15,%ZMM15,%ZMM5 |
0xbc8957 VMULPS %ZMM5,%ZMM1,%ZMM4 |
0xbc895d VMULPS %ZMM5,%ZMM3,%ZMM5 |
0xbc8963 VMOVAPS %ZMM23,%ZMM18{%K1} |
0xbc8969 VMOVAPS %ZMM31,%ZMM1{%K2}{z} |
0xbc896f VXORPS %ZMM1,%ZMM18,%ZMM1 |
0xbc8975 VMULPS %ZMM1,%ZMM21,%ZMM1 |
0xbc897b VMULPS %ZMM1,%ZMM2,%ZMM2 |
0xbc8981 VMULPS %ZMM1,%ZMM7,%ZMM1 |
0xbc8987 VMULPS %ZMM2,%ZMM6,%ZMM3 |
0xbc898d VMULPS %ZMM2,%ZMM8,%ZMM6 |
0xbc8993 VMULPS %ZMM2,%ZMM9,%ZMM2 |
0xbc8999 VMULPS %ZMM1,%ZMM11,%ZMM7 |
0xbc899f VMULPS %ZMM1,%ZMM12,%ZMM8 |
0xbc89a5 VMULPS %ZMM1,%ZMM13,%ZMM1 |
0xbc89ab VMOVAPS %ZMM3,0xf80(%RSP) |
0xbc89b3 VMOVAPS %ZMM2,0xf00(%RSP) |
0xbc89bb VMOVAPS %ZMM6,0xf40(%RSP) |
0xbc89c3 LEA 0xf00(%RSP),%RAX |
0xbc89cb MOV %RAX,0x20(%RSP) |
0xbc89d0 VMOVAPS %ZMM7,0xec0(%RSP) |
0xbc89d8 LEA 0xec0(%RSP),%RAX |
0xbc89e0 MOV %RAX,0x28(%RSP) |
0xbc89e5 VMOVAPS %ZMM8,0xe80(%RSP) |
0xbc89ed LEA 0xe80(%RSP),%RAX |
0xbc89f5 MOV %RAX,0x30(%RSP) |
0xbc89fa VMOVAPS %ZMM1,0xe40(%RSP) |
0xbc8a02 LEA 0xe40(%RSP),%RAX |
0xbc8a0a MOV %RAX,0x38(%RSP) |
0xbc8a0f MOV 0x78(%RSP),%RAX |
0xbc8a14 MOV %RAX,0x40(%RSP) |
0xbc8a19 LEA 0xf40(%RSP),%RAX |
0xbc8a21 MOV %RAX,0x18(%RSP) |
0xbc8a26 LEA 0xf80(%RSP),%RAX |
0xbc8a2e MOV %RAX,0x10(%RSP) |
0xbc8a33 LEA 0x100(%RSP),%RDX |
0xbc8a3b LEA 0xc0(%RSP),%R8 |
0xbc8a43 LEA 0x80(%RSP),%R9 |
0xbc8a4b CALL bde790 <_ZN12_GLOBAL__N_124do_dih_fup_noshiftf_simdEPKiS1_S1_S1_N3gmx9SimdFloatES3_S3_S3_S3_S3_S3_S3_PA4_f@@608> |
0xbc8a50 ADD $0x50,%R14 |
0xbc8a54 CMP %RBX,%R14 |
0xbc8a57 JAE bc8b74 |
0xbc8a5d MOV %R14,%RAX |
0xbc8a60 XOR %ECX,%ECX |
0xbc8a62 MOV %R14D,%EDX |
0xbc8a65 JMP bc8ab9 |
(16804) 0xbc8a70 MOVL $0,0x180(%RSP,%RCX,1) |
(16804) 0xbc8a7b MOVL $0,0x1c0(%RSP,%RCX,1) |
(16804) 0xbc8a86 MOVL $0,0x200(%RSP,%RCX,1) |
(16804) 0xbc8a91 MOVL $0,0x240(%RSP,%RCX,1) |
(16804) 0xbc8a9c MOVL $0,0x280(%RSP,%RCX,1) |
(16804) 0xbc8aa7 ADD $0x4,%RCX |
(16804) 0xbc8aab ADD $0x5,%RAX |
(16804) 0xbc8aaf CMP $0x40,%RCX |
(16804) 0xbc8ab3 JE bc8230 |
(16804) 0xbc8ab9 MOVSXD %EDX,%RDI |
(16804) 0xbc8abc MOVSXD (%R12,%RDI,4),%RSI |
(16804) 0xbc8ac0 MOV 0x4(%R12,%RDI,4),%R8D |
(16804) 0xbc8ac5 MOV %R8D,0xfc0(%RSP,%RCX,1) |
(16804) 0xbc8acd MOV 0x8(%R12,%RDI,4),%R8D |
(16804) 0xbc8ad2 MOV %R8D,0x100(%RSP,%RCX,1) |
(16804) 0xbc8ada MOV 0xc(%R12,%RDI,4),%R8D |
(16804) 0xbc8adf MOV %R8D,0xc0(%RSP,%RCX,1) |
(16804) 0xbc8ae7 MOV 0x10(%R12,%RDI,4),%EDI |
(16804) 0xbc8aec MOV %EDI,0x80(%RSP,%RCX,1) |
(16804) 0xbc8af3 CMP %RBX,%RAX |
(16804) 0xbc8af6 JAE bc8a70 |
(16804) 0xbc8afc LEA (%RSI,%RSI,2),%RSI |
(16804) 0xbc8b00 SAL $0x4,%RSI |
(16804) 0xbc8b04 VMOVSS 0x4(%R15,%RSI,1),%XMM0 |
(16804) 0xbc8b0b VMOVSS %XMM0,0x180(%RSP,%RCX,1) |
(16804) 0xbc8b14 VMOVSS 0x8(%R15,%RSI,1),%XMM0 |
(16804) 0xbc8b1b VMOVSS %XMM0,0x1c0(%RSP,%RCX,1) |
(16804) 0xbc8b24 VMOVSS 0xc(%R15,%RSI,1),%XMM0 |
(16804) 0xbc8b2b VMOVSS %XMM0,0x200(%RSP,%RCX,1) |
(16804) 0xbc8b34 VMOVSS 0x10(%R15,%RSI,1),%XMM0 |
(16804) 0xbc8b3b VMOVSS %XMM0,0x240(%RSP,%RCX,1) |
(16804) 0xbc8b44 VMOVD 0x14(%R15,%RSI,1),%XMM0 |
(16804) 0xbc8b4b VMOVD %XMM0,0x280(%RSP,%RCX,1) |
(16804) 0xbc8b54 LEA 0x5(%RDX),%ESI |
(16804) 0xbc8b57 CMP %R13D,%ESI |
(16804) 0xbc8b5a CMOVL %ESI,%EDX |
(16804) 0xbc8b5d ADD $0x4,%RCX |
(16804) 0xbc8b61 ADD $0x5,%RAX |
(16804) 0xbc8b65 CMP $0x40,%RCX |
(16804) 0xbc8b69 JNE bc8ab9 |
0xbc8b6f JMP bc8230 |
/home/eoseret/gromacs-2024.2/src/gromacs/listed_forces/bonded.cpp: 2179 - 2256 |
-------------------------------------------------------------------------------- |
2179: for (i = 0; (i < nbonds); i += GMX_SIMD_REAL_WIDTH * nfa1) |
[...] |
2185: for (s = 0; s < GMX_SIMD_REAL_WIDTH; s++) |
2186: { |
2187: type = forceatoms[iu]; |
2188: ai[s] = forceatoms[iu + 1]; |
2189: aj[s] = forceatoms[iu + 2]; |
2190: ak[s] = forceatoms[iu + 3]; |
2191: al[s] = forceatoms[iu + 4]; |
2192: |
2193: /* At the end fill the arrays with the last atoms and 0 params */ |
2194: if (i + s * nfa1 < nbonds) |
[...] |
2201: parm[j * GMX_SIMD_REAL_WIDTH + s] = forceparams[type].rbdihs.rbcA[j]; |
2202: } |
2203: |
2204: if (iu + nfa1 < nbonds) |
[...] |
2213: parm[j * GMX_SIMD_REAL_WIDTH + s] = 0; |
[...] |
2219: dih_angle_simd( |
[...] |
2232: parm_S = load<SimdReal>(parm + j * GMX_SIMD_REAL_WIDTH); |
[...] |
2256: do_dih_fup_noshiftf_simd(ai, aj, ak, al, p_S, q_S, mx_S, my_S, mz_S, nx_S, ny_S, nz_S, f); |
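The excerpt above is the body that feeds loop 16803: the companion loop (16804 in the listing) appears to pack each 5-int dihedral record (type, ai, aj, ak, al) into transposed, SIMD-width stack buffers and pad the tail with the last record and zero parameters, and the main loop then processes GMX_SIMD_REAL_WIDTH dihedrals per iteration. Below is a simplified, self-contained sketch of that packing/padding step; the width of 16, the record layout, and the buffer names are assumptions chosen to mirror the AVX-512 single-precision case, not the exact GROMACS code.

```cpp
// Sketch of the index-packing step suggested by bonded.cpp:2185-2213 and
// loop 16804: for each SIMD lane s, read one 5-int interaction record
// (type, ai, aj, ak, al) into transposed, SIMD-width arrays; past the end,
// keep re-reading the last record so padded lanes stay valid.
#include <array>
#include <cstdio>
#include <vector>

constexpr int simdWidth = 16; // assumed GMX_SIMD_REAL_WIDTH for AVX-512 float
constexpr int nfa1      = 5;  // ints per dihedral record: type + 4 atom indices

int main()
{
    // Two real records; the remaining 14 lanes must be padded with the last one.
    std::vector<int> forceatoms = { /*type*/ 7, 0, 1, 2, 3,
                                    /*type*/ 7, 1, 2, 3, 4 };
    const int nbonds = static_cast<int>(forceatoms.size());

    std::array<int, simdWidth> ai{}, aj{}, ak{}, al{};
    int iu = 0;
    for (int s = 0; s < simdWidth; ++s)
    {
        ai[s] = forceatoms[iu + 1];
        aj[s] = forceatoms[iu + 2];
        ak[s] = forceatoms[iu + 3];
        al[s] = forceatoms[iu + 4];
        // Only advance while another full record exists; otherwise repeat the
        // last one ("fill the arrays with the last atoms" in the source comment).
        if (iu + nfa1 < nbonds)
        {
            iu += nfa1;
        }
    }
    std::printf("lane 0: %d %d %d %d, lane 15: %d %d %d %d\n",
                ai[0], aj[0], ak[0], al[0], ai[15], aj[15], ak[15], al[15]);
    return 0;
}
```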
/home/eoseret/gromacs-2024.2/src/gromacs/simd/include/gromacs/simd/impl_x86_avx_512/impl_x86_avx_512_util_float.h: 113 - 113 |
-------------------------------------------------------------------------------- |
113: v->simdInternal_ = _mm512_i32gather_ps(offset.simdInternal_, base, sizeof(float) * align_); |
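Line 113 is the transposed gather that appears in the listing as the VPMULLD + VGATHERDPS pairs: the source folds the alignment factor into the gather's scale argument, but because hardware gathers only accept scales of 1, 2, 4 or 8, a per-atom stride such as 3 (x, y, z) ends up as an explicit vector multiply of the indices, with the gather itself using a scale of sizeof(float). A minimal, self-contained illustration of that pattern follows; the stride of 3 and the buffer names are assumptions, and it requires AVX-512F (compile with -mavx512f).

```cpp
// Index-scale + gather pattern behind impl_x86_avx_512_util_float.h:113
// (VPMULLD followed by VGATHERDPS in the disassembly above).
#include <immintrin.h>
#include <cstdio>

int main()
{
    alignas(64) float base[64];
    for (int i = 0; i < 64; ++i)
        base[i] = static_cast<float>(i);

    alignas(64) int idx[16] = { 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15 };

    const int stride = 3; // hypothetical x/y/z layout: 3 floats per atom
    __m512i offsets = _mm512_load_epi32(idx);
    offsets = _mm512_mullo_epi32(offsets, _mm512_set1_epi32(stride)); // VPMULLD
    // Hardware scale is sizeof(float) == 4, matching the ",4" in VGATHERDPS.
    __m512 v = _mm512_i32gather_ps(offsets, base, sizeof(float));     // VGATHERDPS

    alignas(64) float out[16];
    _mm512_store_ps(out, v);
    std::printf("%g %g %g\n", out[0], out[1], out[15]); // 0 3 45
    return 0;
}
```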
/home/eoseret/gromacs-2024.2/src/gromacs/simd/include/gromacs/simd/impl_x86_avx_512/impl_x86_avx_512_simd_float.h: 175 - 501 |
-------------------------------------------------------------------------------- |
175: return { _mm512_castsi512_ps(_mm512_xor_epi32(_mm512_castps_si512(a.simdInternal_), |
[...] |
181: return { _mm512_add_ps(a.simdInternal_, b.simdInternal_) }; |
182: } |
183: |
184: static inline SimdFloat gmx_simdcall operator-(SimdFloat a, SimdFloat b) |
185: { |
186: return { _mm512_sub_ps(a.simdInternal_, b.simdInternal_) }; |
[...] |
197: return { _mm512_mul_ps(a.simdInternal_, b.simdInternal_) }; |
198: } |
199: |
200: static inline SimdFloat gmx_simdcall fma(SimdFloat a, SimdFloat b, SimdFloat c) |
201: { |
202: return { _mm512_fmadd_ps(a.simdInternal_, b.simdInternal_, c.simdInternal_) }; |
203: } |
204: |
205: static inline SimdFloat gmx_simdcall fms(SimdFloat a, SimdFloat b, SimdFloat c) |
206: { |
207: return { _mm512_fmsub_ps(a.simdInternal_, b.simdInternal_, c.simdInternal_) }; |
208: } |
209: |
210: static inline SimdFloat gmx_simdcall fnma(SimdFloat a, SimdFloat b, SimdFloat c) |
211: { |
212: return { _mm512_fnmadd_ps(a.simdInternal_, b.simdInternal_, c.simdInternal_) }; |
[...] |
224: return { _mm512_rsqrt14_ps(x.simdInternal_) }; |
225: } |
226: |
227: static inline SimdFloat gmx_simdcall rcp(SimdFloat x) |
228: { |
229: return { _mm512_rcp14_ps(x.simdInternal_) }; |
[...] |
252: return { _mm512_maskz_rsqrt14_ps(m.simdInternal_, x.simdInternal_) }; |
253: } |
254: |
255: static inline SimdFloat gmx_simdcall maskzRcp(SimdFloat x, SimdFBool m) |
256: { |
257: return { _mm512_maskz_rcp14_ps(m.simdInternal_, x.simdInternal_) }; |
[...] |
263: return { _mm512_castsi512_ps(_mm512_andnot_epi32(_mm512_castps_si512(_mm512_set1_ps(GMX_FLOAT_NEGZERO)), |
[...] |
269: return { _mm512_max_ps(a.simdInternal_, b.simdInternal_) }; |
[...] |
279: return { _mm512_roundscale_ps(x.simdInternal_, 0) }; |
[...] |
362: return { _mm512_cmp_ps_mask(a.simdInternal_, b.simdInternal_, _CMP_NEQ_OQ) }; |
363: } |
364: |
365: static inline SimdFBool gmx_simdcall operator<(SimdFloat a, SimdFloat b) |
366: { |
367: return { _mm512_cmp_ps_mask(a.simdInternal_, b.simdInternal_, _CMP_LT_OQ) }; |
[...] |
398: return { _mm512_mask_mov_ps(_mm512_setzero_ps(), m.simdInternal_, a.simdInternal_) }; |
399: } |
400: |
401: static inline SimdFloat gmx_simdcall selectByNotMask(SimdFloat a, SimdFBool m) |
402: { |
403: return { _mm512_mask_mov_ps(a.simdInternal_, m.simdInternal_, _mm512_setzero_ps()) }; |
404: } |
405: |
406: static inline SimdFloat gmx_simdcall blend(SimdFloat a, SimdFloat b, SimdFBool sel) |
407: { |
408: return { _mm512_mask_blend_ps(sel.simdInternal_, a.simdInternal_, b.simdInternal_) }; |
409: } |
410: |
411: static inline SimdFloat gmx_simdcall copysign(SimdFloat a, SimdFloat b) |
412: { |
413: return { _mm512_castsi512_ps(_mm512_ternarylogic_epi32(_mm512_castps_si512(a.simdInternal_), |
[...] |
441: return { _mm512_add_epi32(a.simdInternal_, b.simdInternal_) }; |
[...] |
451: return { _mm512_mullo_epi32(a.simdInternal_, b.simdInternal_) }; |
452: } |
453: |
454: static inline SimdFIBool gmx_simdcall operator==(SimdFInt32 a, SimdFInt32 b) |
455: { |
456: return { _mm512_cmp_epi32_mask(a.simdInternal_, b.simdInternal_, _MM_CMPINT_EQ) }; |
[...] |
501: return { _mm512_cvtps_epi32(a.simdInternal_) }; |
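The wrapper excerpts above compose into the reciprocal-square-root pattern visible at 0xbc8589-0xbc85b7: a ~14-bit VRSQRT14PS estimate refined by one Newton-Raphson step built from multiplies and an FMA. The sketch below reproduces that refinement in the plain r·(1.5 − 0.5·x·r²) form rather than the fused form the compiler emitted; the constants are the standard Newton step, not values read from the kernel, and AVX-512F is required.

```cpp
// rsqrt14 estimate + one Newton-Raphson refinement, mirroring the
// VRSQRT14PS / VMULPS / VFMADD213PS / VMULPS sequence in the listing.
#include <immintrin.h>
#include <cmath>
#include <cstdio>

int main()
{
    alignas(64) float x[16], out[16];
    for (int i = 0; i < 16; ++i)
        x[i] = 1.0f + i;

    __m512 vx = _mm512_load_ps(x);
    __m512 r  = _mm512_rsqrt14_ps(vx);                  // ~14-bit estimate (VRSQRT14PS)

    // One Newton-Raphson step: r' = r * (1.5 - 0.5 * x * r * r)
    __m512 half  = _mm512_set1_ps(0.5f);
    __m512 three = _mm512_set1_ps(3.0f);
    __m512 xr2   = _mm512_mul_ps(vx, _mm512_mul_ps(r, r));
    r = _mm512_mul_ps(_mm512_mul_ps(half, r), _mm512_sub_ps(three, xr2));

    _mm512_store_ps(out, r);
    std::printf("%.7f vs %.7f\n", out[3], 1.0f / std::sqrt(x[3])); // ~0.5 vs 0.5
    return 0;
}
```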
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►100.00+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 |
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►81.30+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 | |
►13.82+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
►4.88+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►100.00+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 |
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►79.25+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 | |
►20.75+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►94.34+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 | |
►5.66+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►84.87+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 | |
►10.08+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
►5.04+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
►100.00+ | calculateSimpleBond(int, int, [...] | bonded.cpp:4143 | libgromacs_mpi.so.9.0.0 |
○ | (anonymous namespace)::calc_on[...] | listed_forces.cpp:356 | libgromacs_mpi.so.9.0.0 |
○ | .omp_outlined.#0xbe5d40 | listed_forces.cpp:428 | libgromacs_mpi.so.9.0.0 |
○ | __kmp_invoke_microtask | libomp.so | |
○ | __kmp_fork_call | libomp.so | |
○ | __kmpc_fork_call | libomp.so | |
○ | ListedForces::calculate(gmx_wa[...] | listed_forces.cpp:387 | libgromacs_mpi.so.9.0.0 |
○ | do_force(_IO_FILE*, t_commrec [...] | sim_util.cpp:2047 | libgromacs_mpi.so.9.0.0 |
○ | gmx::LegacySimulator::do_md() | md.cpp:1248 | libgromacs_mpi.so.9.0.0 |
○ | gmx::Mdrunner::mdrunner() | runner.cpp:2311 | libgromacs_mpi.so.9.0.0 |
○ | gmx::gmx_mdrun(int, gmx_hw_inf[...] | mdrun.cpp:280 | gmx_mpi |
○ | gmx::gmx_mdrun(int, char**) | mdrun.cpp:82 | gmx_mpi |
○ | gmx::CommandLineModuleManager:[...] | cmdlinemodulemanager.cpp:569 | libgromacs_mpi.so.9.0.0 |
○ | main | gmx.cpp:58 | gmx_mpi |
○ | __libc_start_call_main | libc.so.6 |
Path / |
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.03 |
CQA speedup if FP arith vectorized | 1.00 |
CQA speedup if fully vectorized | 1.02 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | 1.08 |
Bottlenecks | micro-operation queue |
Function | std::enable_if<((BondedKernelFlavor)0)==((BondedKernelFlavor)0), float>::type (anonymous namespace)::rbdihs<(BondedKernelFlavor)0>(int, int const*, t_iparams const*, float const (*) [3], float (*) [4], float (*) [3], t_pbc const*, float, float*, gmx::ArrayRef |
Source | bonded.cpp:2179-2179,bonded.cpp:2185-2185,bonded.cpp:2219-2219,bonded.cpp:2232-2232,bonded.cpp:2256-2256,impl_x86_avx_512_util_float.h:113-113,impl_x86_avx_512_simd_float.h:175-175,impl_x86_avx_512_simd_float.h:181-181,impl_x86_avx_512_simd_float.h:186-186,impl_x86_avx_512_simd_float.h:197-197,impl_x86_avx_512_simd_float.h:202-202,impl_x86_avx_512_simd_float.h:207-207,impl_x86_avx_512_simd_float.h:212-212,impl_x86_avx_512_simd_float.h:224-224,impl_x86_avx_512_simd_float.h:229-229,impl_x86_avx_512_simd_float.h:252-252,impl_x86_avx_512_simd_float.h:257-257,impl_x86_avx_512_simd_float.h:263-263,impl_x86_avx_512_simd_float.h:269-269,impl_x86_avx_512_simd_float.h:279-279,impl_x86_avx_512_simd_float.h:362-362,impl_x86_avx_512_simd_float.h:367-367,impl_x86_avx_512_simd_float.h:398-398,impl_x86_avx_512_simd_float.h:403-403,impl_x86_avx_512_simd_float.h:408-408,impl_x86_avx_512_simd_float.h:413-413,impl_x86_avx_512_simd_float.h:441-441,impl_x86_avx_512_simd_float.h:451-451,impl_x86_avx_512_simd_float.h:456-456,impl_x86_avx_512_simd_float.h:501-501 |
Source loop unroll info | NA |
Source loop unroll confidence level | NA |
Unroll/vectorization loop type | NA |
Unroll factor | NA |
CQA cycles | 216.92 |
CQA cycles if no scalar integer | 210.33 |
CQA cycles if FP arith vectorized | 216.92 |
CQA cycles if fully vectorized | 212.32 |
Front-end cycles | 216.92 |
P0 cycles | 3.25 |
P1 cycles | 3.25 |
P2 cycles | 3.13 |
P3 cycles | 3.13 |
P4 cycles | 1.75 |
P5 cycles | 43.67 |
P6 cycles | 43.67 |
P7 cycles | 43.67 |
P8 cycles | 200.50 |
P9 cycles | 190.42 |
P10 cycles | 150.08 |
P11 cycles | 151.00 |
P12 cycles | 114.00 |
P13 cycles | 114.00 |
DIV/SQRT cycles | 0.00 |
Inter-iter dependencies cycles | 1 |
FE+BE cycles (UFS) | NA |
Stall cycles (UFS) | NA |
Nb insns | 333.50 |
Nb uops | 1301.50 |
Nb loads | 73.00 |
Nb stores | 13.00 |
Nb stack references | 72.00 |
FLOP/cycle | 16.97 |
Nb FLOP add-sub | 768.00 |
Nb FLOP mul | 1600.00 |
Nb FLOP fma | 608.00 |
Nb FLOP div | 0.00 |
Nb FLOP rcp | 64.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 32.00 |
Bytes/cycle | 22.26 |
Bytes prefetched | 0.00 |
Bytes loaded | 4388.00 |
Bytes stored | 440.00 |
Stride 0 | 3.00 |
Stride 1 | 0.00 |
Stride n | 2.00 |
Stride unknown | 2.50 |
Stride indirect | 0.00 |
Vectorization ratio all | 97.33 |
Vectorization ratio load | 98.55 |
Vectorization ratio store | 46.15 |
Vectorization ratio mul | 100.00 |
Vectorization ratio add_sub | 100.00 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 98.41 |
Vector-efficiency ratio all | 94.40 |
Vector-efficiency ratio load | 98.64 |
Vector-efficiency ratio store | 52.88 |
Vector-efficiency ratio mul | 100.00 |
Vector-efficiency ratio add_sub | 100.00 |
Vector-efficiency ratio fma | 100.00 |
Vector-efficiency ratio div_sqrt | 100.00 |
Vector-efficiency ratio other | 83.04 |
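As a quick cross-check of this table, the FLOP/cycle row follows from the other rows if each FMA is counted as two FLOPs: (768 add-sub + 1600 mul + 2 × 608 fma + 64 rcp + 32 rsqrt) = 3680 FLOPs over 216.92 CQA cycles ≈ 16.96, consistent with the reported 16.97 once rounding is taken into account (this table averages the two per-path tables that follow, e.g. 333.50 instructions vs. 334 and 333).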
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.03 |
CQA speedup if FP arith vectorized | 1.00 |
CQA speedup if fully vectorized | 1.02 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | 1.08 |
Bottlenecks | micro-operation queue, |
Function | std::enable_if<((BondedKernelFlavor)0)==((BondedKernelFlavor)0), float>::type (anonymous namespace)::rbdihs<(BondedKernelFlavor)0>(int, int const*, t_iparams const*, float const (*) [3], float (*) [4], float (*) [3], t_pbc const*, float, float*, gmx::ArrayRef |
Source | bonded.cpp:2179-2179,bonded.cpp:2185-2185,bonded.cpp:2219-2219,bonded.cpp:2232-2232,bonded.cpp:2256-2256,impl_x86_avx_512_util_float.h:113-113,impl_x86_avx_512_simd_float.h:175-175,impl_x86_avx_512_simd_float.h:181-181,impl_x86_avx_512_simd_float.h:186-186,impl_x86_avx_512_simd_float.h:197-197,impl_x86_avx_512_simd_float.h:202-202,impl_x86_avx_512_simd_float.h:207-207,impl_x86_avx_512_simd_float.h:212-212,impl_x86_avx_512_simd_float.h:224-224,impl_x86_avx_512_simd_float.h:229-229,impl_x86_avx_512_simd_float.h:252-252,impl_x86_avx_512_simd_float.h:257-257,impl_x86_avx_512_simd_float.h:263-263,impl_x86_avx_512_simd_float.h:269-269,impl_x86_avx_512_simd_float.h:279-279,impl_x86_avx_512_simd_float.h:362-362,impl_x86_avx_512_simd_float.h:367-367,impl_x86_avx_512_simd_float.h:398-398,impl_x86_avx_512_simd_float.h:403-403,impl_x86_avx_512_simd_float.h:408-408,impl_x86_avx_512_simd_float.h:413-413,impl_x86_avx_512_simd_float.h:441-441,impl_x86_avx_512_simd_float.h:451-451,impl_x86_avx_512_simd_float.h:456-456,impl_x86_avx_512_simd_float.h:501-501 |
Source loop unroll info | NA |
Source loop unroll confidence level | NA |
Unroll/vectorization loop type | NA |
Unroll factor | NA |
CQA cycles | 217.00 |
CQA cycles if no scalar integer | 210.33 |
CQA cycles if FP arith vectorized | 217.00 |
CQA cycles if fully vectorized | 212.41 |
Front-end cycles | 217.00 |
P0 cycles | 3.25 |
P1 cycles | 3.25 |
P2 cycles | 3.25 |
P3 cycles | 3.25 |
P4 cycles | 2.00 |
P5 cycles | 43.67 |
P6 cycles | 43.67 |
P7 cycles | 43.67 |
P8 cycles | 200.50 |
P9 cycles | 190.42 |
P10 cycles | 150.08 |
P11 cycles | 151.00 |
P12 cycles | 114.00 |
P13 cycles | 114.00 |
DIV/SQRT cycles | 0.00 |
Inter-iter dependencies cycles | 1 |
FE+BE cycles (UFS) | NA |
Stall cycles (UFS) | NA |
Nb insns | 334.00 |
Nb uops | 1302.00 |
Nb loads | 73.00 |
Nb stores | 13.00 |
Nb stack references | 72.00 |
FLOP/cycle | 16.96 |
Nb FLOP add-sub | 768.00 |
Nb FLOP mul | 1600.00 |
Nb FLOP fma | 608.00 |
Nb FLOP div | 0.00 |
Nb FLOP rcp | 64.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 32.00 |
Bytes/cycle | 22.25 |
Bytes prefetched | 0.00 |
Bytes loaded | 4388.00 |
Bytes stored | 440.00 |
Stride 0 | 3.00 |
Stride 1 | 0.00 |
Stride n | 2.00 |
Stride unknown | 3.00 |
Stride indirect | 0.00 |
Vectorization ratio all | 97.33 |
Vectorization ratio load | 98.55 |
Vectorization ratio store | 46.15 |
Vectorization ratio mul | 100.00 |
Vectorization ratio add_sub | 100.00 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 98.41 |
Vector-efficiency ratio all | 94.40 |
Vector-efficiency ratio load | 98.64 |
Vector-efficiency ratio store | 52.88 |
Vector-efficiency ratio mul | 100.00 |
Vector-efficiency ratio add_sub | 100.00 |
Vector-efficiency ratio fma | 100.00 |
Vector-efficiency ratio div_sqrt | 100.00 |
Vector-efficiency ratio other | 83.04 |
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.03 |
CQA speedup if FP arith vectorized | 1.00 |
CQA speedup if fully vectorized | 1.02 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | 1.08 |
Bottlenecks | micro-operation queue, |
Function | std::enable_if<((BondedKernelFlavor)0)==((BondedKernelFlavor)0), float>::type (anonymous namespace)::rbdihs<(BondedKernelFlavor)0>(int, int const*, t_iparams const*, float const (*) [3], float (*) [4], float (*) [3], t_pbc const*, float, float*, gmx::ArrayRef |
Source | bonded.cpp:2179-2179,bonded.cpp:2185-2185,bonded.cpp:2219-2219,bonded.cpp:2232-2232,bonded.cpp:2256-2256,impl_x86_avx_512_util_float.h:113-113,impl_x86_avx_512_simd_float.h:175-175,impl_x86_avx_512_simd_float.h:181-181,impl_x86_avx_512_simd_float.h:186-186,impl_x86_avx_512_simd_float.h:197-197,impl_x86_avx_512_simd_float.h:202-202,impl_x86_avx_512_simd_float.h:207-207,impl_x86_avx_512_simd_float.h:212-212,impl_x86_avx_512_simd_float.h:224-224,impl_x86_avx_512_simd_float.h:229-229,impl_x86_avx_512_simd_float.h:252-252,impl_x86_avx_512_simd_float.h:257-257,impl_x86_avx_512_simd_float.h:263-263,impl_x86_avx_512_simd_float.h:269-269,impl_x86_avx_512_simd_float.h:279-279,impl_x86_avx_512_simd_float.h:362-362,impl_x86_avx_512_simd_float.h:367-367,impl_x86_avx_512_simd_float.h:398-398,impl_x86_avx_512_simd_float.h:403-403,impl_x86_avx_512_simd_float.h:408-408,impl_x86_avx_512_simd_float.h:413-413,impl_x86_avx_512_simd_float.h:441-441,impl_x86_avx_512_simd_float.h:451-451,impl_x86_avx_512_simd_float.h:456-456,impl_x86_avx_512_simd_float.h:501-501 |
Source loop unroll info | NA |
Source loop unroll confidence level | NA |
Unroll/vectorization loop type | NA |
Unroll factor | NA |
CQA cycles | 216.83 |
CQA cycles if no scalar integer | 210.33 |
CQA cycles if FP arith vectorized | 216.83 |
CQA cycles if fully vectorized | 212.23 |
Front-end cycles | 216.83 |
P0 cycles | 3.25 |
P1 cycles | 3.25 |
P2 cycles | 3.00 |
P3 cycles | 3.00 |
P4 cycles | 1.50 |
P5 cycles | 43.67 |
P6 cycles | 43.67 |
P7 cycles | 43.67 |
P8 cycles | 200.50 |
P9 cycles | 190.42 |
P10 cycles | 150.08 |
P11 cycles | 151.00 |
P12 cycles | 114.00 |
P13 cycles | 114.00 |
DIV/SQRT cycles | 0.00 |
Inter-iter dependencies cycles | 1 |
FE+BE cycles (UFS) | NA |
Stall cycles (UFS) | NA |
Nb insns | 333.00 |
Nb uops | 1301.00 |
Nb loads | 73.00 |
Nb stores | 13.00 |
Nb stack references | 72.00 |
FLOP/cycle | 16.97 |
Nb FLOP add-sub | 768.00 |
Nb FLOP mul | 1600.00 |
Nb FLOP fma | 608.00 |
Nb FLOP div | 0.00 |
Nb FLOP rcp | 64.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 32.00 |
Bytes/cycle | 22.27 |
Bytes prefetched | 0.00 |
Bytes loaded | 4388.00 |
Bytes stored | 440.00 |
Stride 0 | 3.00 |
Stride 1 | 0.00 |
Stride n | 2.00 |
Stride unknown | 2.00 |
Stride indirect | 0.00 |
Vectorization ratio all | 97.33 |
Vectorization ratio load | 98.55 |
Vectorization ratio store | 46.15 |
Vectorization ratio mul | 100.00 |
Vectorization ratio add_sub | 100.00 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 98.41 |
Vector-efficiency ratio all | 94.40 |
Vector-efficiency ratio load | 98.64 |
Vector-efficiency ratio store | 52.88 |
Vector-efficiency ratio mul | 100.00 |
Vector-efficiency ratio add_sub | 100.00 |
Vector-efficiency ratio fma | 100.00 |
Vector-efficiency ratio div_sqrt | 100.00 |
Vector-efficiency ratio other | 83.04 |
Path / |
Function | std::enable_if<((BondedKernelFlavor)0)==((BondedKernelFlavor)0), float>::type (anonymous namespace)::rbdihs<(BondedKernelFlavor)0>(int, int const*, t_iparams const*, float const (*) [3], float (*) [4], float (*) [3], t_pbc const*, float, float*, gmx::ArrayRef |
Source file and lines | bonded.cpp:2179-2256 |
Module | libgromacs_mpi.so.9.0.0 |
nb instructions | 333.50 |
nb uops | 1301.50 |
loop length | 2105.50 |
used x86 registers | 8 |
used mmx registers | 0 |
used xmm registers | 13 |
used ymm registers | 0 |
used zmm registers | 32 |
nb stack references | 72 |
ADD-SUB / MUL ratio | 0.48 |
micro-operation queue | 216.92 cycles |
front end | 216.92 cycles |
ALU0/BRU0 | ALU1 | ALU2 | ALU3 | BRU1 | AGU0 | AGU1 | AGU2 | FP0 | FP1 | FP2 | FP3 | FP4 | FP5 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 3.25 | 3.25 | 3.13 | 3.13 | 1.75 | 25.00 | 25.00 | 25.00 | 126.50 | 126.42 | 126.58 | 126.50 | 114.00 | 114.00 |
cycles | 3.25 | 3.25 | 3.13 | 3.13 | 1.75 | 43.67 | 43.67 | 43.67 | 200.50 | 190.42 | 150.08 | 151.00 | 114.00 | 114.00 |
Cycles executing div or sqrt instructions | NA |
Longest recurrence chain latency (RecMII) | 1.00 |
Front-end | 216.92 |
Dispatch | 200.50 |
Data deps. | 1.00 |
Overall L1 | 216.92 |
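The front-end figure follows directly from the micro-operation count: 1301.50 uops / 216.92 cycles is almost exactly 6 uops per cycle, so the model appears pinned to a 6-wide micro-operation queue, while dispatch (200.50 cycles) and the 1-cycle recurrence chain have slack; that is why the reported bottleneck is the micro-operation queue. The uop count itself is dominated by the twelve VGATHERDPS instructions, which decode to 81 uops each (972 of the ~1302 uops in the loop body).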
all | 65% |
load | 100% |
store | 0% |
mul | 100% |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 100% |
all | 99% |
load | 98% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 98% |
all | 97% |
load | 98% |
store | 46% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 98% |
all | 65% |
load | 100% |
store | 12% |
mul | 100% |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 85% |
all | 96% |
load | 98% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 82% |
all | 94% |
load | 98% |
store | 52% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 83% |
Function | std::enable_if<((BondedKernelFlavor)0)==((BondedKernelFlavor)0), float>::type (anonymous namespace)::rbdihs<(BondedKernelFlavor)0>(int, int const*, t_iparams const*, float const (*) [3], float (*) [4], float (*) [3], t_pbc const*, float, float*, gmx::ArrayRef |
Source file and lines | bonded.cpp:2179-2256 |
Module | libgromacs_mpi.so.9.0.0 |
nb instructions | 334 |
nb uops | 1302 |
loop length | 2108 |
used x86 registers | 8 |
used mmx registers | 0 |
used xmm registers | 13 |
used ymm registers | 0 |
used zmm registers | 32 |
nb stack references | 72 |
ADD-SUB / MUL ratio | 0.48 |
micro-operation queue | 217.00 cycles |
front end | 217.00 cycles |
ALU0/BRU0 | ALU1 | ALU2 | ALU3 | BRU1 | AGU0 | AGU1 | AGU2 | FP0 | FP1 | FP2 | FP3 | FP4 | FP5 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 3.25 | 3.25 | 3.25 | 3.25 | 2.00 | 25.00 | 25.00 | 25.00 | 126.50 | 126.42 | 126.58 | 126.50 | 114.00 | 114.00 |
cycles | 3.25 | 3.25 | 3.25 | 3.25 | 2.00 | 43.67 | 43.67 | 43.67 | 200.50 | 190.42 | 150.08 | 151.00 | 114.00 | 114.00 |
Cycles executing div or sqrt instructions | NA |
Longest recurrence chain latency (RecMII) | 1.00 |
Front-end | 217.00 |
Dispatch | 200.50 |
Data deps. | 1.00 |
Overall L1 | 217.00 |
all | 65% |
load | 100% |
store | 0% |
mul | 100% |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 100% |
all | 99% |
load | 98% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 98% |
all | 97% |
load | 98% |
store | 46% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 98% |
all | 65% |
load | 100% |
store | 12% |
mul | 100% |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 85% |
all | 96% |
load | 98% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 82% |
all | 94% |
load | 98% |
store | 52% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 83% |
Instruction | Nb FU | ALU0/BRU0 | ALU1 | ALU2 | ALU3 | BRU1 | AGU0 | AGU1 | AGU2 | FP0 | FP1 | FP2 | FP3 | FP4 | FP5 | Latency | Recip. throughput | Vectorization |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
VMOVDQA64 0xfc0(%RSP),%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVDQA64 0xbc0(%RSP),%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPMULLD %ZMM11,%ZMM0,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
MOV 0x60(%RSP),%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM2{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM3,%XMM3,%XMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
MOV 0x70(%RSP),%RCX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM3{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM4,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
MOV 0x68(%RSP),%RDX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM4{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VPMULLD 0x100(%RSP),%ZMM11,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM5,%XMM5,%XMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM5{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM6,%XMM6,%XMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM6{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM7{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VPMULLD 0xc0(%RSP),%ZMM11,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM8,%XMM8,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM8{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM9{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM10{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VPMULLD 0x80(%RSP),%ZMM11,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VPXOR %XMM11,%XMM11,%XMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | vect (25.0%) |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM11{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM12,%XMM12,%XMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM12{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM13,%XMM13,%XMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM13{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VSUBPS %ZMM5,%ZMM2,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM3,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM7,%ZMM4,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM5,%ZMM8,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM9,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM7,%ZMM10,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM8,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM12,%ZMM9,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM13,%ZMM10,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xe00(%RSP),%ZMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM3,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM7,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xdc0(%RSP),%ZMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM7,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM1,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xd80(%RSP),%ZMM14 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM14,%ZMM7,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM1,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xd40(%RSP),%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM7,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM1,%ZMM3,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xd00(%RSP),%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM2,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xcc0(%RSP),%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM3,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM7,%ZMM11,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xc80(%RSP),%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xc40(%RSP),%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM19,%ZMM7,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM11,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM3,%ZMM2,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xc00(%RSP),%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM20,%ZMM11,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM2,%ZMM7,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM6,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM7,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM13,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM4,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM14,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM4,%ZMM5,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM15,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM4,%ZMM6,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM16,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM6,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM17,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM7,%ZMM11,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM18,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM5,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM19,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM5,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM20,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM5,%ZMM11,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM10,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM6,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM13,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM8,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM14,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM9,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM15,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM10,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM9,%ZMM16,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM6,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM17,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM8,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM18,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM8,%ZMM19,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM11,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM9,%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM20,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM8,%ZMM14 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM3,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM7,%ZMM1,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM1,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM4,%ZMM2,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM2,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM5,%ZMM3,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM7,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM15,%ZMM4,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM14,%ZMM4,%ZMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM10,%ZMM5,%ZMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM5,%ZMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM14,%ZMM7,%ZMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM8,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM12,%ZMM9,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM9,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM13,%ZMM6,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM6,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM11,%ZMM8,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM18,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %XMM30,%XMM30,%XMM30 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VCMPPS $0x1,%ZMM16,%ZMM30,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRSQRT14PS %ZMM16,%ZMM17{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM17,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xb80(%RSP),%ZMM25 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM25,%ZMM17,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xb40(%RSP),%ZMM26 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM26,%ZMM17,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM6,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM8,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM18,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM9,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM17,%ZMM18,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VCMPPS $0xc,%ZMM30,%ZMM17,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VCMPPS $0xc,%ZMM30,%ZMM16,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VCMPPS $0x1,%ZMM30,%ZMM17,%K3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0xb00(%RSP),%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VBLENDMPS %ZMM30,%ZMM23,%ZMM18{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VMOVAPS %ZMM18,%ZMM18{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VBROADCASTSS -0x86c4ae(%RIP),%ZMM18{%K3} | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 5 | 1 | scal (6.3%) |
VCMPPS $0x1,%ZMM30,%ZMM16,%K3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRCP14PS %ZMM17,%ZMM19{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0xa80(%RSP),%ZMM27 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VANDPS 0xa40(%RSP),%ZMM16,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 1 | vect (100.0%) |
VMOVAPS 0xa00(%RSP),%ZMM28 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VCMPPS $0x1,%ZMM17,%ZMM28,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRCP14PS %ZMM17,%ZMM19{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0xac0(%RSP),%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VORPS %ZMM24,%ZMM18,%ZMM18{%K3} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VCMPPS $0x1,%ZMM30,%ZMM16,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS %ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM19,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM19,%ZMM17{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM17,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM16,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x9c0(%RSP),%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x980(%RSP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMOVAPS 0x940(%RSP),%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x900(%RSP),%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x8c0(%RSP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x880(%RSP),%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x840(%RSP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x800(%RSP),%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS %ZMM21,%ZMM16,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS %ZMM17,%ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VSUBPS %ZMM22,%ZMM23,%ZMM22{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %ZMM24,%ZMM22,%ZMM22{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VADDPS %ZMM22,%ZMM18,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM2,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM3,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM18,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM1,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM17,%ZMM18,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPTERNLOGD $-0x1c,0x740(%RSP),%ZMM16,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM6,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM8,%ZMM8,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM17,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM9,%ZMM9,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM11,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM12,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM19,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM13,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM17,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS 0x700(%RSP),%ZMM18,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS 0x6c0(%RSP),%ZMM18,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VCVTPS2DQ %ZMM19,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM19,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPTESTNMD 0x680(%RSP),%ZMM20,%K1 | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VPSUBD -0x844c7b(%RIP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 1 | vect (100.0%) |
VMOVDQA64 0x640(%RSP),%ZMM29 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPTESTMD %ZMM29,%ZMM21,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0x600(%RSP),%ZMM31 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS %ZMM31,%ZMM21{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VFMADD231PS 0x5c0(%RSP),%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS 0x580(%RSP),%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS 0x540(%RSP),%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS 0x500(%RSP),%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM18,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x4c0(%RSP),%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x480(%RSP),%ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x440(%RSP),%ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM19,%ZMM18,%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM18,%ZMM22,%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMOVAPS 0x400(%RSP),%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x3c0(%RSP),%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x380(%RSP),%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS %ZMM25,%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS %ZMM28,%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VBLENDMPS %ZMM18,%ZMM23,%ZMM19{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VADDPS 0x180(%RSP),%ZMM30,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %ZMM21,%ZMM19,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VMOVAPS 0x1c0(%RSP),%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM21,%ZMM21,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM22,%ZMM19,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM19,%ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x340(%RSP),%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS 0x200(%RSP),%ZMM24,%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM21,%ZMM22,%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM19,%ZMM22,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x300(%RSP),%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS 0x240(%RSP),%ZMM22,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM24,%ZMM21,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM19,%ZMM21,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x2c0(%RSP),%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS 0x280(%RSP),%ZMM21,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM22,%ZMM19,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM5,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM7,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM19,%ZMM22,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM4,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM19,%ZMM22,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMAXPS 0x7c0(%RSP),%ZMM19,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 2 | 1 | vect (100.0%) |
VMULPS 0x780(%RSP),%ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMAXPS %ZMM22,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 2 | 1 | vect (100.0%) |
VMAXPS %ZMM22,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 2 | 1 | vect (100.0%) |
VRSQRT14PS %ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM7,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM22,%ZMM19,%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM26,%ZMM22,%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM25,%ZMM22,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM22,%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM2,%ZMM3,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM1,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM2,%ZMM1,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRCP14PS %ZMM16,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM4,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM2,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VPTESTMD %ZMM29,%ZMM20,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRCP14PS %ZMM17,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM4,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM4,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM14,%ZMM5,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM5,%ZMM7,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM5,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM19,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM5,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM5,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM15,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM1,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM3,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS %ZMM23,%ZMM18{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VMOVAPS %ZMM31,%ZMM1{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VXORPS %ZMM1,%ZMM18,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VMULPS %ZMM1,%ZMM21,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM7,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM6,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM8,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM9,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM11,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM12,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM13,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS %ZMM3,0xf80(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
VMOVAPS %ZMM2,0xf00(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
VMOVAPS %ZMM6,0xf40(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xf00(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x20(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VMOVAPS %ZMM7,0xec0(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xec0(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x28(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VMOVAPS %ZMM8,0xe80(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xe80(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x30(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VMOVAPS %ZMM1,0xe40(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xe40(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x38(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
MOV 0x78(%RSP),%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
MOV %RAX,0x40(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
LEA 0xf40(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x18(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
LEA 0xf80(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x10(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
LEA 0x100(%RSP),%RDX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
LEA 0xc0(%RSP),%R8 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
LEA 0x80(%RSP),%R9 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
CALL bde790 <_ZN12_GLOBAL__N_124do_dih_fup_noshiftf_simdEPKiS1_S1_S1_N3gmx9SimdFloatES3_S3_S3_S3_S3_S3_S3_PA4_f@@608> | 2 | 0.50 | 0 | 0 | 0 | 0.50 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | N/A |
ADD $0x50,%R14 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
CMP %RBX,%R14 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
JAE bc8b74 <_ZN12_GLOBAL__N_16rbdihsIL18BondedKernelFlavor0EEENSt9enable_ifIXeqT_LS1_0EEfE4typeEiPKiPK9t_iparamsPA3_KfPA4_fPA3_fPK5t_pbcfPfN3gmx8ArrayRefISA_EEP8t_fcdataP12t_disresdataP12t_oriresdataPi+0xce4> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50-1 | N/A |
MOV %R14,%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | N/A |
XOR %ECX,%ECX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | N/A |
MOV %R14D,%EDX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | N/A |
JMP bc8ab9 <_ZN12_GLOBAL__N_16rbdihsIL18BondedKernelFlavor0EEENSt9enable_ifIXeqT_LS1_0EEfE4typeEiPKiPK9t_iparamsPA3_KfPA4_fPA3_fPK5t_pbcfPfN3gmx8ArrayRefISA_EEP8t_fcdataP12t_disresdataP12t_oriresdataPi+0xc29> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | N/A |
JMP bc8230 <_ZN12_GLOBAL__N_16rbdihsIL18BondedKernelFlavor0EEENSt9enable_ifIXeqT_LS1_0EEfE4typeEiPKiPK9t_iparamsPA3_KfPA4_fPA3_fPK5t_pbcfPfN3gmx8ArrayRefISA_EEP8t_fcdataP12t_disresdataP12t_oriresdataPi+0x3a0> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | N/A |
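The instruction mix above (indexed VGATHERDPS loads of per-atom x/y/z coordinates, VRNDSCALEPS-based shift rounding, cross products built from VMULPS/VFNMADD231PS pairs, VRSQRT14PS/VRCP14PS reciprocal refinement, and chains of VFMADD213PS polynomial steps) is consistent with a SIMD evaluation of Ryckaert-Bellemans dihedrals, as the rbdihs symbol and the call to do_dih_fup_noshiftf_simd suggest. Below is a minimal scalar sketch of that computation, under that assumption; the helper names, coordinates and coefficients are made up for illustration and this is not the GROMACS source, which processes 16 dihedrals per iteration in ZMM registers, applies periodic-boundary shifts, and uses masked polynomial approximations instead of libm calls.

// Illustrative scalar version of a Ryckaert-Bellemans dihedral evaluation.
// Not GROMACS code: names, coordinates and coefficients are invented for the example.
#include <array>
#include <cmath>
#include <cstdio>

using Vec3 = std::array<double, 3>;

static Vec3 sub(const Vec3& a, const Vec3& b) { return {a[0] - b[0], a[1] - b[1], a[2] - b[2]}; }
static Vec3 cross(const Vec3& a, const Vec3& b)
{
    return {a[1] * b[2] - a[2] * b[1], a[2] * b[0] - a[0] * b[2], a[0] * b[1] - a[1] * b[0]};
}
static double dot(const Vec3& a, const Vec3& b) { return a[0] * b[0] + a[1] * b[1] + a[2] * b[2]; }

// Dihedral angle for atoms i-j-k-l via the standard atan2 formulation.
static double dihedral(const Vec3& ri, const Vec3& rj, const Vec3& rk, const Vec3& rl)
{
    const Vec3 b1 = sub(rj, ri), b2 = sub(rk, rj), b3 = sub(rl, rk);
    const Vec3 m = cross(b1, b2), n = cross(b2, b3);
    const double y = dot(cross(m, n), b2) / std::sqrt(dot(b2, b2));
    const double x = dot(m, n);
    return std::atan2(y, x);
}

// Ryckaert-Bellemans potential: V(psi) = sum_n C_n cos(psi)^n with psi = phi - 180 deg,
// i.e. cos(psi) = -cos(phi); evaluated here as a simple power series.
static double rbEnergy(double phi, const std::array<double, 6>& c)
{
    const double cosPsi = -std::cos(phi);
    double v = 0.0, p = 1.0;
    for (double cn : c)
    {
        v += cn * p;
        p *= cosPsi;
    }
    return v;
}

int main()
{
    // Made-up coordinates (nm) and RB coefficients (kJ/mol) for demonstration only.
    const Vec3 ri{0.00, 0.00, 0.00}, rj{0.15, 0.00, 0.00}, rk{0.25, 0.12, 0.00}, rl{0.30, 0.15, 0.14};
    const std::array<double, 6> c{9.28, 12.16, -13.12, -3.06, 26.24, -31.5};
    const double phi = dihedral(ri, rj, rk, rl);
    std::printf("phi = %.3f rad, V_RB = %.3f kJ/mol\n", phi, rbEnergy(phi, c));
    return 0;
}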
Function | std::enable_if<((BondedKernelFlavor)0)==((BondedKernelFlavor)0), float>::type (anonymous namespace)::rbdihs<(BondedKernelFlavor)0>(int, int const*, t_iparams const*, float const (*) [3], float (*) [4], float (*) [3], t_pbc const*, float, float*, gmx::ArrayRef |
Source file and lines | bonded.cpp:2179-2256 |
Module | libgromacs_mpi.so.9.0.0 |
nb instructions | 333 |
nb uops | 1301 |
loop length | 2103 |
used x86 registers | 8 |
used mmx registers | 0 |
used xmm registers | 13 |
used ymm registers | 0 |
used zmm registers | 32 |
nb stack references | 72 |
ADD-SUB / MUL ratio | 0.48 |
micro-operation queue | 216.83 cycles |
front end | 216.83 cycles |
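Two of the figures above fit together arithmetically. The uop count is dominated by the twelve VGATHERDPS instructions, each modeled at 81 uops in the table, and the front-end estimate is what the uop total costs at a 6-uop-per-cycle issue rate (an assumption inferred from the numbers, not stated in the report):

    12 gathers x 81 uops = 972 of the 1301 total uops
    1301 uops / 6 uops per cycle ~= 216.83 cycles (micro-operation queue / front end)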
| ALU0/BRU0 | ALU1 | ALU2 | ALU3 | BRU1 | AGU0 | AGU1 | AGU2 | FP0 | FP1 | FP2 | FP3 | FP4 | FP5 |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 3.25 | 3.25 | 3.00 | 3.00 | 1.50 | 25.00 | 25.00 | 25.00 | 126.50 | 126.42 | 126.58 | 126.50 | 114.00 | 114.00 |
cycles | 3.25 | 3.25 | 3.00 | 3.00 | 1.50 | 43.67 | 43.67 | 43.67 | 200.50 | 190.42 | 150.08 | 151.00 | 114.00 | 114.00 |
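The Dispatch bound reported below appears to be the largest per-port cycle count in this table, i.e. the pressure on FP0:

    Dispatch ~= max over ports of the cycles row = 200.50 cycles (FP0; FP1 is close behind at 190.42)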
Cycles executing div or sqrt instructions | NA |
Longest recurrence chain latency (RecMII) | 1.00 |
Front-end | 216.83 |
Dispatch | 200.50 |
Data deps. | 1.00 |
Overall L1 | 216.83 |
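On this model the loop therefore looks front-end bound rather than port bound: the overall L1 estimate is the largest of the three component bounds,

    Overall L1 = max(Front-end, Dispatch, RecMII) = max(216.83, 200.50, 1.00) = 216.83 cycles

so shrinking the uop stream (above all the 81-uop gathers) would likely be the first lever before the ~200-cycle FP-port pressure becomes the limiter.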
all | 65% |
load | 100% |
store | 0% |
mul | 100% |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 100% |
all | 99% |
load | 98% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 98% |
all | 97% |
load | 98% |
store | 46% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 98% |
all | 65% |
load | 100% |
store | 12% |
mul | 100% |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 85% |
all | 96% |
load | 98% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 82% |
all | 94% |
load | 98% |
store | 52% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 83% |
Instruction | Nb FU | ALU0/BRU0 | ALU1 | ALU2 | ALU3 | BRU1 | AGU0 | AGU1 | AGU2 | FP0 | FP1 | FP2 | FP3 | FP4 | FP5 | Latency | Recip. throughput | Vectorization |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
VMOVDQA64 0xfc0(%RSP),%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVDQA64 0xbc0(%RSP),%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPMULLD %ZMM11,%ZMM0,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
MOV 0x60(%RSP),%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM2{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM3,%XMM3,%XMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
MOV 0x70(%RSP),%RCX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM3{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM4,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
MOV 0x68(%RSP),%RDX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM4{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VPMULLD 0x100(%RSP),%ZMM11,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM5,%XMM5,%XMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM5{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM6,%XMM6,%XMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM6{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM7{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VPMULLD 0xc0(%RSP),%ZMM11,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM8,%XMM8,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM8{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM9{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM10{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VPMULLD 0x80(%RSP),%ZMM11,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VPXOR %XMM11,%XMM11,%XMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | vect (25.0%) |
VGATHERDPS (%RAX,%ZMM1,4),%ZMM11{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM12,%XMM12,%XMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RCX,%ZMM1,4),%ZMM12{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
KXNORW %K0,%K0,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 1 | 0.50 | N/A |
VXORPS %XMM13,%XMM13,%XMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VGATHERDPS (%RDX,%ZMM1,4),%ZMM13{%K1} | 81 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.50 | 6.67 | 7.17 | 5.67 | 9 | 9 | 0-21 | 16.56 | vect (100.0%) |
VSUBPS %ZMM5,%ZMM2,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM3,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM7,%ZMM4,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM5,%ZMM8,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM9,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM7,%ZMM10,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM8,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM12,%ZMM9,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM13,%ZMM10,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xe00(%RSP),%ZMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM3,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM7,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xdc0(%RSP),%ZMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM7,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM1,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xd80(%RSP),%ZMM14 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM14,%ZMM7,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM1,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xd40(%RSP),%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM7,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM1,%ZMM3,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xd00(%RSP),%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM2,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xcc0(%RSP),%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM3,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM7,%ZMM11,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xc80(%RSP),%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xc40(%RSP),%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM19,%ZMM7,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM11,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM3,%ZMM2,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xc00(%RSP),%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM20,%ZMM11,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM2,%ZMM7,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM6,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM7,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM13,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM4,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM14,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM4,%ZMM5,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM15,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM4,%ZMM6,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM16,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM6,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM17,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM7,%ZMM11,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM18,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM5,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM19,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM5,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM20,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM5,%ZMM11,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM10,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM6,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM13,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM8,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM14,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM9,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM15,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM10,%ZMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM9,%ZMM16,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM6,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM17,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM11,%ZMM8,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM18,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM8,%ZMM19,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM11,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM9,%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM20,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VSUBPS %ZMM6,%ZMM8,%ZMM14 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM3,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM7,%ZMM1,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM1,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM4,%ZMM2,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM2,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM5,%ZMM3,%ZMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM7,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM15,%ZMM4,%ZMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM14,%ZMM4,%ZMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM10,%ZMM5,%ZMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM5,%ZMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM14,%ZMM7,%ZMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM8,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM12,%ZMM9,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM9,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM13,%ZMM6,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM6,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD231PS %ZMM11,%ZMM8,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM18,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %XMM30,%XMM30,%XMM30 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (25.0%) |
VCMPPS $0x1,%ZMM16,%ZMM30,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRSQRT14PS %ZMM16,%ZMM17{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM17,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xb80(%RSP),%ZMM25 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM25,%ZMM17,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0xb40(%RSP),%ZMM26 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM26,%ZMM17,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM6,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM8,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM18,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM9,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM17,%ZMM18,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VCMPPS $0xc,%ZMM30,%ZMM17,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VCMPPS $0xc,%ZMM30,%ZMM16,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VCMPPS $0x1,%ZMM30,%ZMM17,%K3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0xb00(%RSP),%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VBLENDMPS %ZMM30,%ZMM23,%ZMM18{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VMOVAPS %ZMM18,%ZMM18{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VBROADCASTSS -0x86c4ae(%RIP),%ZMM18{%K3} | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 5 | 1 | scal (6.3%) |
VCMPPS $0x1,%ZMM30,%ZMM16,%K3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRCP14PS %ZMM17,%ZMM19{%K1}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0xa80(%RSP),%ZMM27 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VANDPS 0xa40(%RSP),%ZMM16,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 1 | vect (100.0%) |
VMOVAPS 0xa00(%RSP),%ZMM28 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VCMPPS $0x1,%ZMM17,%ZMM28,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRCP14PS %ZMM17,%ZMM19{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0xac0(%RSP),%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VORPS %ZMM24,%ZMM18,%ZMM18{%K3} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VCMPPS $0x1,%ZMM30,%ZMM16,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS %ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM19,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM19,%ZMM17{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM17,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM16,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x9c0(%RSP),%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x980(%RSP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMOVAPS 0x940(%RSP),%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x900(%RSP),%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x8c0(%RSP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x880(%RSP),%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x840(%RSP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x800(%RSP),%ZMM20,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS %ZMM21,%ZMM16,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS %ZMM17,%ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VSUBPS %ZMM22,%ZMM23,%ZMM22{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %ZMM24,%ZMM22,%ZMM22{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VADDPS %ZMM22,%ZMM18,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM2,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM3,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM18,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM1,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM17,%ZMM18,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPTERNLOGD $-0x1c,0x740(%RSP),%ZMM16,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 1 | vect (100.0%) |
VMULPS %ZMM6,%ZMM6,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM8,%ZMM8,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM17,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM9,%ZMM9,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM16,%ZMM17,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM11,%ZMM11,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM12,%ZMM12,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM19,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM13,%ZMM13,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM17,%ZMM19,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS 0x700(%RSP),%ZMM18,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS 0x6c0(%RSP),%ZMM18,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VCVTPS2DQ %ZMM19,%ZMM20 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRNDSCALEPS $0,%ZMM19,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPTESTNMD 0x680(%RSP),%ZMM20,%K1 | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VPSUBD -0x844c7b(%RIP),%ZMM20,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 1 | vect (100.0%) |
VMOVDQA64 0x640(%RSP),%ZMM29 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VPTESTMD %ZMM29,%ZMM21,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMOVAPS 0x600(%RSP),%ZMM31 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS %ZMM31,%ZMM21{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VFMADD231PS 0x5c0(%RSP),%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS 0x580(%RSP),%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS 0x540(%RSP),%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD231PS 0x500(%RSP),%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM18,%ZMM18,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x4c0(%RSP),%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x480(%RSP),%ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x440(%RSP),%ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM19,%ZMM18,%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM18,%ZMM22,%ZMM23 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMOVAPS 0x400(%RSP),%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS 0x3c0(%RSP),%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS 0x380(%RSP),%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS %ZMM25,%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VFMADD213PS %ZMM28,%ZMM19,%ZMM18 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VBLENDMPS %ZMM18,%ZMM23,%ZMM19{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VADDPS 0x180(%RSP),%ZMM30,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VXORPS %ZMM21,%ZMM19,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VMOVAPS 0x1c0(%RSP),%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM21,%ZMM21,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM22,%ZMM19,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM19,%ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x340(%RSP),%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS 0x200(%RSP),%ZMM24,%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM21,%ZMM22,%ZMM24 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM19,%ZMM22,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x300(%RSP),%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS 0x240(%RSP),%ZMM22,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM24,%ZMM21,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM19,%ZMM21,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS 0x2c0(%RSP),%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS 0x280(%RSP),%ZMM21,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM22,%ZMM19,%ZMM21 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM5,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM7,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM19,%ZMM22,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM4,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM19,%ZMM22,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMAXPS 0x7c0(%RSP),%ZMM19,%ZMM19 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 2 | 1 | vect (100.0%) |
VMULPS 0x780(%RSP),%ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMAXPS %ZMM22,%ZMM16,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 2 | 1 | vect (100.0%) |
VMAXPS %ZMM22,%ZMM17,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 2 | 1 | vect (100.0%) |
VRSQRT14PS %ZMM19,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMULPS %ZMM7,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM7,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM22,%ZMM19,%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFMADD213PS %ZMM26,%ZMM22,%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM25,%ZMM22,%ZMM22 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM22,%ZMM15 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM2,%ZMM3,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM1,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM2,%ZMM1,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VRCP14PS %ZMM16,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMULPS %ZMM10,%ZMM4,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM2,%ZMM16 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VPTESTMD %ZMM29,%ZMM20,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VRCP14PS %ZMM17,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 5 | 1 | vect (100.0%) |
VMULPS %ZMM16,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VFNMADD213PS %ZMM27,%ZMM4,%ZMM17 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 4 | 1 | vect (100.0%) |
VMULPS %ZMM17,%ZMM4,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM14,%ZMM5,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM5,%ZMM7,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VADDPS %ZMM5,%ZMM3,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM19,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM5,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM4,%ZMM5,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM15,%ZMM15,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM1,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM5,%ZMM3,%ZMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS %ZMM23,%ZMM18{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VMOVAPS %ZMM31,%ZMM1{%K2}{z} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | vect (100.0%) |
VXORPS %ZMM1,%ZMM18,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 1 | 0.50 | vect (100.0%) |
VMULPS %ZMM1,%ZMM21,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM2,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM7,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM6,%ZMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM8,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM2,%ZMM9,%ZMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM11,%ZMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM12,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMULPS %ZMM1,%ZMM13,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 3 | 1 | vect (100.0%) |
VMOVAPS %ZMM3,0xf80(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
VMOVAPS %ZMM2,0xf00(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
VMOVAPS %ZMM6,0xf40(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xf00(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x20(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VMOVAPS %ZMM7,0xec0(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xec0(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x28(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VMOVAPS %ZMM8,0xe80(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xe80(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x30(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
VMOVAPS %ZMM1,0xe40(%RSP) | 2 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 1 | 1 | 5 | 2 | vect (100.0%) |
LEA 0xe40(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x38(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
MOV 0x78(%RSP),%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.33 | N/A |
MOV %RAX,0x40(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
LEA 0xf40(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x18(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
LEA 0xf80(%RSP),%RAX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
MOV %RAX,0x10(%RSP) | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 | scal (12.5%) |
LEA 0x100(%RSP),%RDX | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
LEA 0xc0(%RSP),%R8 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
LEA 0x80(%RSP),%R9 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
CALL bde790 <_ZN12_GLOBAL__N_124do_dih_fup_noshiftf_simdEPKiS1_S1_S1_N3gmx9SimdFloatES3_S3_S3_S3_S3_S3_S3_PA4_f@@608> | 2 | 0.50 | 0 | 0 | 0 | 0.50 | 0.33 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 | N/A |
ADD $0x50,%R14 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
CMP %RBX,%R14 | 1 | 0.25 | 0.25 | 0.25 | 0.25 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.25 | N/A |
JAE bc8b74 <_ZN12_GLOBAL__N_16rbdihsIL18BondedKernelFlavor0EEENSt9enable_ifIXeqT_LS1_0EEfE4typeEiPKiPK9t_iparamsPA3_KfPA4_fPA3_fPK5t_pbcfPfN3gmx8ArrayRefISA_EEP8t_fcdataP12t_disresdataP12t_oriresdataPi+0xce4> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50-1 | N/A |
MOV %R14,%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | N/A |
XOR %ECX,%ECX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.25 | N/A |
MOV %R14D,%EDX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 | N/A |
JMP bc8ab9 <_ZN12_GLOBAL__N_16rbdihsIL18BondedKernelFlavor0EEENSt9enable_ifIXeqT_LS1_0EEfE4typeEiPKiPK9t_iparamsPA3_KfPA4_fPA3_fPK5t_pbcfPfN3gmx8ArrayRefISA_EEP8t_fcdataP12t_disresdataP12t_oriresdataPi+0xc29> | 1 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | N/A |
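The call target `do_dih_fup_noshiftf_simd` and the enclosing symbol `rbdihs<BondedKernelFlavor0>` identify this loop as the SIMD Ryckaert-Bellemans dihedral kernel: the VFMADD chains evaluate the polynomial approximations needed for the dihedral angle and the RB cosine series, the VRCP14PS/VRSQRT14PS instructions followed by an FNMADD and a multiply are the usual one-step Newton refinements of the reciprocal and reciprocal square root, and the resulting per-lane force factors are spilled to the stack and passed to the force-update helper. As a reading aid, below is a minimal scalar sketch of the work done per SIMD lane, assuming the standard RB form V(psi) = sum_{n=0..5} C_n cos^n(psi); the helper functions, names, omission of PBC, and the simplified sign convention are illustrative only and are not the bonded.cpp interfaces.

```cpp
#include <array>
#include <cmath>

using Vec3 = std::array<float, 3>;

static Vec3  sub(const Vec3& a, const Vec3& b) { return {a[0] - b[0], a[1] - b[1], a[2] - b[2]}; }
static float dot(const Vec3& a, const Vec3& b) { return a[0]*b[0] + a[1]*b[1] + a[2]*b[2]; }
static Vec3  cross(const Vec3& a, const Vec3& b)
{
    return {a[1]*b[2] - a[2]*b[1], a[2]*b[0] - a[0]*b[2], a[0]*b[1] - a[1]*b[0]};
}

// Hypothetical helper: one dihedral i-j-k-l with RB coefficients C[0..5].
// Returns the energy and writes dV/dcos(psi) to *dvdcos, which is the
// quantity the force update needs.  PBC wrapping of the bond vectors is
// omitted here for brevity.
float rbDihedral(const Vec3& xi, const Vec3& xj, const Vec3& xk, const Vec3& xl,
                 const float C[6], float* dvdcos)
{
    const Vec3 rij = sub(xi, xj);
    const Vec3 rkj = sub(xk, xj);
    const Vec3 rkl = sub(xk, xl);

    const Vec3 m = cross(rij, rkj);               // normal of plane (i, j, k)
    const Vec3 n = cross(rkj, rkl);               // normal of plane (j, k, l)
    const float phi = std::atan2(dot(cross(m, n), rkj) / std::sqrt(dot(rkj, rkj)),
                                 dot(m, n));      // dihedral angle

    const float c = -std::cos(phi);               // RB convention: psi = phi - 180 deg

    // Horner evaluation of sum_{n=0..5} C[n]*c^n and its derivative; the SIMD
    // kernel performs the analogous recurrences with the VFMADD chains above.
    float v  = C[5];
    float dv = 0.0f;
    for (int p = 4; p >= 0; --p) {
        dv = dv * c + v;
        v  = v  * c + C[p];
    }
    *dvdcos = dv;
    return v;
}
```

In the vectorized loop each of these scalar steps acts on 16 dihedrals at once (one per ZMM lane), which is why the listing is dominated by ZMM multiplies, adds, and FMAs.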
Run 1x1 | Number processes: 1; Number processes per node: 1; OMP_NUM_THREADS: 1 |
---|---|
Run 2x1 | Number processes: 2; Number processes per node: 2; OMP_NUM_THREADS: 1 |
Run 4x1 | Number processes: 4; Number processes per node: 4; OMP_NUM_THREADS: 1 |
Run 8x1 | Number processes: 8; Number processes per node: 8; OMP_NUM_THREADS: 1 |
Run 16x1 | Number processes: 16; Number processes per node: 16; OMP_NUM_THREADS: 1 |
Run 32x1 | Number processes: 32; Number processes per node: 32; OMP_NUM_THREADS: 1 |
Run 64x1 | Number processes: 64; Number processes per node: 64; OMP_NUM_THREADS: 1 |
Run 128x1 | Number processes: 128; Number processes per node: 128; OMP_NUM_THREADS: 1 |
Run 192x1 | Number processes: 192; Number nodes: 1; Number processes per node: 192; Run Command: <executable> mdrun -s ion_channel.tpr -nsteps 10000 -pin on -deffnm aocc; MPI Command: mpirun -genv I_MPI_FABRICS=shm -n <number_processes>; Dataset: ; Run Directory: .; OMP_NUM_THREADS: 1 |
(1x1) Efficiency | (1x1) Potential Speed-Up (%) | (2x1) Efficiency | (2x1) Potential Speed-Up (%) | (4x1) Efficiency | (4x1) Potential Speed-Up (%) | (8x1) Efficiency | (8x1) Potential Speed-Up (%) | (16x1) Efficiency | (16x1) Potential Speed-Up (%) | (32x1) Efficiency | (32x1) Potential Speed-Up (%) | (64x1) Efficiency | (64x1) Potential Speed-Up (%) | (128x1) Efficiency | (128x1) Potential Speed-Up (%) | (192x1) Efficiency | (192x1) Potential Speed-Up (%) |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
1 | 0 | 0.99 | 0 | 0.95 | 0 | 0.99 | 0 | 0.87 | 0.01 | 1.15 | -0 | 1.15 | -0 | 1.01 | -0 | 0.88 | 0 |
Run | Number of threads | Efficiency (ideal is 1) | Speedup | Ideal Speedup | Time (s) | Coverage (%) |
---|---|---|---|---|---|---|
1x1 | 1 | 1 | 1 | 1 | 0.610 | 0.0732
2x1 | 2 | 0.99 | 1.98 | 2 | 0.310 | 0.0678
4x1 | 4 | 0.95 | 3.78 | 4 | 0.220 | 0.0648
8x1 | 8 | 0.99 | 7.93 | 8 | 0.150 | 0.0497
16x1 | 6 | 0.87 | 13.88 | 16 | 0.250 | 0.0531
32x1 | 8 | 1.15 | 36.79 | 32 | 0.180 | 0.0323
64x1 | 16 | 1.15 | 73.41 | 64 | 0.100 | 0.0209
128x1 | 19 | 1.01 | 129.37 | 128 | 0.065 | 0.0277
192x1 | 37 | 0.88 | 168.03 | 192 | 0.050 | 0.0270
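As a reading aid for the table above, the Efficiency column is simply the measured speedup of this loop divided by the ideal (linear) speedup for that process count; entries above 1 (the 32x1 and 64x1 rows) therefore report super-linear scaling of this loop's share of the runtime. The relation, checked against the 16x1 row, is:

```latex
% Efficiency as reported in the scalability table:
% measured speedup normalized by the ideal speedup N.
E(N) \;=\; \frac{S_{\mathrm{measured}}(N)}{S_{\mathrm{ideal}}(N)} \;=\; \frac{S(N)}{N},
\qquad E(16) \;=\; \frac{13.88}{16} \;\approx\; 0.87
```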