Loop Id: 110 | Module: exec | Source: timestep.c:88-94 | Coverage: 1.76% |
---|
0x412fe0 VPMOVSXDQ 0x10(%R12,%RBX,4),%YMM2 [3] |
0x412fe7 KXNORW %K0,%K0,%K1 |
0x412feb VXORPD %XMM7,%XMM7,%XMM7 |
0x412fef VPADDQ %YMM2,%YMM2,%YMM2 |
0x412ff3 VGATHERQPD 0x8(%R9,%YMM2,8),%YMM7{%K1} [1] |
0x412ffb VPMOVSXDQ (%R12,%RBX,4),%YMM2 [3] |
0x413001 KXNORW %K0,%K0,%K1 |
0x413005 VXORPD %XMM8,%XMM8,%XMM8 |
0x41300a VPADDQ %YMM2,%YMM2,%YMM2 |
0x41300e VGATHERQPD 0x8(%R9,%YMM2,8),%YMM8{%K1} [4] |
0x413016 VDIVPD %YMM7,%YMM22,%YMM7 |
0x41301c VMOVUPD 0x80(%R11,%R13,1),%YMM9 [2] |
0x413026 VMOVUPD 0x20(%R11,%R13,1),%YMM10 [2] |
0x41302d VMOVUPD 0x10(%R11,%R13,1),%XMM13 [2] |
0x413034 VMOVUPD 0x20(%R11,%R13,1),%XMM14 [2] |
0x41303b VMOVUPD 0x70(%R11,%R13,1),%XMM15 [2] |
0x413042 VBROADCASTSD 0x50(%R11,%R13,1),%YMM11 [2] |
0x413049 VBROADCASTSD 0xb0(%R11,%R13,1),%YMM12 [2] |
0x413053 VMOVUPD 0x80(%R11,%R13,1),%XMM3 [2] |
0x41305d VMOVUPD 0x20(%R10,%R13,1),%YMM2 [5] |
0x413064 VMOVUPD 0x10(%R10,%R13,1),%XMM18 [5] |
0x41306c VMOVUPD 0x20(%R10,%R13,1),%XMM4 [5] |
0x413073 VINSERTF128 $0x1,0x40(%R11,%R13,1),%YMM13,%YMM13 [2] |
0x41307b VINSERTF128 $0x1,0xa0(%R11,%R13,1),%YMM15,%YMM5 [2] |
0x413086 VBLENDPD $0xc,0x40(%R11,%R13,1),%YMM14,%YMM14 [2] |
0x41308e VBLENDPD $0xc,0xa0(%R11,%R13,1),%YMM3,%YMM3 [2] |
0x413099 VINSERTF32X4 $0x1,0x40(%R10,%R13,1),%YMM18,%YMM6 [5] |
0x4130a2 VBLENDPD $0xc,0x40(%R10,%R13,1),%YMM4,%YMM4 [5] |
0x4130aa VBLENDPD $0x3,(%R11,%R13,1),%YMM10,%YMM15 [2] |
0x4130b1 VBLENDPD $0xa,%YMM14,%YMM13,%YMM14 |
0x4130b7 VBLENDPD $0xa,%YMM13,%YMM15,%YMM1 |
0x4130bd VSHUFPD $0x5,%YMM10,%YMM15,%YMM10 |
0x4130c3 VBLENDPD $0x3,0x60(%R11,%R13,1),%YMM9,%YMM13 [2] |
0x4130cb VBLENDPD $0xa,%YMM3,%YMM5,%YMM15 |
0x4130d1 VBLENDPD $0xa,%YMM5,%YMM13,%YMM3 |
0x4130d7 VSHUFPD $0x5,%YMM9,%YMM13,%YMM5 |
0x4130dd VBLENDPD $0x3,(%R10,%R13,1),%YMM2,%YMM0 [5] |
0x4130e4 VBLENDPD $0xa,%YMM4,%YMM6,%YMM9 |
0x4130ea VBLENDPD $0xa,%YMM6,%YMM0,%YMM13 |
0x4130f0 VSHUFPD $0x5,%YMM2,%YMM0,%YMM0 |
0x4130f5 VMOVUPD 0x80(%R10,%R13,1),%YMM2 [5] |
0x4130ff VMOVUPD 0x70(%R10,%R13,1),%XMM4 [5] |
0x413106 VBLENDPD $0x8,%YMM11,%YMM10,%YMM6 |
0x41310c VBROADCASTSD 0x50(%R10,%R13,1),%YMM10 [5] |
0x413113 VBLENDPD $0x8,%YMM12,%YMM5,%YMM5 |
0x413119 VBROADCASTSD 0xb0(%R10,%R13,1),%YMM12 [5] |
0x413123 VBLENDPD $0x8,%YMM10,%YMM0,%YMM11 |
0x413129 VBLENDPD $0x3,0x60(%R10,%R13,1),%YMM2,%YMM0 [5] |
0x413131 VSHUFPD $0x5,%YMM2,%YMM0,%YMM2 |
0x413136 VBLENDPD $0x8,%YMM12,%YMM2,%YMM2 |
0x41313c VINSERTF128 $0x1,0xa0(%R10,%R13,1),%YMM4,%YMM4 [5] |
0x413147 VBLENDPD $0xa,%YMM4,%YMM0,%YMM10 |
0x41314d VFMADD231PD %YMM3,%YMM7,%YMM10 |
0x413152 VDIVPD %YMM8,%YMM22,%YMM0 |
0x413158 VFMADD231PD %YMM1,%YMM0,%YMM13 |
0x41315d VFMADD231PD %YMM5,%YMM7,%YMM2 |
0x413162 VFMADD231PD %YMM6,%YMM0,%YMM11 |
0x413167 VMOVUPD 0x80(%R10,%R13,1),%XMM1 [5] |
0x413171 VBLENDPD $0xc,0xa0(%R10,%R13,1),%YMM1,%YMM1 [5] |
0x41317c VBLENDPD $0xa,%YMM1,%YMM4,%YMM8 |
0x413182 VFMADD231PD %YMM15,%YMM7,%YMM8 |
0x413187 VFMADD231PD %YMM14,%YMM0,%YMM9 |
0x41318c VMOVAPD %YMM13,%YMM0 |
0x413190 VPERMT2PD %YMM11,%YMM16,%YMM0 |
0x413196 VMOVAPD %YMM2,%YMM1 |
0x41319a VPERMT2PD %YMM10,%YMM17,%YMM1 |
0x4131a0 VMOVAPD %YMM2,%YMM3 |
0x4131a4 VPERMT2PD %YMM10,%YMM19,%YMM3 |
0x4131aa VPERMT2PD %YMM2,%YMM16,%YMM10 |
0x4131b0 VMOVAPD %YMM11,%YMM2 |
0x4131b4 VPERMT2PD %YMM13,%YMM17,%YMM2 |
0x4131ba VPERMT2PD %YMM13,%YMM19,%YMM11 |
0x4131c0 VPERMT2PD %YMM8,%YMM20,%YMM10 |
0x4131c6 VBLENDPD $0x2,%YMM8,%YMM3,%YMM3 |
0x4131cc VPERMT2PD %YMM1,%YMM21,%YMM8 |
0x4131d2 VBLENDPD $0x2,%YMM9,%YMM11,%YMM1 |
0x4131d8 VPERMT2PD %YMM9,%YMM20,%YMM0 |
0x4131de VPERMT2PD %YMM2,%YMM21,%YMM9 |
0x4131e4 VMOVUPD %YMM3,0x80(%R10,%R13,1) [5] |
0x4131ee VMOVUPD %YMM1,0x20(%R10,%R13,1) [5] |
0x4131f5 VMOVUPD %YMM9,0x40(%R10,%R13,1) [5] |
0x4131fc VMOVUPD %YMM8,0xa0(%R10,%R13,1) [5] |
0x413206 VMOVUPD %YMM10,0x60(%R10,%R13,1) [5] |
0x41320d VMOVUPD %YMM0,(%R10,%R13,1) [5] |
0x413213 ADD $0x8,%RBX |
0x413217 ADD $0xc0,%R13 |
0x41321e CMP %EDX,%EBX |
0x413220 JLE 412fe0 |
/scratch_na/users/xoserete/qaas_runs/171-416-7289/intel/CoMD/build/CoMD/CoMD/src-openmp/timestep.c: 88 - 94 |
-------------------------------------------------------------------------------- |
88: for (int iOff=MAXATOMS*iBox,ii=0; ii<s->boxes->nAtoms[iBox]; ii++,iOff++) |
89: { |
90: int iSpecies = s->atoms->iSpecies[iOff]; |
91: real_t invMass = 1.0/s->species[iSpecies].mass; |
92: s->atoms->r[iOff][0] += dt*s->atoms->p[iOff][0]*invMass; |
93: s->atoms->r[iOff][1] += dt*s->atoms->p[iOff][1]*invMass; |
94: s->atoms->r[iOff][2] += dt*s->atoms->p[iOff][2]*invMass; |
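The reported bottleneck (ports P0, P1, P5, with DIV/SQRT busy for 16-20 of the roughly 20 CQA cycles per iteration) comes from the per-atom 1.0/mass: the iSpecies indices feed the two VGATHERQPD mass gathers, whose results go straight into VDIVPD. Below is a minimal sketch of one common restructuring, precomputing the reciprocal mass once per species before the per-atom loop. It assumes the surrounding advancePosition context (s, dt, iBox, MAXATOMS, real_t); invMassBySpecies, nSpecies and MAX_SPECIES are illustrative names, not CoMD identifiers.

    /* Hedged sketch: hoist 1.0/mass out of the per-atom loop.
     * invMassBySpecies, nSpecies and MAX_SPECIES are illustrative, not CoMD API. */
    real_t invMassBySpecies[MAX_SPECIES];
    for (int iSp = 0; iSp < nSpecies; iSp++)
       invMassBySpecies[iSp] = 1.0 / s->species[iSp].mass;   /* one divide per species */

    for (int iOff = MAXATOMS*iBox, ii = 0; ii < s->boxes->nAtoms[iBox]; ii++, iOff++)
    {
       real_t invMass = invMassBySpecies[s->atoms->iSpecies[iOff]];  /* indexed load, no divide */
       s->atoms->r[iOff][0] += dt*s->atoms->p[iOff][0]*invMass;
       s->atoms->r[iOff][1] += dt*s->atoms->p[iOff][1]*invMass;
       s->atoms->r[iOff][2] += dt*s->atoms->p[iOff][2]*invMass;
    }

The loop body is unchanged from source lines 92-94; only the division moves out, so the two VDIVPD instructions (reciprocal throughput of 8 cycles each in the port table below) would be expected to turn into indexed loads, leaving the blend/permute transpose of r and p as the main remaining work per iteration.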
Coverage (%) | Name | Source Location | Module |
---|---|---|---|
100.00 | __kmp_invoke_microtask | | libiomp5.so |
| __kmp_invoke_task_func | | libiomp5.so |
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.03 |
CQA speedup if FP arith vectorized | 1.09 |
CQA speedup if fully vectorized | 1.25 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | 1.10 |
Bottlenecks | P0, P1, P5 |
Function | advancePosition.extracted |
Source | timestep.c:88-94 |
Source loop unroll info | unrolled by 8 |
Source loop unroll confidence level | high |
Unroll/vectorization loop type | main |
Unroll factor | 8 |
CQA cycles | 20.00 |
CQA cycles if no scalar integer | 19.33 |
CQA cycles if FP arith vectorized | 18.33 |
CQA cycles if fully vectorized | 16.00 |
Front-end cycles | 18.17 |
DIV/SQRT cycles | 20.00 |
P0 cycles | 20.00 |
P1 cycles | 12.67 |
P2 cycles | 12.67 |
P3 cycles | 3.00 |
P4 cycles | 20.00 |
P5 cycles | 1.00 |
P6 cycles | 3.00 |
P7 cycles | 3.00 |
P8 cycles | 3.00 |
P9 cycles | 1.00 |
P10 cycles | 12.67 |
P11 cycles | 16.00 |
Inter-iter dependencies cycles | 1 |
FE+BE cycles (UFS) | 20.40 - 103.44 |
Stall cycles (UFS) | 2.21 - 85.16 |
Nb insns | 88.00 |
Nb uops | 109.00 |
Nb loads | 32.00 |
Nb stores | 6.00 |
Nb stack references | 0.00 |
FLOP/cycle | 2.80 |
Nb FLOP add-sub | 0.00 |
Nb FLOP mul | 0.00 |
Nb FLOP fma | 24.00 |
Nb FLOP div | 8.00 |
Nb FLOP rcp | 0.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 0.00 |
Bytes/cycle | 44.80 |
Bytes prefetched | 0.00 |
Bytes loaded | 704.00 |
Bytes stored | 192.00 |
Stride 0 | 0.00 |
Stride 1 | 1.00 |
Stride n | 1.00 |
Stride unknown | 1.00 |
Stride indirect | 1.00 |
Vectorization ratio all | 95.12 |
Vectorization ratio load | 87.50 |
Vectorization ratio store | 100.00 |
Vectorization ratio mul | NA |
Vectorization ratio add_sub | 100.00 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 92.31 |
Vector-efficiency ratio all | 43.29 |
Vector-efficiency ratio load | 34.38 |
Vector-efficiency ratio store | 50.00 |
Vector-efficiency ratio mul | NA |
Vector-efficiency ratio add_sub | 50.00 |
Vector-efficiency ratio fma | 50.00 |
Vector-efficiency ratio div_sqrt | 50.00 |
Vector-efficiency ratio other | 44.23 |
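The throughput figures above follow directly from the raw counts, assuming the tool credits each FMA with two FLOPs (my arithmetic, not tool output):

    FLOP/cycle  = (2 x 24 FMA + 8 div) / 20.00 CQA cycles = 56 / 20  = 2.80
    Bytes/cycle = (704 B loaded + 192 B stored) / 20.00   = 896 / 20 = 44.80

The projected speedups are likewise ratios of the baseline CQA cycle count to the corresponding what-if count: 20.00/19.33 ≈ 1.03 (no scalar integer), 20.00/18.33 ≈ 1.09 (FP arith vectorized), 20.00/16.00 = 1.25 (fully vectorized), and 20.00/18.17 ≈ 1.10 for the next bottleneck killed, which coincides with the front-end bound of 18.17 cycles.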
Function | advancePosition.extracted |
Source file and lines | timestep.c:88-94 |
Module | exec |
nb instructions | 88 |
nb uops | 109 |
loop length | 582 |
used x86 registers | 7 |
used mmx registers | 0 |
used xmm registers | 9 |
used ymm registers | 23 |
used zmm registers | 0 |
nb stack references | 0 |
micro-operation queue | 18.17 cycles |
front end | 18.17 cycles |
P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 20.00 | 20.00 | 12.67 | 12.67 | 3.00 | 20.00 | 1.00 | 3.00 | 3.00 | 3.00 | 1.00 | 12.67 |
cycles | 20.00 | 20.00 | 12.67 | 12.67 | 3.00 | 20.00 | 1.00 | 3.00 | 3.00 | 3.00 | 1.00 | 12.67 |
Cycles executing div or sqrt instructions | 16.00 |
Longest recurrence chain latency (RecMII) | 1.00 |
FE+BE cycles | 20.40-103.44 |
Stall cycles | 2.21-85.16 |
RS full (events) | 5.32-0.00 |
Cycles summary |
Front-end | 18.17 |
Dispatch | 20.00 |
DIV/SQRT | 16.00 |
Data deps. | 1.00 |
Overall L1 | 20.00 |
Vectorization ratios (INT) |
all | 100% |
load | 100% |
store | NA (no store vectorizable/vectorized instructions) |
mul | NA (no mul vectorizable/vectorized instructions) |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | NA (no other vectorizable/vectorized instructions) |
Vectorization ratios (FP) |
all | 94% |
load | 86% |
store | 100% |
mul | NA (no mul vectorizable/vectorized instructions) |
add-sub | NA (no add-sub vectorizable/vectorized instructions) |
fma | 100% |
div/sqrt | 100% |
other | 92% |
Vectorization ratios (INT+FP) |
all | 95% |
load | 87% |
store | 100% |
mul | NA (no mul vectorizable/vectorized instructions) |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 92% |
Vector efficiency ratios (INT) |
all | 37% |
load | 25% |
store | NA (no store vectorizable/vectorized instructions) |
mul | NA (no mul vectorizable/vectorized instructions) |
add-sub | 50% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | NA (no other vectorizable/vectorized instructions) |
Vector efficiency ratios (FP) |
all | 43% |
load | 35% |
store | 50% |
mul | NA (no mul vectorizable/vectorized instructions) |
add-sub | NA (no add-sub vectorizable/vectorized instructions) |
fma | 50% |
div/sqrt | 50% |
other | 44% |
Vector efficiency ratios (INT+FP) |
all | 43% |
load | 34% |
store | 50% |
mul | NA (no mul vectorizable/vectorized instructions) |
add-sub | 50% |
fma | 50% |
div/sqrt | 50% |
other | 44% |
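A 50% vector-efficiency ceiling for the FP arithmetic is what one would expect if these 256-bit YMM operations are rated against an assumed 512-bit maximum vector width (the loop uses 23 YMM and 0 ZMM registers); my arithmetic, under that assumption:

    FP arith efficiency: 256-bit YMM / 512-bit max width = 50%
    XMM load efficiency: 128-bit XMM / 512-bit max width = 25%

The 34% overall load efficiency then reflects the mix of 128-bit and 256-bit loads, and the unused headroom is presumably what the "CQA speedup if fully vectorized" figure of 1.25x refers to.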
Instruction | Nb FU | P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | Latency | Recip. throughput |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
VPMOVSXDQ 0x10(%R12,%RBX,4),%YMM2 | 2 | 0 | 0 | 0.33 | 0.33 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 1 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VXORPD %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPADDQ %YMM2,%YMM2,%YMM2 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VGATHERQPD 0x8(%R9,%YMM2,8),%YMM7{%K1} | 5 | 1 | 1 | 1.33 | 1.33 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1.33 | 0-29 | 2 |
VPMOVSXDQ (%R12,%RBX,4),%YMM2 | 2 | 0 | 0 | 0.33 | 0.33 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 1 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VXORPD %XMM8,%XMM8,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPADDQ %YMM2,%YMM2,%YMM2 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VGATHERQPD 0x8(%R9,%YMM2,8),%YMM8{%K1} | 5 | 1 | 1 | 1.33 | 1.33 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 1.33 | 0-29 | 2 |
VDIVPD %YMM7,%YMM22,%YMM7 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13-15 | 8 |
VMOVUPD 0x80(%R11,%R13,1),%YMM9 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x20(%R11,%R13,1),%YMM10 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x10(%R11,%R13,1),%XMM13 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x20(%R11,%R13,1),%XMM14 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x70(%R11,%R13,1),%XMM15 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VBROADCASTSD 0x50(%R11,%R13,1),%YMM11 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBROADCASTSD 0xb0(%R11,%R13,1),%YMM12 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VMOVUPD 0x80(%R11,%R13,1),%XMM3 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x20(%R10,%R13,1),%YMM2 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x10(%R10,%R13,1),%XMM18 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x20(%R10,%R13,1),%XMM4 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VINSERTF128 $0x1,0x40(%R11,%R13,1),%YMM13,%YMM13 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VINSERTF128 $0x1,0xa0(%R11,%R13,1),%YMM15,%YMM5 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBLENDPD $0xc,0x40(%R11,%R13,1),%YMM14,%YMM14 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VBLENDPD $0xc,0xa0(%R11,%R13,1),%YMM3,%YMM3 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VINSERTF32X4 $0x1,0x40(%R10,%R13,1),%YMM18,%YMM6 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBLENDPD $0xc,0x40(%R10,%R13,1),%YMM4,%YMM4 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VBLENDPD $0x3,(%R11,%R13,1),%YMM10,%YMM15 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VBLENDPD $0xa,%YMM14,%YMM13,%YMM14 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VBLENDPD $0xa,%YMM13,%YMM15,%YMM1 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VSHUFPD $0x5,%YMM10,%YMM15,%YMM10 | 1 | 0 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VBLENDPD $0x3,0x60(%R11,%R13,1),%YMM9,%YMM13 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VBLENDPD $0xa,%YMM3,%YMM5,%YMM15 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VBLENDPD $0xa,%YMM5,%YMM13,%YMM3 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VSHUFPD $0x5,%YMM9,%YMM13,%YMM5 | 1 | 0 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VBLENDPD $0x3,(%R10,%R13,1),%YMM2,%YMM0 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VBLENDPD $0xa,%YMM4,%YMM6,%YMM9 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VBLENDPD $0xa,%YMM6,%YMM0,%YMM13 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VSHUFPD $0x5,%YMM2,%YMM0,%YMM0 | 1 | 0 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VMOVUPD 0x80(%R10,%R13,1),%YMM2 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VMOVUPD 0x70(%R10,%R13,1),%XMM4 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VBLENDPD $0x8,%YMM11,%YMM10,%YMM6 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VBROADCASTSD 0x50(%R10,%R13,1),%YMM10 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBLENDPD $0x8,%YMM12,%YMM5,%YMM5 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VBROADCASTSD 0xb0(%R10,%R13,1),%YMM12 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBLENDPD $0x8,%YMM10,%YMM0,%YMM11 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VBLENDPD $0x3,0x60(%R10,%R13,1),%YMM2,%YMM0 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VSHUFPD $0x5,%YMM2,%YMM0,%YMM2 | 1 | 0 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VBLENDPD $0x8,%YMM12,%YMM2,%YMM2 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VINSERTF128 $0x1,0xa0(%R10,%R13,1),%YMM4,%YMM4 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VBLENDPD $0xa,%YMM4,%YMM0,%YMM10 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VFMADD231PD %YMM3,%YMM7,%YMM10 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VDIVPD %YMM8,%YMM22,%YMM0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 13-15 | 8 |
VFMADD231PD %YMM1,%YMM0,%YMM13 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %YMM5,%YMM7,%YMM2 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %YMM6,%YMM0,%YMM11 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMOVUPD 0x80(%R10,%R13,1),%XMM1 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 0-1 | 0.33 |
VBLENDPD $0xc,0xa0(%R10,%R13,1),%YMM1,%YMM1 | 2 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VBLENDPD $0xa,%YMM1,%YMM4,%YMM8 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VFMADD231PD %YMM15,%YMM7,%YMM8 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %YMM14,%YMM0,%YMM9 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMOVAPD %YMM13,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VPERMT2PD %YMM11,%YMM16,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VMOVAPD %YMM2,%YMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VPERMT2PD %YMM10,%YMM17,%YMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VMOVAPD %YMM2,%YMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VPERMT2PD %YMM10,%YMM19,%YMM3 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPERMT2PD %YMM2,%YMM16,%YMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VMOVAPD %YMM11,%YMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VPERMT2PD %YMM13,%YMM17,%YMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPERMT2PD %YMM13,%YMM19,%YMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPERMT2PD %YMM8,%YMM20,%YMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VBLENDPD $0x2,%YMM8,%YMM3,%YMM3 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPERMT2PD %YMM1,%YMM21,%YMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VBLENDPD $0x2,%YMM9,%YMM11,%YMM1 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPERMT2PD %YMM9,%YMM20,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPERMT2PD %YMM2,%YMM21,%YMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VMOVUPD %YMM3,0x80(%R10,%R13,1) | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 0.50 |
VMOVUPD %YMM1,0x20(%R10,%R13,1) | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 0.50 |
VMOVUPD %YMM9,0x40(%R10,%R13,1) | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 0.50 |
VMOVUPD %YMM8,0xa0(%R10,%R13,1) | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 0.50 |
VMOVUPD %YMM10,0x60(%R10,%R13,1) | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 0.50 |
VMOVUPD %YMM0,(%R10,%R13,1) | 1 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0-1 | 0.50 |
ADD $0x8,%RBX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
ADD $0xc0,%R13 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
CMP %EDX,%EBX | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
JLE 412fe0 <advancePosition.extracted+0x260> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |