Loop Id: 154 | Module: exec | Source: advec_cell.cpp:157-202 [...] | Coverage: 2.56% |
---|
0x41d1e0 VADDPD %ZMM10,%ZMM7,%ZMM7{%K1} |
0x41d1e6 VMULPD %ZMM5,%ZMM7,%ZMM2 |
0x41d1ec VPMULLQ %ZMM1,%ZMM26,%ZMM1 |
0x41d1f2 VPADDQ %ZMM0,%ZMM1,%ZMM0 |
0x41d1f8 KXNORW %K0,%K0,%K1 |
0x41d1fc MOV 0x20(%RSP),%RAX [11] |
0x41d201 VSCATTERQPD %ZMM2,(%RAX,%ZMM0,8){%K1} [1] |
0x41d208 VPADDQ 0x4fe66(%RIP){1to8},%ZMM17,%ZMM17 [10] |
0x41d212 ADD $0x8,%R14 |
0x41d216 CMP %R13,%R14 |
0x41d219 JAE 41d569 |
0x41d21f VMOVDQA64 %ZMM17,%ZMM0 |
0x41d225 VMOVDQA64 %ZMM16,%ZMM1 |
0x41d22b LEA 0x3e88e(%RIP),%RAX |
0x41d232 CALL %RAX |
0x41d234 VPMOVQD %ZMM0,%YMM0 |
0x41d23a VPADDD 0xa0(%RSP),%YMM0,%YMM30 [11] |
0x41d242 VMOVDQA64 %ZMM17,%ZMM0 |
0x41d248 VMOVDQA64 %ZMM16,%ZMM1 |
0x41d24e CALLQ 0x6ed7c(%RIP) |
0x41d254 VPADDQ %ZMM19,%ZMM0,%ZMM0 |
0x41d25a VPSLLQ $0x20,%ZMM0,%ZMM0 |
0x41d261 VPSRAQ $0x20,%ZMM0,%ZMM0 |
0x41d268 VPMOVSXDQ %YMM30,%ZMM1 |
0x41d26e VPXOR %XMM2,%XMM2,%XMM2 |
0x41d272 VPMULLQ %ZMM1,%ZMM20,%ZMM2 |
0x41d278 VPADDQ %ZMM0,%ZMM2,%ZMM2 |
0x41d27e VXORPD %XMM5,%XMM5,%XMM5 |
0x41d282 KXNORW %K0,%K0,%K1 |
0x41d286 MOV 0x68(%RSP),%RAX [11] |
0x41d28b VGATHERQPD (%RAX,%ZMM2,8),%ZMM5{%K1} [3] |
0x41d292 VCMPPD $0x1,%ZMM5,%ZMM27,%K1 |
0x41d299 VPCMPEQD %YMM6,%YMM6,%YMM6 |
0x41d29d VPADDD %YMM6,%YMM30,%YMM2 |
0x41d2a3 VPMOVSXDQ %YMM2,%ZMM4 |
0x41d2a9 VPBLENDMQ %ZMM4,%ZMM1,%ZMM7{%K1} |
0x41d2af VPXOR %XMM2,%XMM2,%XMM2 |
0x41d2b3 VPMULLQ %ZMM7,%ZMM22,%ZMM2 |
0x41d2b9 VPADDQ %ZMM0,%ZMM2,%ZMM3 |
0x41d2bf VPXOR %XMM2,%XMM2,%XMM2 |
0x41d2c3 KXNORW %K0,%K0,%K2 |
0x41d2c7 MOV 0x8(%RSP),%RAX [11] |
0x41d2cc VGATHERQPD (%RAX,%ZMM3,8),%ZMM2{%K2} [13] |
0x41d2d3 VPSUBD %YMM6,%YMM30,%YMM6 |
0x41d2d9 VPMINSD %YMM6,%YMM21,%YMM6 |
0x41d2df VPMOVSXDQ %YMM6,%ZMM6 |
0x41d2e5 VMOVDQA64 %ZMM6,%ZMM8 |
0x41d2eb VXORPD %XMM9,%XMM9,%XMM9 |
0x41d2f0 KXNORW %K0,%K0,%K2 |
0x41d2f4 VGATHERDPD (%R12,%YMM30,8),%ZMM9{%K2} [15] |
0x41d2fb VMOVDQA64 %ZMM4,%ZMM6{%K1} |
0x41d301 VANDPD %ZMM28,%ZMM5,%ZMM10 |
0x41d307 VDIVPD %ZMM2,%ZMM10,%ZMM10 |
0x41d30d VXORPD %XMM2,%XMM2,%XMM2 |
0x41d311 KXNORW %K0,%K0,%K2 |
0x41d315 VGATHERQPD (%R12,%ZMM6,8),%ZMM2{%K2} [4] |
0x41d31c VFMADD213PD %ZMM9,%ZMM10,%ZMM9 |
0x41d322 VDIVPD %ZMM2,%ZMM9,%ZMM2 |
0x41d328 VPADDD 0x514ce(%RIP){1to8},%YMM30,%YMM9 [10] |
0x41d332 VPXOR %XMM6,%XMM6,%XMM6 |
0x41d336 VPMULLQ %ZMM7,%ZMM23,%ZMM6 |
0x41d33c VPADDQ %ZMM0,%ZMM6,%ZMM6 |
0x41d342 VXORPD %XMM11,%XMM11,%XMM11 |
0x41d347 KXNORW %K0,%K0,%K2 |
0x41d34b VGATHERQPD (%RDI,%ZMM6,8),%ZMM11{%K2} [14] |
0x41d352 VPMOVSXDQ %YMM9,%ZMM8{%K1} |
0x41d358 VPXOR %XMM9,%XMM9,%XMM9 |
0x41d35d VPMULLQ %ZMM8,%ZMM23,%ZMM9 |
0x41d363 VPADDQ %ZMM0,%ZMM9,%ZMM9 |
0x41d369 VXORPD %XMM12,%XMM12,%XMM12 |
0x41d36e KXNORW %K0,%K0,%K2 |
0x41d372 VGATHERQPD (%RDI,%ZMM9,8),%ZMM12{%K2} [6] |
0x41d379 VPBLENDMQ %ZMM1,%ZMM4,%ZMM9{%K1} |
0x41d37f VPXOR %XMM4,%XMM4,%XMM4 |
0x41d383 VPMULLQ %ZMM9,%ZMM23,%ZMM4 |
0x41d389 VPADDQ %ZMM0,%ZMM4,%ZMM4 |
0x41d38f VXORPD %XMM13,%XMM13,%XMM13 |
0x41d394 KXNORW %K0,%K0,%K1 |
0x41d398 VGATHERQPD (%RDI,%ZMM4,8),%ZMM13{%K1} [8] |
0x41d39f VBROADCASTSD 0x4fcd7(%RIP),%ZMM4 [10] |
0x41d3a9 VSUBPD %ZMM10,%ZMM4,%ZMM4 |
0x41d3af VSUBPD %ZMM12,%ZMM11,%ZMM12 |
0x41d3b5 VSUBPD %ZMM11,%ZMM13,%ZMM13 |
0x41d3bb VMULPD %ZMM12,%ZMM13,%ZMM14 |
0x41d3c1 VCMPPD $0x1,%ZMM14,%ZMM27,%K1 |
0x41d3c8 VANDPD %ZMM28,%ZMM12,%ZMM12 |
0x41d3ce VANDPD %ZMM28,%ZMM13,%ZMM14 |
0x41d3d4 VMINPD %ZMM14,%ZMM12,%ZMM15 |
0x41d3da VMULPD %ZMM2,%ZMM12,%ZMM12 |
0x41d3e0 VFMADD231PD %ZMM14,%ZMM4,%ZMM12 |
0x41d3e6 VMULPD %ZMM29,%ZMM12,%ZMM12 |
0x41d3ec VMINPD %ZMM12,%ZMM15,%ZMM12 |
0x41d3f2 VCMPPD $0x1,%ZMM13,%ZMM27,%K2 |
0x41d3f9 VSUBPD %ZMM10,%ZMM31,%ZMM10 |
0x41d3ff VXORPD %ZMM18,%ZMM10,%ZMM13 |
0x41d405 VMOVAPD %ZMM10,%ZMM13{%K2} |
0x41d40b VFMADD231PD %ZMM13,%ZMM12,%ZMM11{%K1} |
0x41d411 VMULPD %ZMM5,%ZMM11,%ZMM5 |
0x41d417 VPXOR %XMM10,%XMM10,%XMM10 |
0x41d41c VPMULLQ %ZMM1,%ZMM24,%ZMM10 |
0x41d422 VPADDQ %ZMM0,%ZMM10,%ZMM10 |
0x41d428 KXNORW %K0,%K0,%K1 |
0x41d42c MOV 0x28(%RSP),%RAX [11] |
0x41d431 VSCATTERQPD %ZMM5,(%RAX,%ZMM10,8){%K1} [16] |
0x41d438 VPMULLQ %ZMM7,%ZMM25,%ZMM7 |
0x41d43e VPADDQ %ZMM0,%ZMM7,%ZMM10 |
0x41d444 VPXOR %XMM7,%XMM7,%XMM7 |
0x41d448 KXNORW %K0,%K0,%K1 |
0x41d44c VGATHERQPD (%R15,%ZMM10,8),%ZMM7{%K1} [7] |
0x41d453 VPMULLQ %ZMM8,%ZMM25,%ZMM8 |
0x41d459 VPADDQ %ZMM0,%ZMM8,%ZMM8 |
0x41d45f VXORPD %XMM10,%XMM10,%XMM10 |
0x41d464 KXNORW %K0,%K0,%K1 |
0x41d468 VGATHERQPD (%R15,%ZMM8,8),%ZMM10{%K1} [2] |
0x41d46f VPXOR %XMM8,%XMM8,%XMM8 |
0x41d474 VPMULLQ %ZMM9,%ZMM25,%ZMM8 |
0x41d47a VPADDQ %ZMM0,%ZMM8,%ZMM8 |
0x41d480 VXORPD %XMM11,%XMM11,%XMM11 |
0x41d485 KXNORW %K0,%K0,%K1 |
0x41d489 VGATHERQPD (%R15,%ZMM8,8),%ZMM11{%K1} [9] |
0x41d490 VSUBPD %ZMM10,%ZMM7,%ZMM9 |
0x41d496 VSUBPD %ZMM7,%ZMM11,%ZMM8 |
0x41d49c VMULPD %ZMM9,%ZMM8,%ZMM10 |
0x41d4a2 VCMPPD $0x1,%ZMM10,%ZMM27,%K1 |
0x41d4a9 KORTESTB %K1,%K1 |
0x41d4ad VXORPD %XMM10,%XMM10,%XMM10 |
0x41d4b2 JE 41d1e0 |
0x41d4b8 VPSLLQ $0x3,%ZMM3,%ZMM3 |
0x41d4bf VPADDQ 0x100(%RSP),%ZMM3,%ZMM3 [11] |
0x41d4c7 VPSLLQ $0x3,%ZMM6,%ZMM6 |
0x41d4ce VPADDQ 0xc0(%RSP),%ZMM6,%ZMM6 [11] |
0x41d4d6 KMOVQ %K1,%K2 |
0x41d4db VGATHERQPD (,%ZMM6,1),%ZMM10{%K2} [12] |
0x41d4e6 VXORPD %XMM6,%XMM6,%XMM6 |
0x41d4ea KMOVQ %K1,%K2 |
0x41d4ef VGATHERQPD (,%ZMM3,1),%ZMM6{%K2} [5] |
0x41d4fa VANDPD %ZMM28,%ZMM5,%ZMM3 |
0x41d500 VMULPD %ZMM10,%ZMM6,%ZMM6 |
0x41d506 VDIVPD %ZMM6,%ZMM3,%ZMM3 |
0x41d50c VCMPPD $0x1,%ZMM8,%ZMM27,%K2 |
0x41d513 VSUBPD %ZMM3,%ZMM31,%ZMM3 |
0x41d519 VXORPD %ZMM18,%ZMM3,%ZMM6 |
0x41d51f VMOVAPD %ZMM3,%ZMM6{%K2} |
0x41d525 VANDPD %ZMM28,%ZMM9,%ZMM3 |
0x41d52b VANDPD %ZMM28,%ZMM8,%ZMM8 |
0x41d531 VMINPD %ZMM8,%ZMM3,%ZMM9 |
0x41d537 VMULPD %ZMM2,%ZMM3,%ZMM2 |
0x41d53d VFMADD213PD %ZMM2,%ZMM8,%ZMM4 |
0x41d543 VMULPD %ZMM29,%ZMM4,%ZMM2 |
0x41d549 VMINPD %ZMM2,%ZMM9,%ZMM2 |
0x41d54f VMULPD %ZMM2,%ZMM6,%ZMM10 |
0x41d555 JMP 41d1e0 |
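The hot path above is dominated by masked gathers/scatters (VGATHERQPD / VSCATTERQPD) whose 64-bit lane indices are built with VPMULLQ + VPADDQ, i.e. a per-lane `i + j * sizeX` flat offset, and KXNORW %K0,%K0,%K1 simply materializes an all-ones mask before each access. The snippet below is a minimal AVX-512 intrinsics sketch of that addressing pattern only; the function and parameter names (gather_column, scatter_column, vi, vj, sizeX, lanes) are hypothetical and are not taken from the binary.

```cpp
#include <immintrin.h>

// Sketch of the index build + masked gather/scatter pattern in the listing:
// per-lane offsets = i + j * sizeX (VPMULLQ + VPADDQ), scale 8 = sizeof(double).
// Requires AVX-512F + AVX-512DQ (_mm512_mullo_epi64 maps to VPMULLQ).
static inline __m512d gather_column(const double *base, __m512i vi, __m512i vj,
                                    long long sizeX, __mmask8 lanes) {
  const __m512i idx =
      _mm512_add_epi64(vi, _mm512_mullo_epi64(vj, _mm512_set1_epi64(sizeX)));
  // Masked gather: inactive lanes keep the (zeroed) source value.
  return _mm512_mask_i64gather_pd(_mm512_setzero_pd(), lanes, idx, base, 8);
}

static inline void scatter_column(double *base, __m512i vi, __m512i vj,
                                  long long sizeX, __mmask8 lanes, __m512d v) {
  const __m512i idx =
      _mm512_add_epi64(vi, _mm512_mullo_epi64(vj, _mm512_set1_epi64(sizeX)));
  _mm512_mask_i64scatter_pd(base, lanes, idx, v, 8);
}
```

With a full mask (KXNORW yields 0xFF for the 8 double lanes), each such access may touch up to 8 distinct cache lines, which is consistent with the high reciprocal throughputs listed for VGATHERQPD (2.67) and VSCATTERQPD (7) in the instruction tables further down.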
/home/eoseret/qaas_runs_CPU_9468/171-145-9336/intel/CloverLeafCXX/build/CloverLeafCXX/src/omp/context.h: 69 - 69 |
-------------------------------------------------------------------------------- |
69: T &operator()(size_t i, size_t j) const { return data[i + j * sizeX]; } |
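The accessor above flattens (i, j) to `i + j * sizeX`, so i is the contiguous dimension and neighbouring rows in j are sizeX elements apart. Below is a minimal self-contained sketch of that layout; the class name Buffer2DSketch and its members are illustrative assumptions, not the project's actual clover::Buffer2D definition.

```cpp
#include <cstddef>
#include <vector>

// Same indexing convention as context.h:69 -- data[i + j * sizeX]:
// i varies fastest in memory, a step in j jumps sizeX elements.
template <typename T> struct Buffer2DSketch {
  std::size_t sizeX, sizeY;
  std::vector<T> data;
  Buffer2DSketch(std::size_t nx, std::size_t ny)
      : sizeX(nx), sizeY(ny), data(nx * ny) {}
  T &operator()(std::size_t i, std::size_t j) { return data[i + j * sizeX]; }
  const T &operator()(std::size_t i, std::size_t j) const {
    return data[i + j * sizeX];
  }
};
```

Because the collapsed loop below mixes accesses at rows j-2 … j+1 for each lane, the compiler materializes these flat offsets per lane (the VPMULLQ/VPADDQ sequences above) and falls back to gathers/scatters, which is consistent with the "Stride unknown" and "Stride indirect" counts in the metrics.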
/home/eoseret/qaas_runs_CPU_9468/171-145-9336/intel/CloverLeafCXX/build/CloverLeafCXX/src/omp/advec_cell.cpp: 157 - 202 |
-------------------------------------------------------------------------------- |
157: #pragma omp parallel for simd collapse(2) |
158: for (int j = (y_min + 1); j < (y_max + 2 + 2); j++) { |
159: for (int i = (x_min + 1); i < (x_max + 2); i++) |
160: ({ |
161: int upwind, donor, downwind, dif; |
162: double sigmat, sigma3, sigma4, sigmav, sigmam, diffuw, diffdw, limiter, wind; |
163: if (vol_flux_y(i, j) > 0.0) { |
164: upwind = j - 2; |
165: donor = j - 1; |
166: downwind = j; |
167: dif = donor; |
168: } else { |
169: upwind = std::min(j + 1, y_max + 2); |
170: donor = j; |
171: downwind = j - 1; |
172: dif = upwind; |
173: } |
174: sigmat = std::fabs(vol_flux_y(i, j)) / pre_vol(i, donor); |
175: sigma3 = (1.0 + sigmat) * (vertexdy[j] / vertexdy[dif]); |
176: sigma4 = 2.0 - sigmat; |
177: sigmav = sigmat; |
178: diffuw = density1(i, donor) - density1(i, upwind); |
179: diffdw = density1(i, downwind) - density1(i, donor); |
180: wind = 1.0; |
181: if (diffdw <= 0.0) wind = -1.0; |
182: if (diffuw * diffdw > 0.0) { |
183: limiter = (1.0 - sigmav) * wind * |
184: std::fmin(std::fmin(std::fabs(diffuw), std::fabs(diffdw)), |
185: one_by_six * (sigma3 * std::fabs(diffuw) + sigma4 * std::fabs(diffdw))); |
186: } else { |
187: limiter = 0.0; |
188: } |
189: mass_flux_y(i, j) = vol_flux_y(i, j) * (density1(i, donor) + limiter); |
190: sigmam = std::fabs(mass_flux_y(i, j)) / (density1(i, donor) * pre_vol(i, donor)); |
191: diffuw = energy1(i, donor) - energy1(i, upwind); |
192: diffdw = energy1(i, downwind) - energy1(i, donor); |
193: wind = 1.0; |
194: if (diffdw <= 0.0) wind = -1.0; |
195: if (diffuw * diffdw > 0.0) { |
196: limiter = (1.0 - sigmam) * wind * |
197: std::fmin(std::fmin(std::fabs(diffuw), std::fabs(diffdw)), |
198: one_by_six * (sigma3 * std::fabs(diffuw) + sigma4 * std::fabs(diffdw))); |
199: } else { |
200: limiter = 0.0; |
201: } |
202: ener_flux(i, j) = mass_flux_y(i, j) * (energy1(i, donor) + limiter); |
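For reference, the branchy limiter at lines 182-188 reduces to the closed form below (writing $d_{uw}$ for diffuw and $d_{dw}$ for diffdw, and assuming the variable one_by_six holds the constant 1/6):

$$
\text{limiter} =
\begin{cases}
(1-\sigma_v)\, w \, \min\!\Big(\min\big(|d_{uw}|,|d_{dw}|\big),\ \tfrac{1}{6}\big(\sigma_3 |d_{uw}| + \sigma_4 |d_{dw}|\big)\Big), & d_{uw}\, d_{dw} > 0,\\[4pt]
0, & \text{otherwise},
\end{cases}
$$

with $w = -1$ if $d_{dw} \le 0$ and $w = 1$ otherwise, $\sigma_v = \sigma_t = |\text{vol\_flux\_y}(i,j)| / \text{pre\_vol}(i,\text{donor})$, $\sigma_3 = (1+\sigma_t)\,\text{vertexdy}[j]/\text{vertexdy}[\text{dif}]$ and $\sigma_4 = 2-\sigma_t$. The energy limiter at lines 195-201 is identical except that $\sigma_v$ is replaced by $\sigma_m = |\text{mass\_flux\_y}(i,j)| / \big(\text{density1}(i,\text{donor})\,\text{pre\_vol}(i,\text{donor})\big)$.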
Path / |
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.11 |
CQA speedup if FP arith vectorized | 1.00 |
CQA speedup if fully vectorized | 1.00 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | NA |
Bottlenecks | |
Function | _Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7 |
Source | context.h:69-69,advec_cell.cpp:157-202 |
Source loop unroll info | unrolled by 8 |
Source loop unroll confidence level | high |
Unroll/vectorization loop type | main |
Unroll factor | 8 |
CQA cycles | 72.00 |
CQA cycles if no scalar integer | 65.00 |
CQA cycles if FP arith vectorized | 72.00 |
CQA cycles if fully vectorized | 72.00 |
Front-end cycles | 44.92 |
DIV/SQRT cycles | 72.00 |
P0 cycles | 9.50 |
P1 cycles | 32.67 |
P2 cycles | 32.67 |
P3 cycles | 9.00 |
P4 cycles | 72.00 |
P5 cycles | 4.00 |
P6 cycles | 9.00 |
P7 cycles | 9.00 |
P8 cycles | 9.00 |
P9 cycles | 4.00 |
P10 cycles | 32.67 |
P11 cycles | 40.00 |
Inter-iter dependencies cycles | 1 |
FE+BE cycles (UFS) | 97.50 - 225.72 |
Stall cycles (UFS) | 61.41 - 189.62 |
Nb insns | 139.50 |
Nb uops | 269.50 |
Nb loads | 21.00 |
Nb stores | 2.00 |
Nb stack references | 6.00 |
FLOP/cycle | 2.78 |
Nb FLOP add-sub | 60.00 |
Nb FLOP mul | 64.00 |
Nb FLOP fma | 28.00 |
Nb FLOP div | 20.00 |
Nb FLOP rcp | 0.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 0.00 |
Bytes/cycle | 13.68 |
Bytes prefetched | 0.00 |
Bytes loaded | 860.00 |
Bytes stored | 128.00 |
Stride 0 | 2.00 |
Stride 1 | 0.00 |
Stride n | 0.00 |
Stride unknown | 4.00 |
Stride indirect | 8.00 |
Vectorization ratio all | 99.11 |
Vectorization ratio load | 93.65 |
Vectorization ratio store | 100.00 |
Vectorization ratio mul | 100.00 |
Vectorization ratio add_sub | 100.00 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 98.41 |
Vector-efficiency ratio all | 82.54 |
Vector-efficiency ratio load | 88.10 |
Vector-efficiency ratio store | 100.00 |
Vector-efficiency ratio mul | 100.00 |
Vector-efficiency ratio add_sub | 91.81 |
Vector-efficiency ratio fma | 100.00 |
Vector-efficiency ratio div_sqrt | 100.00 |
Vector-efficiency ratio other | 71.81 |
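A quick consistency check on the figures above (this first table, with its fractional instruction count of 139.50, is consistent with being the average of the two path-specific tables that follow, 152 and 127 instructions): FLOP/cycle is the FLOP total with each FMA counted twice, divided by the CQA cycle estimate, and the "no scalar integer" speedup is the ratio of the two cycle estimates:

$$
\frac{\text{FLOP}}{\text{cycle}} = \frac{N_{\text{add-sub}} + N_{\text{mul}} + 2\,N_{\text{fma}} + N_{\text{div}}}{\text{CQA cycles}}
= \frac{60 + 64 + 2 \cdot 28 + 20}{72} \approx 2.78,
\qquad \frac{72}{65} \approx 1.11 .
$$

The same accounting reproduces the next two tables: $232/79 \approx 2.94$ with $79/71.5 \approx 1.10$, and $168/65 \approx 2.58$ with $65/58.5 \approx 1.11$.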
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.10 |
CQA speedup if FP arith vectorized | 1.00 |
CQA speedup if fully vectorized | 1.00 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | NA |
Bottlenecks | P0, P5 |
Function | _Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7 |
Source | context.h:69-69,advec_cell.cpp:157-202 |
Source loop unroll info | unrolled by 8 |
Source loop unroll confidence level | high |
Unroll/vectorization loop type | main |
Unroll factor | 8 |
CQA cycles | 79.00 |
CQA cycles if no scalar integer | 71.50 |
CQA cycles if FP arith vectorized | 79.00 |
CQA cycles if fully vectorized | 79.00 |
Front-end cycles | 47.83 |
DIV/SQRT cycles | 79.00 |
P0 cycles | 11.00 |
P1 cycles | 35.67 |
P2 cycles | 35.67 |
P3 cycles | 9.00 |
P4 cycles | 79.00 |
P5 cycles | 4.00 |
P6 cycles | 9.00 |
P7 cycles | 9.00 |
P8 cycles | 9.00 |
P9 cycles | 4.00 |
P10 cycles | 35.67 |
P11 cycles | 48.00 |
Inter-iter dependencies cycles | 1 |
FE+BE cycles (UFS) | 104.62 - 249.20 |
Stall cycles (UFS) | 65.94 - 210.52 |
Nb insns | 152.00 |
Nb uops | 287.00 |
Nb loads | 23.00 |
Nb stores | 2.00 |
Nb stack references | 7.00 |
FLOP/cycle | 2.94 |
Nb FLOP add-sub | 64.00 |
Nb FLOP mul | 80.00 |
Nb FLOP fma | 32.00 |
Nb FLOP div | 24.00 |
Nb FLOP rcp | 0.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 0.00 |
Bytes/cycle | 14.13 |
Bytes prefetched | 0.00 |
Bytes loaded | 988.00 |
Bytes stored | 128.00 |
Stride 0 | 2.00 |
Stride 1 | 0.00 |
Stride n | 0.00 |
Stride unknown | 4.00 |
Stride indirect | 9.00 |
Vectorization ratio all | 99.20 |
Vectorization ratio load | 94.44 |
Vectorization ratio store | 100.00 |
Vectorization ratio mul | 100.00 |
Vectorization ratio add_sub | 100.00 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 98.57 |
Vector-efficiency ratio all | 83.90 |
Vector-efficiency ratio load | 89.58 |
Vector-efficiency ratio store | 100.00 |
Vector-efficiency ratio mul | 100.00 |
Vector-efficiency ratio add_sub | 92.31 |
Vector-efficiency ratio fma | 100.00 |
Vector-efficiency ratio div_sqrt | 100.00 |
Vector-efficiency ratio other | 74.11 |
Metric | Value |
---|---|
CQA speedup if no scalar integer | 1.11 |
CQA speedup if FP arith vectorized | 1.00 |
CQA speedup if fully vectorized | 1.00 |
CQA speedup if no inter-iteration dependency | NA |
CQA speedup if next bottleneck killed | 1.55 |
Bottlenecks | P0, P5 |
Function | _Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7 |
Source | context.h:69-69,advec_cell.cpp:157-202 |
Source loop unroll info | unrolled by 8 |
Source loop unroll confidence level | high |
Unroll/vectorization loop type | main |
Unroll factor | 8 |
CQA cycles | 65.00 |
CQA cycles if no scalar integer | 58.50 |
CQA cycles if FP arith vectorized | 65.00 |
CQA cycles if fully vectorized | 65.00 |
Front-end cycles | 42.00 |
DIV/SQRT cycles | 65.00 |
P0 cycles | 8.00 |
P1 cycles | 29.67 |
P2 cycles | 29.67 |
P3 cycles | 9.00 |
P4 cycles | 65.00 |
P5 cycles | 4.00 |
P6 cycles | 9.00 |
P7 cycles | 9.00 |
P8 cycles | 9.00 |
P9 cycles | 4.00 |
P10 cycles | 29.67 |
P11 cycles | 32.00 |
Inter-iter dependencies cycles | 1 |
FE+BE cycles (UFS) | 90.39 - 202.24 |
Stall cycles (UFS) | 56.89 - 168.72 |
Nb insns | 127.00 |
Nb uops | 252.00 |
Nb loads | 19.00 |
Nb stores | 2.00 |
Nb stack references | 5.00 |
FLOP/cycle | 2.58 |
Nb FLOP add-sub | 56.00 |
Nb FLOP mul | 48.00 |
Nb FLOP fma | 24.00 |
Nb FLOP div | 16.00 |
Nb FLOP rcp | 0.00 |
Nb FLOP sqrt | 0.00 |
Nb FLOP rsqrt | 0.00 |
Bytes/cycle | 13.23 |
Bytes prefetched | 0.00 |
Bytes loaded | 732.00 |
Bytes stored | 128.00 |
Stride 0 | 2.00 |
Stride 1 | 0.00 |
Stride n | 0.00 |
Stride unknown | 4.00 |
Stride indirect | 7.00 |
Vectorization ratio all | 99.03 |
Vectorization ratio load | 92.86 |
Vectorization ratio store | 100.00 |
Vectorization ratio mul | 100.00 |
Vectorization ratio add_sub | 100.00 |
Vectorization ratio fma | 100.00 |
Vectorization ratio div_sqrt | 100.00 |
Vectorization ratio other | 98.25 |
Vector-efficiency ratio all | 81.19 |
Vector-efficiency ratio load | 86.61 |
Vector-efficiency ratio store | 100.00 |
Vector-efficiency ratio mul | 100.00 |
Vector-efficiency ratio add_sub | 91.30 |
Vector-efficiency ratio fma | 100.00 |
Vector-efficiency ratio div_sqrt | 100.00 |
Vector-efficiency ratio other | 69.52 |
Path / |
Function | _Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7 |
Source file and lines | advec_cell.cpp:157-202 |
Module | exec |
nb instructions | 139.50 |
nb uops | 269.50 |
loop length | 809 |
used x86 registers | 7 |
used mmx registers | 0 |
used xmm registers | 11 |
used ymm registers | 6 |
used zmm registers | 30 |
nb stack references | 6 |
ADD-SUB / MUL ratio | 0.98 |
micro-operation queue | 44.92 cycles |
front end | 44.92 cycles |
P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 72.00 | 6.00 | 32.67 | 32.67 | 9.00 | 72.00 | 4.00 | 9.00 | 9.00 | 9.00 | 4.00 | 32.67 |
cycles | 72.00 | 9.50 | 32.67 | 32.67 | 9.00 | 72.00 | 4.00 | 9.00 | 9.00 | 9.00 | 4.00 | 32.67 |
Cycles executing div or sqrt instructions | 40.00 |
Longest recurrence chain latency (RecMII) | 1.00 |
FE+BE cycles | 97.50-225.72 |
Stall cycles | 61.41-189.62 |
ROB full (events) | 29.61-137.30 |
RS full (events) | 54.41-71.13 |
Front-end | 44.92 |
Dispatch | 72.00 |
DIV/SQRT | 40.00 |
Data deps. | 1.00 |
Overall L1 | 72.00 |
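The "Overall L1" figure for this path is consistent with taking the worst of the listed bounds, i.e. the loop is dispatch-bound (ports P0/P5 at 72 cycles each in the table above) rather than front-end- or divider-bound:

$$
\text{Overall L1} = \max(\text{Front-end},\ \text{Dispatch},\ \text{DIV/SQRT},\ \text{Data deps.})
= \max(44.92,\ 72.00,\ 40.00,\ 1.00) = 72.00\ \text{cycles}.
$$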
Vectorization ratios (INT) |
all | 100% |
load | 100% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 100% |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 100% |
Vectorization ratios (FP) |
all | 98% |
load | 91% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 97% |
Vectorization ratios (INT+FP) |
all | 99% |
load | 93% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 98% |
Vector efficiency ratios (INT) |
all | 78% |
load | 73% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 100% |
add-sub | 88% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 63% |
Vector efficiency ratios (FP) |
all | 86% |
load | 92% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 77% |
Vector efficiency ratios (INT+FP) |
all | 82% |
load | 88% |
store | 100% |
mul | 100% |
add-sub | 91% |
fma | 100% |
div/sqrt | 100% |
other | 71% |
Function | _Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7 |
Source file and lines | advec_cell.cpp:157-202 |
Module | exec |
nb instructions | 152 |
nb uops | 287 |
loop length | 890 |
used x86 registers | 7 |
used mmx registers | 0 |
used xmm registers | 11 |
used ymm registers | 6 |
used zmm registers | 30 |
nb stack references | 7 |
ADD-SUB / MUL ratio | 0.80 |
micro-operation queue | 47.83 cycles |
front end | 47.83 cycles |
P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 79.00 | 6.00 | 35.67 | 35.67 | 9.00 | 79.00 | 4.00 | 9.00 | 9.00 | 9.00 | 4.00 | 35.67 |
cycles | 79.00 | 11.00 | 35.67 | 35.67 | 9.00 | 79.00 | 4.00 | 9.00 | 9.00 | 9.00 | 4.00 | 35.67 |
Cycles executing div or sqrt instructions | 48.00 |
Longest recurrence chain latency (RecMII) | 1.00 |
FE+BE cycles | 104.62-249.20 |
Stall cycles | 65.94-210.52 |
ROB full (events) | 20.78-128.97 |
RS full (events) | 74.49-101.78 |
Front-end | 47.83 |
Dispatch | 79.00 |
DIV/SQRT | 48.00 |
Data deps. | 1.00 |
Overall L1 | 79.00 |
Vectorization ratios (INT) |
all | 100% |
load | 100% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 100% |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 100% |
Vectorization ratios (FP) |
all | 98% |
load | 92% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 97% |
Vectorization ratios (INT+FP) |
all | 99% |
load | 94% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 98% |
Vector efficiency ratios (INT) |
all | 79% |
load | 80% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 100% |
add-sub | 88% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 65% |
Vector efficiency ratios (FP) |
all | 87% |
load | 93% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 80% |
Vector efficiency ratios (INT+FP) |
all | 83% |
load | 89% |
store | 100% |
mul | 100% |
add-sub | 92% |
fma | 100% |
div/sqrt | 100% |
other | 74% |
Instruction | Nb FU | P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | Latency | Recip. throughput |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
VADDPD %ZMM10,%ZMM7,%ZMM7{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM5,%ZMM7,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VPMULLQ %ZMM1,%ZMM26,%ZMM1 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM1,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x20(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VSCATTERQPD %ZMM2,(%RAX,%ZMM0,8){%K1} | 20 | 2.20 | 0.20 | 0 | 0 | 4 | 0.20 | 0.20 | 4 | 4 | 4 | 0.20 | 0 | 2-12 | 7 |
VPADDQ 0x4fe66(%RIP){1to8},%ZMM17,%ZMM17 | 1 | 0.50 | 0 | 0.33 | 0.33 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.67 |
ADD $0x8,%R14 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
CMP %R13,%R14 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
JAE 41d569 <_Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7+0x5e9> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
VMOVDQA64 %ZMM17,%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VMOVDQA64 %ZMM16,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
LEA 0x3e88e(%RIP),%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
CALL %RAX | 2 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0.50 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0 | 2.14 |
VPMOVQD %ZMM0,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPADDD 0xa0(%RSP),%YMM0,%YMM30 | 1 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VMOVDQA64 %ZMM17,%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VMOVDQA64 %ZMM16,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
CALLQ 0x6ed7c(%RIP) | 3 | 0.70 | 0.20 | 0.33 | 0.33 | 0.50 | 0.20 | 0.70 | 0.50 | 0.50 | 0.50 | 0.20 | 0.33 | 0 | 2.27 |
VPADDQ %ZMM19,%ZMM0,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPSLLQ $0x20,%ZMM0,%ZMM0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2-4 | 1 |
VPSRAQ $0x20,%ZMM0,%ZMM0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2-4 | 1 |
VPMOVSXDQ %YMM30,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPXOR %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM1,%ZMM20,%ZMM2 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM2,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM5,%XMM5,%XMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x68(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VGATHERQPD (%RAX,%ZMM2,8),%ZMM5{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VCMPPD $0x1,%ZMM5,%ZMM27,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPCMPEQD %YMM6,%YMM6,%YMM6 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VPADDD %YMM6,%YMM30,%YMM2 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPMOVSXDQ %YMM2,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPBLENDMQ %ZMM4,%ZMM1,%ZMM7{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM7,%ZMM22,%ZMM2 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM2,%ZMM3 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x8(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VGATHERQPD (%RAX,%ZMM3,8),%ZMM2{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPSUBD %YMM6,%YMM30,%YMM6 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.33 |
VPMINSD %YMM6,%YMM21,%YMM6 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPMOVSXDQ %YMM6,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VMOVDQA64 %ZMM6,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VXORPD %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERDPD (%R12,%YMM30,8),%ZMM9{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VMOVDQA64 %ZMM4,%ZMM6{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VANDPD %ZMM28,%ZMM5,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VDIVPD %ZMM2,%ZMM10,%ZMM10 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VXORPD %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R12,%ZMM6,8),%ZMM2{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VFMADD213PD %ZMM9,%ZMM10,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VDIVPD %ZMM2,%ZMM9,%ZMM2 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VPADDD 0x514ce(%RIP){1to8},%YMM30,%YMM9 | 1 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VPXOR %XMM6,%XMM6,%XMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM7,%ZMM23,%ZMM6 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM6,%ZMM6 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM11,%XMM11,%XMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%RDI,%ZMM6,8),%ZMM11{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPMOVSXDQ %YMM9,%ZMM8{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPXOR %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM8,%ZMM23,%ZMM9 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM9,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM12,%XMM12,%XMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%RDI,%ZMM9,8),%ZMM12{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPBLENDMQ %ZMM1,%ZMM4,%ZMM9{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM4,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM9,%ZMM23,%ZMM4 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM4,%ZMM4 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM13,%XMM13,%XMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%RDI,%ZMM4,8),%ZMM13{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VBROADCASTSD 0x4fcd7(%RIP),%ZMM4 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VSUBPD %ZMM10,%ZMM4,%ZMM4 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VSUBPD %ZMM12,%ZMM11,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VSUBPD %ZMM11,%ZMM13,%ZMM13 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM12,%ZMM13,%ZMM14 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VCMPPD $0x1,%ZMM14,%ZMM27,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VANDPD %ZMM28,%ZMM12,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VANDPD %ZMM28,%ZMM13,%ZMM14 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VMINPD %ZMM14,%ZMM12,%ZMM15 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM2,%ZMM12,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %ZMM14,%ZMM4,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM29,%ZMM12,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMINPD %ZMM12,%ZMM15,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VCMPPD $0x1,%ZMM13,%ZMM27,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VSUBPD %ZMM10,%ZMM31,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VXORPD %ZMM18,%ZMM10,%ZMM13 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
VMOVAPD %ZMM10,%ZMM13{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VFMADD231PD %ZMM13,%ZMM12,%ZMM11{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM5,%ZMM11,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VPXOR %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM1,%ZMM24,%ZMM10 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM10,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x28(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VSCATTERQPD %ZMM5,(%RAX,%ZMM10,8){%K1} | 20 | 2.20 | 0.20 | 0 | 0 | 4 | 0.20 | 0.20 | 4 | 4 | 4 | 0.20 | 0 | 2-12 | 7 |
VPMULLQ %ZMM7,%ZMM25,%ZMM7 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM7,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R15,%ZMM10,8),%ZMM7{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPMULLQ %ZMM8,%ZMM25,%ZMM8 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM8,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R15,%ZMM8,8),%ZMM10{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPXOR %XMM8,%XMM8,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM9,%ZMM25,%ZMM8 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM8,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM11,%XMM11,%XMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R15,%ZMM8,8),%ZMM11{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VSUBPD %ZMM10,%ZMM7,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VSUBPD %ZMM7,%ZMM11,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM9,%ZMM8,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VCMPPD $0x1,%ZMM10,%ZMM27,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
KORTESTB %K1,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VXORPD %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
JE 41d1e0 <_Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7+0x260> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
VPSLLQ $0x3,%ZMM3,%ZMM3 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2-4 | 1 |
VPADDQ 0x100(%RSP),%ZMM3,%ZMM3 | 1 | 0.50 | 0 | 0.33 | 0.33 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.67 |
VPSLLQ $0x3,%ZMM6,%ZMM6 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2-4 | 1 |
VPADDQ 0xc0(%RSP),%ZMM6,%ZMM6 | 1 | 0.50 | 0 | 0.33 | 0.33 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.67 |
KMOVQ %K1,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VGATHERQPD (,%ZMM6,1),%ZMM10{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VXORPD %XMM6,%XMM6,%XMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KMOVQ %K1,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VGATHERQPD (,%ZMM3,1),%ZMM6{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VANDPD %ZMM28,%ZMM5,%ZMM3 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VMULPD %ZMM10,%ZMM6,%ZMM6 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VDIVPD %ZMM6,%ZMM3,%ZMM3 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VCMPPD $0x1,%ZMM8,%ZMM27,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VSUBPD %ZMM3,%ZMM31,%ZMM3 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VXORPD %ZMM18,%ZMM3,%ZMM6 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
VMOVAPD %ZMM3,%ZMM6{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VANDPD %ZMM28,%ZMM9,%ZMM3 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VANDPD %ZMM28,%ZMM8,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VMINPD %ZMM8,%ZMM3,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM2,%ZMM3,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD213PD %ZMM2,%ZMM8,%ZMM4 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM29,%ZMM4,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMINPD %ZMM2,%ZMM9,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM2,%ZMM6,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
JMP 41d1e0 <_Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7+0x260> | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2.08 |
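The three VDIVPD in the table above each have a reciprocal throughput of 16 cycles on the divide pipe, which matches the "Cycles executing div or sqrt instructions" value of 48.00 reported for this path: $3 \times 16 = 48$. The shorter path below contains two VDIVPD, giving $2 \times 16 = 32$, and their average, 40.00, is the value reported for the combined loop.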
Function | _Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7 |
Source file and lines | advec_cell.cpp:157-202 |
Module | exec |
nb instructions | 127 |
nb uops | 252 |
loop length | 728 |
used x86 registers | 7 |
used mmx registers | 0 |
used xmm registers | 11 |
used ymm registers | 6 |
used zmm registers | 30 |
nb stack references | 5 |
ADD-SUB / MUL ratio | 1.17 |
micro-operation queue | 42.00 cycles |
front end | 42.00 cycles |
P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | |
---|---|---|---|---|---|---|---|---|---|---|---|---|
uops | 65.00 | 6.00 | 29.67 | 29.67 | 9.00 | 65.00 | 4.00 | 9.00 | 9.00 | 9.00 | 4.00 | 29.67 |
cycles | 65.00 | 8.00 | 29.67 | 29.67 | 9.00 | 65.00 | 4.00 | 9.00 | 9.00 | 9.00 | 4.00 | 29.67 |
Cycles executing div or sqrt instructions | 32.00 |
Longest recurrence chain latency (RecMII) | 1.00 |
FE+BE cycles | 90.39-202.24 |
Stall cycles | 56.89-168.73 |
ROB full (events) | 38.44-145.62 |
RS full (events) | 34.34-40.48 |
Front-end | 42.00 |
Dispatch | 65.00 |
DIV/SQRT | 32.00 |
Data deps. | 1.00 |
Overall L1 | 65.00 |
Vectorization ratios (INT) |
all | 100% |
load | 100% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 100% |
add-sub | 100% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 100% |
Vectorization ratios (FP) |
all | 98% |
load | 90% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 96% |
Vectorization ratios (INT+FP) |
all | 99% |
load | 92% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 98% |
Vector efficiency ratios (INT) |
all | 77% |
load | 66% |
store | NA (no store vectorizable/vectorized instructions) |
mul | 100% |
add-sub | 87% |
fma | NA (no fma vectorizable/vectorized instructions) |
other | 62% |
Vector efficiency ratios (FP) |
all | 85% |
load | 92% |
store | 100% |
mul | 100% |
add-sub | 100% |
fma | 100% |
div/sqrt | 100% |
other | 75% |
Vector efficiency ratios (INT+FP) |
all | 81% |
load | 86% |
store | 100% |
mul | 100% |
add-sub | 91% |
fma | 100% |
div/sqrt | 100% |
other | 69% |
Instruction | Nb FU | P0 | P1 | P2 | P3 | P4 | P5 | P6 | P7 | P8 | P9 | P10 | P11 | Latency | Recip. throughput |
---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
VADDPD %ZMM10,%ZMM7,%ZMM7{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM5,%ZMM7,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VPMULLQ %ZMM1,%ZMM26,%ZMM1 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM1,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x20(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VSCATTERQPD %ZMM2,(%RAX,%ZMM0,8){%K1} | 20 | 2.20 | 0.20 | 0 | 0 | 4 | 0.20 | 0.20 | 4 | 4 | 4 | 0.20 | 0 | 2-12 | 7 |
VPADDQ 0x4fe66(%RIP){1to8},%ZMM17,%ZMM17 | 1 | 0.50 | 0 | 0.33 | 0.33 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.67 |
ADD $0x8,%R14 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.17 |
CMP %R13,%R14 | 1 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0.20 | 0 | 0 | 0 | 0.20 | 0 | 1 | 0.20 |
JAE 41d569 <_Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7+0x5e9> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |
VMOVDQA64 %ZMM17,%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VMOVDQA64 %ZMM16,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
LEA 0x3e88e(%RIP),%RAX | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
CALL %RAX | 2 | 0.50 | 0 | 0 | 0 | 0.50 | 0 | 0.50 | 0.50 | 0.50 | 0.50 | 0 | 0 | 0 | 2.14 |
VPMOVQD %ZMM0,%YMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPADDD 0xa0(%RSP),%YMM0,%YMM30 | 1 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VMOVDQA64 %ZMM17,%ZMM0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VMOVDQA64 %ZMM16,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
CALLQ 0x6ed7c(%RIP) | 3 | 0.70 | 0.20 | 0.33 | 0.33 | 0.50 | 0.20 | 0.70 | 0.50 | 0.50 | 0.50 | 0.20 | 0.33 | 0 | 2.27 |
VPADDQ %ZMM19,%ZMM0,%ZMM0 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPSLLQ $0x20,%ZMM0,%ZMM0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2-4 | 1 |
VPSRAQ $0x20,%ZMM0,%ZMM0 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 2-4 | 1 |
VPMOVSXDQ %YMM30,%ZMM1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPXOR %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM1,%ZMM20,%ZMM2 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM2,%ZMM2 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM5,%XMM5,%XMM5 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x68(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VGATHERQPD (%RAX,%ZMM2,8),%ZMM5{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VCMPPD $0x1,%ZMM5,%ZMM27,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPCMPEQD %YMM6,%YMM6,%YMM6 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VPADDD %YMM6,%YMM30,%YMM2 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.33 |
VPMOVSXDQ %YMM2,%ZMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPBLENDMQ %ZMM4,%ZMM1,%ZMM7{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM7,%ZMM22,%ZMM2 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM2,%ZMM3 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x8(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VGATHERQPD (%RAX,%ZMM3,8),%ZMM2{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPSUBD %YMM6,%YMM30,%YMM6 | 1 | 0.33 | 0.33 | 0 | 0 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.33 |
VPMINSD %YMM6,%YMM21,%YMM6 | 1 | 0.50 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPMOVSXDQ %YMM6,%ZMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VMOVDQA64 %ZMM6,%ZMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VXORPD %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERDPD (%R12,%YMM30,8),%ZMM9{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VMOVDQA64 %ZMM4,%ZMM6{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VANDPD %ZMM28,%ZMM5,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VDIVPD %ZMM2,%ZMM10,%ZMM10 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VXORPD %XMM2,%XMM2,%XMM2 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R12,%ZMM6,8),%ZMM2{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VFMADD213PD %ZMM9,%ZMM10,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VDIVPD %ZMM2,%ZMM9,%ZMM2 | 3 | 2.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 22-24 | 16 |
VPADDD 0x514ce(%RIP){1to8},%YMM30,%YMM9 | 1 | 0.33 | 0.33 | 0.33 | 0.33 | 0 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.40 |
VPXOR %XMM6,%XMM6,%XMM6 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM7,%ZMM23,%ZMM6 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM6,%ZMM6 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM11,%XMM11,%XMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%RDI,%ZMM6,8),%ZMM11{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPMOVSXDQ %YMM9,%ZMM8{%K1} | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VPXOR %XMM9,%XMM9,%XMM9 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM8,%ZMM23,%ZMM9 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM9,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM12,%XMM12,%XMM12 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K2 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%RDI,%ZMM9,8),%ZMM12{%K2} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPBLENDMQ %ZMM1,%ZMM4,%ZMM9{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM4,%XMM4,%XMM4 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM9,%ZMM23,%ZMM4 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM4,%ZMM4 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM13,%XMM13,%XMM13 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%RDI,%ZMM4,8),%ZMM13{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VBROADCASTSD 0x4fcd7(%RIP),%ZMM4 | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 3 | 0.33 |
VSUBPD %ZMM10,%ZMM4,%ZMM4 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VSUBPD %ZMM12,%ZMM11,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VSUBPD %ZMM11,%ZMM13,%ZMM13 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM12,%ZMM13,%ZMM14 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VCMPPD $0x1,%ZMM14,%ZMM27,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VANDPD %ZMM28,%ZMM12,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VANDPD %ZMM28,%ZMM13,%ZMM14 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VMINPD %ZMM14,%ZMM12,%ZMM15 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM2,%ZMM12,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VFMADD231PD %ZMM14,%ZMM4,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM29,%ZMM12,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMINPD %ZMM12,%ZMM15,%ZMM12 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VCMPPD $0x1,%ZMM13,%ZMM27,%K2 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
VSUBPD %ZMM10,%ZMM31,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VXORPD %ZMM18,%ZMM10,%ZMM13 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.50 |
VMOVAPD %ZMM10,%ZMM13{%K2} | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0-1 | 0.17 |
VFMADD231PD %ZMM13,%ZMM12,%ZMM11{%K1} | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VMULPD %ZMM5,%ZMM11,%ZMM5 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VPXOR %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM1,%ZMM24,%ZMM10 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM10,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
MOV 0x28(%RSP),%RAX | 1 | 0 | 0 | 0.33 | 0.33 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.33 | 1 | 0.33 |
VSCATTERQPD %ZMM5,(%RAX,%ZMM10,8){%K1} | 20 | 2.20 | 0.20 | 0 | 0 | 4 | 0.20 | 0.20 | 4 | 4 | 4 | 0.20 | 0 | 2-12 | 7 |
VPMULLQ %ZMM7,%ZMM25,%ZMM7 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM7,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VPXOR %XMM7,%XMM7,%XMM7 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R15,%ZMM10,8),%ZMM7{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPMULLQ %ZMM8,%ZMM25,%ZMM8 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM8,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R15,%ZMM8,8),%ZMM10{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VPXOR %XMM8,%XMM8,%XMM8 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
VPMULLQ %ZMM9,%ZMM25,%ZMM8 | 5 | 1.50 | 0 | 0 | 0 | 0 | 1.50 | 0 | 0 | 0 | 0 | 0 | 0 | 15 | 1.50 |
VPADDQ %ZMM0,%ZMM8,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 0.50 |
VXORPD %XMM11,%XMM11,%XMM11 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
KXNORW %K0,%K0,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 | 1 |
VGATHERQPD (%R15,%ZMM8,8),%ZMM11{%K1} | 5 | 1 | 0 | 2.67 | 2.67 | 0 | 2 | 0 | 0 | 0 | 0 | 0 | 2.67 | 0-29 | 2.67 |
VSUBPD %ZMM10,%ZMM7,%ZMM9 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VSUBPD %ZMM7,%ZMM11,%ZMM8 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 0.50 |
VMULPD %ZMM9,%ZMM8,%ZMM10 | 1 | 0.50 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 4 | 0.50 |
VCMPPD $0x1,%ZMM10,%ZMM27,%K1 | 1 | 0 | 0 | 0 | 0 | 0 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 3 | 1 |
KORTESTB %K1,%K1 | 1 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 1 |
VXORPD %XMM10,%XMM10,%XMM10 | 1 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0 | 0.17 |
JE 41d1e0 <_Z17advec_cell_kerneliiiiiiRN6clover8Buffer1DIdEES2_RNS_8Buffer2DIdEES5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_S5_.extracted.7+0x260> | 1 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0.50 | 0 | 0 | 0 | 0 | 0 | 0 | 0.50 |