options

ONE-View Configuration File

batch_command
bucket_threshold = 1
is_all_external_libraries_in_cc = false
ranges_count = 20
localbinary = /beegfs/hackathon/users/eoseret/qaas_runs_test/175-924-9259/intel/llama.cpp/run/oneview_runs/defaults/gcc/oneview_results_1759250338/binaries/exec
excluded_areas = { },
decan_multi_variant = true
dataset_handler = link
cqa_params = { },
_scalability_bins = { },
profile_stop = { unit = none ; value = 0 ; },
lprof_params = btm=fp
filter_decan = { type = all ; },
batch_script
maximal_path_number = 4
base_run_index = 0
multiruns_params = { },
delay = 0
object_coverage_threshold = 0.1
_is_loaded = /beegfs/hackathon/users/eoseret/qaas_runs_test/175-924-9259/intel/llama.cpp/run/oneview_runs/defaults/gcc/oneview_run_1759250338/config.json
repetitions = 31
outliers_count = 0
thread_filter_threshold = 1%
scalability_reference = main
job_submission_threshold = 0s
environment_variables = { },
script_variables = { },
vprof_params
keep_executable_location = false
is_sudo_available = false
number_nodes = 1
decan_threshold = 500
number_processes = 1
base_run_name = gcc_0
lprof_post_process_params = { },
profile_start = { unit = none ; value = 0 ; },
throughput_max_core = 0
__filter = true
binary = /beegfs/hackathon/users/eoseret/qaas_runs_test/175-924-9259/intel/llama.cpp/run/base_runs/defaults/gcc/exec
throughput_core = -1
optimizer_loop_count = 10
external_libraries = { 1 = /beegfs/hackathon/users/eoseret/qaas_runs_test/175-924-9259/intel/llama.cpp/build/llama.cpp/../gcc/bin/libggml-base.so ; 2 = /beegfs/hackathon/users/eoseret/qaas_runs_test/175-924-9259/intel/llama.cpp/build/llama.cpp/../gcc/bin/libggml-blas.so ; 3 = /beegfs/hackathon/users/eoseret/qaas_runs_test/175-924-9259/intel/llama.cpp/build/llama.cpp/../gcc/bin/libggml-cpu.so ; 4 = /beegfs/hackathon/users/eoseret/qaas_runs_test/175-924-9259/intel/llama.cpp/build/llama.cpp/../gcc/bin/libggml.so ; 5 = /beegfs/hackathon/users/eoseret/qaas_runs_test/175-924-9259/intel/llama.cpp/build/llama.cpp/../gcc/bin/libllama.so ; },
mpi_command = mpirun -n <number_processes>
run_directory = /beegfs/hackathon/users/eoseret/qaas_runs_test/175-924-9259/intel/llama.cpp/run/oneview_runs/defaults/gcc/oneview_run_1759250338
included_areas = { },
_is_custom_categories = false
custom_categories = { { type = library ; value = libggml-base.so ; },
{ type = library ; value = libggml-blas.so ; },
{ type = library ; value = libggml-cpu.so ; },
{ type = library ; value = libggml.so ; },
{ type = library ; value = libllama.so ; },
},
dataset
filter = { type = number ; value = 1 ; },
additional_hwc = { },
decan_all_variants = true
decan_params
basebinary = exec
run_command = <executable> -m meta-llama-3.1-8b-instruct-Q8_0.gguf -no-cnv -t 192 -n 512 -p "what is a LLM?" --seed 0
pinning_command
frequencies = { 1 = 0 ; },
×