options

ONE-View Configuration File

bucket_threshold = 1
is_all_external_libraries_in_cc = false
ranges_count = 20
_is_custom_categories = false
decan_multi_variant = true
dataset_handler = link
cqa_params = { },
localbinary = /home/eoseret/Applications/llama.cpp/DATA/OV3_no_rpath_r2/binaries/llama-cli
profile_stop = { unit = none ; value = 0 ; },
lprof_params =
run_command = <executable> -m /home/eoseret/Applications/llama.cpp/DATA/meta-llama-3.1-8b-instruct-Q8_0.gguf -no-cnv -t 52 -n 512 -p "what is a LLM?" --seed 0
vprof_params =
maximal_path_number = 4
base_run_index = 0
_scalability_bins = { },
multiruns_params = { },
basebinary = llama-cli
throughput_max_core = 0
outliers_count = 0
thread_filter_threshold = 1%
delay = 0
repetitions = 31
job_submission_threshold = 0s
object_coverage_threshold = 0.01
environment_variables = { },
scalability_reference = main
is_sudo_available = false
number_nodes = 1
keep_executable_location = false
throughput_core = -1
base_run_name = run_0
additional_hwc = { },
optimizer_loop_count = 10
lprof_post_process_params = { },
dataset =
binary = ../build_icx_no_rpath/bin/llama-cli
number_processes = 1
custom_categories = { { type = library ; value = libggml-cpu.so ; },
},
external_libraries = { 1 = libggml-cpu.so ; },
mpi_command =
run_directory = .
included_areas = { },
profile_start = { unit = none ; value = 0 ; },
filter_decan = { type = all ; },
__filter = true
filter = { type = number ; value = 1 ; },
decan_threshold = 500
decan_all_variants = true
decan_params =
scripts = { files = { },
variables = { },
},
excluded_areas = { },
pinning_command =
frequencies = { 1 = 0 ; },