Skip to content
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
7 changes: 7 additions & 0 deletions streams/results_schema.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,7 @@
import pydantic


class Streams_Results(pydantic.BaseModel):
    """Validation schema for the transposed streams results JSON.

    The wrapper converts the per-run CSV to JSON (via csv_to_json
    --transpose) and verify_results checks it against this model.

    STREAM reports Copy/Scale/Add/Triad as best-rate throughput in
    MB/s, which are floating-point values, so the rate fields are
    declared ``float`` (integer inputs still validate); every rate
    must be strictly positive.
    """

    # "Array_sizes<...>" header line emitted by process_list.
    Array_sizes: str
    Copy: float = pydantic.Field(gt=0)
    Scale: float = pydantic.Field(gt=0)
    Add: float = pydantic.Field(gt=0)
    Triad: float = pydantic.Field(gt=0)
8 changes: 5 additions & 3 deletions streams/streams_extra/run_stream
Original file line number Diff line number Diff line change
Expand Up @@ -37,6 +37,7 @@ top_dir=`pwd`
pcp=0
pcpdir=""

source ${TOOLS_BIN}/error_codes
#
# Define options
#
Expand Down Expand Up @@ -129,7 +130,7 @@ while [[ $# -gt 0 ]]; do
;;
--usage)
echo usage to come
exit
exit $E_USAGE
;;
--)
break;
Expand Down Expand Up @@ -194,7 +195,7 @@ build_images()
if [ $? -ne 0 ]; then
if [ -z "$streams_exec" ]; then
echo Compilation of streams failed.
exit 1
exit $E_GENERAL
else
echo Could not compile streams with $use_cache size, skipping
fi
Expand Down Expand Up @@ -224,7 +225,7 @@ build_images()
gcc ${MOPT} -fopenmp -mcmodel=large ${optim_opt} -DSTREAM_ARRAY_SIZE=${test_size} stream_omp_5_10.c -o ${stream} -fno-pic
if [ $? -ne 0 ]; then
echo Compilation of streams failed.
exit 1
exit $E_GENERAL
fi
done
fi
Expand Down Expand Up @@ -354,3 +355,4 @@ if [[ $pcp -eq 1 ]]; then
stop_pcp
shutdown_pcp
fi
exit $E_SUCCESS
107 changes: 62 additions & 45 deletions streams/streams_run
Original file line number Diff line number Diff line change
Expand Up @@ -18,6 +18,8 @@
#

arguments="$@"
csv_file=""
script_dir=$(realpath $(dirname $0))
array_size=""
streams_wrapper_version=1.0
curdir=`pwd`
Expand Down Expand Up @@ -55,25 +57,55 @@ test_name="streams"
pcp=""
pdir=""

# Location of the shared test_tools wrappers; exported so every helper
# invoked below (gather_data, general_setup, run_stream, ...) can find
# them through TOOLS_BIN.
TOOLS_BIN="${HOME}/test_tools"
export TOOLS_BIN
tools_git=https://github.com/redhat-performance/test_tools-wrappers

if [ ! -d "${TOOLS_BIN}" ]; then
    if ! git clone "$tools_git" "${TOOLS_BIN}"; then
        echo "pulling git $tools_git failed." >&2
        # error_codes has not been sourced yet at this point, so the
        # symbolic $E_* values are unavailable; use the literal code.
        exit 101
    fi
else
    echo "Found an existing test_tools directory, using it."
fi
source "${TOOLS_BIN}/error_codes"

#
# Report results
#

process_list()
{
echo $number_sockets Socket >> ../results_${test_name}.csv
echo "Array sizes"$array_size >> ../results_${test_name}.csv
echo Copy,$copy,$start_time,$end_time >> ../results_${test_name}.csv
echo Scale,$scale,$start_time,$end_time >> ../results_${test_name}.csv
echo Add,$add,$start_time,$end_time >> ../results_${test_name}.csv
echo Triad,$triad,$start_time,$end_time >> ../results_${test_name}.csv
echo "" >> ../results_${test_name}.csv
line_size=`echo $copy | wc -c`
if [ $line_size -lt 2 ]; then
echo Failed >> /tmp/test_results_report
else
echo Ran >> /tmp/test_results_report
echo "# $number_sockets Socket" >> $csv_file
echo "Array_sizes"$array_size >> $csv_file
echo Copy,$copy,$start_time,$end_time >> $csv_file
echo Scale,$scale,$start_time,$end_time >> $csv_file
echo Add,$add,$start_time,$end_time >> $csv_file
echo Triad,$triad,$start_time,$end_time >> $csv_file
echo "" >> $csv_file
#
# Due to we are transposing the csv file, we need to remove the
# timestamps from the end, else we will get Copy: <date> as a field
# to check.
#
tmpfile=$(mktemp /tmp/streams_reduce_.XXXXXX)
cut -d',' -f1-4 $csv_file > $tmpfile
${TOOLS_BIN}/csv_to_json $to_json_flags --transpose --csv_file $tmpfile --output_file results_streams.json
lrtc=$?
if [[ $lrtc -ne 0 ]]; then
echo "Verification of streams data failed"
exit $lrtc
fi
# Validate the generated JSON against the pydantic schema.  On failure
# record the status in rtc so the script exits non-zero at the end,
# but keep going so the remaining results are still collected.
${TOOLS_BIN}/verify_results $to_verify_flags --schema_file $script_dir/results_schema.py --class_name Streams_Results --file results_streams.json
lrtc=$?
if [[ $lrtc -ne 0 ]]; then
    echo Verification of streams data failed.
    # Bug fix: was "rtc=$ltrc" (transposed letters), which expanded to
    # an empty string and silently discarded the failure code.
    rtc=$lrtc
fi
rm $tmpfile
rm -f results_streams.json
}

process_results()
Expand Down Expand Up @@ -134,7 +166,6 @@ process_results()
else
if [ $current_socket -ne $number_sockets ]; then
process_list
field_index=1
fi
number_sockets=$current_socket
fi
Expand Down Expand Up @@ -248,7 +279,8 @@ retrieve_results()
retrieve_sys_config()
{
streams_vers=`grep "^STREAM version" stream* | cut -d':' -f 3 | cut -d' ' -f2 | sort -u`
$TOOLS_BIN/test_header_info --results_file ../results_${test_name}.csv --host $to_configuration --sys_type $to_sys_type \
csv_file=../results_${test_name}_opt_$1.csv
$TOOLS_BIN/test_header_info --results_file $csv_file --host $to_configuration --sys_type $to_sys_type \
--tuned $to_tuned_setting --results_version $streams_wrapper_version --test_name $test_name \
--meta_output "Optimization level: ${1}" \
--meta_output "kernel_rev "`grep "^kernel:" stream* | cut -d: -f 3 | sort -u`" \
Expand Down Expand Up @@ -298,7 +330,7 @@ run_stream()
./run_stream --cache_cap_size ${cache_cap_size} --iterations ${to_times_to_run} --cache_start_size $cache_start_factor --optimize_lvl ${1} --cache_multiply $cache_multiply --numb_sizes $nsizes --thread_multiply $threads_multiple --results_dir ${results_dir} --host ${to_configuration} --size_list ${size_list} --top_dir $curdir > /tmp/streams_results/${2}_opt_${1} $pcp
if [ $? -ne 0 ]; then
echo "Execution of run stream failed."
exit 1
exit $E_GENERAL
fi
mv ${results_dir}_* /tmp/streams_results
cd ..
Expand All @@ -314,7 +346,7 @@ streams_run()
fi
}

tools_git=https://github.com/redhat-performance/test_tools-wrappers

usage()
{
echo Usage ${1}:
Expand All @@ -332,7 +364,7 @@ usage()
echo "--size_list <x,y...>: List of array sizes in byte"
echo "--threads_multiple <value>: Multiply number threads by <value>. Default is 2"
echo "--tools_git <value>: git repo to retrieve the required tools from, default is ${tools_git}"
source test_tools/general_setup --usage
source ${TOOLS_BIN}/general_setup --usage
}


Expand All @@ -358,17 +390,6 @@ for arg in "$@"; do
show_usage=1
fi
done

if [ ! -d "test_tools" ]; then
git clone $tools_git test_tools
if [ $? -ne 0 ]; then
echo pulling git $tools_git failed.
exit 1
fi
else
echo Found an existing test_tools directory, using it.
fi

if [ $show_usage -eq 1 ]; then
usage $0
fi
Expand All @@ -387,13 +408,12 @@ fi
# to_tuned_setting: tuned setting
#

${curdir}/test_tools/gather_data ${curdir}
${TOOLS_BIN}/gather_data ${curdir}
#
# We want a default of 5 iterations
#
source test_tools/general_setup "$@" --iteration_default 5
source ${TOOLS_BIN}/general_setup "$@" --iteration_default 5

export TOOLS_BIN
if [ ! -f "/tmp/${test_name}.out" ]; then
${TOOLS_BIN}/invoke_test --test_name ${test_name} --command ${0} --options "${arguments}"
exit $?
Expand All @@ -402,9 +422,10 @@ fi
# Install required packaging.
#
${TOOLS_BIN}/package_tool --wrapper_config ${run_dir}/streams.json --no_packages $to_no_pkg_install
if [[ $? -ne 0 ]]; then
echo Packaging installed failed.
exit 1
# Capture package_tool's exit status immediately; after the assignment
# $? reflects the assignment itself (always 0), so the status must be
# tested via $lrtc, not by re-reading $?.
lrtc=$?
if [[ $lrtc -ne 0 ]]; then
    echo Package installation failed.
    exit $lrtc
fi

#
Expand Down Expand Up @@ -450,15 +471,15 @@ while [[ $# -gt 0 ]]; do
cache_multiply=$2
if [ $cache_multiply -lt 2 ]; then
echo Error: cache multiply by must be greater then 1.
exit 1
exit $E_GENERAL
fi
shift 2
;;
--cache_start_factor)
cache_start_factor=$2
if [ $cache_start_factor -lt 1 ]; then
echo Error: cache start factor must be greater then 0.
exit 1
exit $E_GENERAL
fi
shift 2
;;
Expand Down Expand Up @@ -486,7 +507,7 @@ while [[ $# -gt 0 ]]; do
threads_multiple=${2}
if [ $threads_multiple -lt 2 ]; then
echo Error: threads_multiple must be greater then 1.
exit 1
exit $E_GENERAL
fi
shift 2
;;
Expand Down Expand Up @@ -541,12 +562,11 @@ else
mv /tmp/streams_results $results_dir
fi
mv ${run_dir}/streams_build_options $results_dir
${curdir}/test_tools/move_data $curdir $results_dir
${TOOLS_BIN}/move_data $curdir $results_dir
#
# report the results
#
pushd $results_dir/streams_results > /dev/null
rm results_${test_name}.csv 2> /dev/null
for rdir in `ls -d results_streams*`; do
if [ -d $rdir ]; then
copy_dirs=${copy_dirs}${copy_separ}$rdir
Expand All @@ -555,15 +575,12 @@ for rdir in `ls -d results_streams*`; do
retrieve_results
uchars=`echo $rdir | awk -v RS='_' 'END{print NR}'`
opt_level=`echo $rdir | cut -d'_' -f ${uchars} | cut -d'-' -f2`
if [ $front_meta_data_recorded -eq 0 ]; then
$TOOLS_BIN/test_header_info --front_matter --results_file ../results_${test_name}.csv --host $to_configuration --sys_type $to_sys_type --tuned $to_tuned_setting --results_version $streams_wrapper_version --test_name $test_name
front_meta_data_recorded=1
fi
retrieve_sys_config $opt_level
process_results results_${test_name}.wrkr
rm results_${test_name}.wrkr 2> /dev/null
popd > /dev/null
fi
done
${curdir}/test_tools/save_results --curdir $curdir --home_root $to_home_root --results ${curdir}/${results_dir}/streams_results/results_streams.csv --test_name $test_name --tuned_setting=$to_tuned_setting --version NONE --user $to_user --other_files "/tmp/test_results_report" --copy_dir "${curdir}/${results_dir}/streams_results $pdir"
exit 0

${TOOLS_BIN}/save_results --curdir $curdir --home_root $to_home_root --test_name $test_name --tuned_setting=$to_tuned_setting --version NONE --user $to_user --other_files "/tmp/test_results_report" --copy_dir "${curdir}/${results_dir}/streams_results $pdir"
exit $rtc