bachelor-thesis/chapters/thesis/appendix02_timing.sh

#!/bin/bash
#
# Activate Bash Strict Mode
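# -e aborts on the first failing command, -u treats unset variables as errors,
# and -o pipefail makes a pipeline fail if any command in it fails.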
set -euo pipefail
main() {
  # $1 is the number of runs, the remaining arguments are the URLs to measure
  {
    echo -e "URL\tRuns\tStDev\tMin (ms)\tAvg (ms)\tMax (ms)"
    for url in "${@:2}"; do
      get_statistics "$url" "$1"
    done
    # the commented-out pipe through column would align the tab-separated fields
  } #| column -s $'\t' -t
}
get_statistics() {
  # $1 is the URL to call, $2 the number of runs
  # Initialize the variables
  local min=1000000000
  local max=0
  local dur=0
  local durQ=0

  # Call the URL the requested number of times and track the minimum, the
  # maximum, the sum of the durations and the sum of their squares
  for i in $(seq 1 "$2"); do
    local gp=$(( $(get_posts "$1") / 1000000 )) # from ns to ms
    if [[ $gp -gt $max ]]; then
      max=$gp
    fi
    if [[ $gp -lt $min ]]; then
      min=$gp
    fi
    dur=$(( dur + gp ))
    durQ=$(( durQ + gp * gp ))
  done

  # Average and (integer) standard deviation via the shortcut Var(X) = E[X^2] - E[X]^2
  local avg=$(( dur / $2 ))
  local avgPow=$(( avg * avg ))
  local stdev=$( echo "sqrt(($durQ / $2) - $avgPow)" | bc )

  # output the statistic values
  echo -e "$1\t$2\t$stdev\t$min\t$avg\t$max"
}
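# A small worked example of the shortcut above, with hypothetical durations of
# 10 ms, 12 ms and 14 ms over 3 runs: dur = 36, durQ = 100 + 144 + 196 = 440,
# avg = 36 / 3 = 12 and avgPow = 144. bc evaluates sqrt((440 / 3) - 144) with
# integer arithmetic as sqrt(146 - 144) = sqrt(2) = 1, so the reported StDev of
# 1 ms slightly underestimates the exact population value of about 1.63 ms.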
get_posts() {
  # Call the URL and measure the elapsed wall-clock time in nanoseconds
  local start=$(date +%s%N)
  curl --silent --show-error "$1" > /dev/null
  local stop=$(date +%s%N)
  local dur=$(( stop - start ))
  echo "$dur"
}
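# date +%s%N prints the Unix time in whole nanoseconds (GNU coreutils), so the
# difference of the two readings is the request duration in ns; a difference of,
# say, 2345678901 ns becomes 2345 ms after the integer division by 1000000 in
# get_statistics. Note that the measurement includes the start-up time of curl.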
# The base URL of the deployed application
hostname="https://briefedition.wedekind.h-da.de"
# The array of URLs to measure
url_arr=(
  "$hostname/index.xhtml"
  "$hostname/view/document/list.xhtml"
  "$hostname/view/correspondent/list.xhtml"
  "$hostname/view/person/list.xhtml"
)
# Measure every URL with 10 runs each
main 10 "${url_arr[@]}"
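# Running the script prints one tab-separated line per URL after the header,
# for example (hypothetical numbers, not measured results):
#   https://briefedition.wedekind.h-da.de/index.xhtml   10   5   38   46   59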