* Run simulations

To run all the simulations, execute the following call (the nested arguments are only there to force the four experiments to run one after the other):

#+NAME: runSim
#+CALL: runBW(lat=runLat(nbSens=runNbSensors(nbHop=runNbHop())))

#+RESULTS: runSim
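
The call above can be executed interactively with =C-c C-c=. A possible non-interactive alternative is to evaluate the whole file with Emacs in batch mode, in the same spirit as the PDF export command used at the end of this document. This is only a sketch: the file name =simulations.org= is an assumption, and the command (re)evaluates every src block and call in the buffer.

#+BEGIN_SRC bash :eval never
# Assumption: this file is named simulations.org; adapt the name if needed.
# Disabling org-confirm-babel-evaluate avoids the confirmation prompt,
# which would otherwise abort a batch run.
emacs simulations.org --batch \
      --eval "(setq org-confirm-babel-evaluate nil)" \
      --eval "(org-babel-do-load-languages 'org-babel-load-languages '((shell . t) (R . t)))" \
      -f org-babel-execute-buffer --kill
#+END_SRC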

** Experiments

*** Bandwidth

#+NAME: runBW
#+BEGIN_SRC bash :noweb yes :results output
<<singleRun>>
simKey="BW"
sensorsNumber=10
for linksBandwidth in $(seq 10 20 100)
do
    run
done
#+END_SRC

#+RESULTS: runBW
*** Latency

#+NAME: runLat
#+BEGIN_SRC bash :noweb yes :results output
<<singleRun>>
simKey="LATENCY"
sensorsNumber=10
for linksLatency in $(seq 1 1 10)
do
    run
done
#+END_SRC

#+RESULTS: runLat
*** Number of sensors

#+NAME: runNbSensors
#+BEGIN_SRC bash :noweb yes :results output
<<singleRun>>
simKey="NBSENSORS"
for sensorsNumber in $(seq 1 5)
do
    run
done
#+END_SRC

#+RESULTS:

*** Number of hops

#+NAME: runNbHop
#+BEGIN_SRC bash :noweb yes :results output
<<singleRun>>
simKey="NBHOP"
for nbHop in $(seq 1 10)
do
    run
done
#+END_SRC
** Single Run

#+NAME: singleRun
#+BEGIN_SRC bash :eval never :noweb yes :results output
simulator="simulator/simulator"
parseEnergyScript="./parseEnergy.awk"
parseDelayScript="./parseDelay.awk"
logFolder="logs/"
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${NS3_PATH}/build/lib

# Default Parameters
sensorsSendInterval=10
sensorsPktSize=5 # 1-byte temperature (-128 to +127 °C) and 4-byte sensorsId
sensorsNumber=10
nbHop=10 # Cf. the AC/Yunbo paper
linksBandwidth=10
linksLatency=2
simKey="NOKEY"

run () {
    logFile="${logFolder}/${simKey}_${sensorsSendInterval}SSI_${sensorsPktSize}SPS_${sensorsNumber}SN_${nbHop}NH_${linksBandwidth}LB_${linksLatency}LL.org"
    # Skip this configuration if its log file already exists
    [ -f "$logFile" ] && return

    simCMD="$simulator --sensorsSendInterval=${sensorsSendInterval} --sensorsPktSize=${sensorsPktSize} --sensorsNumber=${sensorsNumber} --nbHop=${nbHop} --linksBandwidth=${linksBandwidth} --linksLatency=${linksLatency} 2>&1"
    log=$(bash -c "$simCMD")

    # Compute some metrics
    energyLog=$(echo "$log" | $parseEnergyScript)
    avgDelay=$(echo "$log" | $parseDelayScript)
    # Sum the per-node energy (second CSV column), skipping the header line
    totalEnergy=$(echo "$energyLog"| awk 'BEGIN{power=0;FS=","}NR!=1{power+=$2}END{print(power)}')
    nbPacketCloud=$(echo "$log"|grep -c "CloudSwitch receive")
    nbNodes=$(echo "$log"|awk '/Simulation used/{print($3)}')
    ns3Version=$(echo "$log"|awk '/NS-3 Version/{print($3)}')

    # Save logs
    echo -e "#+TITLE: $(date) ns-3 (version ${ns3Version}) simulation\n" > $logFile
    echo "* Environment Variables" >> $logFile
    env >> $logFile
    echo "* Full Command" >> $logFile
    echo "$simCMD" >> $logFile
    echo "* Output" >> $logFile
    echo "$log" >> $logFile
    echo "* Energy CSV (negative nodeId = WIFI, 0 = AP (Wireless+Wired), positive nodeId = ECOFEN)" >> $logFile
    echo "$energyLog" >> $logFile
    echo "* Metrics" >> $logFile
    echo "-METRICSLINE- sensorsSendInterval:${sensorsSendInterval} sensorsPktSize:${sensorsPktSize} sensorsNumber:${sensorsNumber} nbHop:${nbHop} linksBandwidth:${linksBandwidth} linksLatency:${linksLatency} totalEnergy:$totalEnergy nbPacketCloud:$nbPacketCloud nbNodes:$nbNodes avgDelay:${avgDelay} ns3Version:${ns3Version} simKey:${simKey}" >> $logFile
}
#+END_SRC
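
For debugging, a single simulation can also be launched by hand with the same binary and the same default parameters as =singleRun=. This is just a sketch; it assumes the command is run from the repository root and that =NS3_PATH= is set, as in the block above.

#+BEGIN_SRC bash :eval never
# Make the ns-3 shared libraries visible, as in singleRun (NS3_PATH must be set)
export LD_LIBRARY_PATH=${LD_LIBRARY_PATH}:${NS3_PATH}/build/lib

# One run with the default parameters of singleRun; adjust any flag as needed
simulator/simulator --sensorsSendInterval=10 --sensorsPktSize=5 \
                    --sensorsNumber=10 --nbHop=10 \
                    --linksBandwidth=10 --linksLatency=2
#+END_SRC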

* Logs Analysis

To generate all the plots, execute the following call (the nesting makes logToCSV run first, then genAllPlots, then plotToPDF):

#+NAME: runAnalysis
#+CALL: plotToPDF(plots=genAllPlots(data=logToCSV()))

#+RESULTS: runAnalysis
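
After the call finishes, the expected artifacts can be checked from the shell. This is only a quick sanity check: the exact PNG names depend on what =genAllPlots= plotted, and =plots/plots.pdf= is produced by the LaTeX export in =plotToPDF=.

#+BEGIN_SRC bash :eval never
# One PNG per (simKey, X, Y) combination plotted by genAllPlots,
# plus the merged PDF produced by plotToPDF
ls -l logs/data.csv plots/*.png plots/plots.pdf
#+END_SRC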

** R Scripts

*** Generate all plots script

Available variables:

|---------------------|
| Name                |
|---------------------|
| sensorsSendInterval |
| sensorsPktSize      |
| sensorsNumber       |
| nbHop               |
| linksBandwidth      |
| linksLatency        |
| totalEnergy         |
| nbPacketCloud       |
| nbNodes             |
| avgDelay            |
| simKey              |
|---------------------|
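
The same list (plus =ns3Version=) can also be recovered from the header of the generated CSV, which is handy when new metrics are added to the =-METRICSLINE-= entry:

#+BEGIN_SRC bash :eval never
# Print one available variable name per line, taken from the CSV header
head -n 1 logs/data.csv | tr ',' '\n'
#+END_SRC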

#+NAME: genAllPlots
#+BEGIN_SRC R :noweb yes :results output
<<RUtils>>
easyPlot("linksLatency","totalEnergy", "LATENCY")
easyPlot("linksBandwidth","totalEnergy", "BW")
easyPlot("sensorsNumber","totalEnergy", "NBSENSORS")
easyPlot("nbHop","totalEnergy", "NBHOP")
#+END_SRC

*** R Utils

RUtils loads the logs (=logs/data.csv=) and provides a simple plotting
function for them: =easyPlot(X, Y, KEY)= keeps the rows whose =simKey= equals
=KEY=, plots =Y= against =X=, and saves the figure to =plots/KEY-X_Y.png=.

#+NAME: RUtils
#+BEGIN_SRC R :eval never
library("tidyverse")

# Feel free to update the following axis labels
labels=c(nbNodes="Number of nodes",sensorsNumber="Number of sensors",totalEnergy="Total Energy (J)",
         nbHop="Number of hops (AP to Cloud)", linksBandwidth="Links Bandwidth (Mbps)",
         linksLatency="Links Latency (ms)")

# Load Data
data=read_csv("logs/data.csv")

# Get label according to varName
getLabel=function(varName){
    if(is.na(labels[varName])){
        return(varName)
    }
    return(labels[varName])
}

easyPlot=function(X,Y,KEY){
    curData=data%>%filter(simKey==KEY)
    stopifnot(NROW(curData)>0)
    ggplot(curData,aes_string(x=X,y=Y))+geom_point()+geom_line()+xlab(getLabel(X))+ylab(getLabel(Y))
    ggsave(paste0("plots/",KEY,"-",X,"_",Y,".png"))
}
#+END_SRC

** Plots -> PDF

Merge all the plots from the plots/ folder into a single PDF file
(=plots/plots.pdf=), via an Org file exported to LaTeX.

#+NAME: plotToPDF
#+BEGIN_SRC bash :results output :noweb yes
orgFile="plots/plots.org"
<<singleRun>> # To get all default arguments

# Write helper function
function write {
    echo "$1" >> $orgFile
}

echo "#+TITLE: Analysis" > $orgFile
write "#+LATEX_HEADER: \usepackage{fullpage}"
write "#+OPTIONS: toc:nil"

# Default arguments
write '\begin{center}'
write '\begin{tabular}{lr}'
write 'Parameters & Values\\'
write '\hline'
write "sensorsPktSize & ${sensorsPktSize} bytes\\\\"
write "sensorsSendInterval & ${sensorsSendInterval}s\\\\"
write "sensorsNumber & ${sensorsNumber}\\\\"
write "nbHop & ${nbHop}\\\\"
write "linksBandwidth & ${linksBandwidth}Mbps\\\\"
write "linksLatency & ${linksLatency}ms\\\\"
write '\end{tabular}'
write '\newline'
write '\end{center}'

for plot in $(find plots/ -type f -name "*.png")
do
    write "\includegraphics[width=0.5\linewidth]{$(basename ${plot})}"
done

# Export to pdf
emacs $orgFile --batch -f org-latex-export-to-pdf --kill
#+END_SRC
#+RESULTS:
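
The exported PDF ends up next to the generated Org file, i.e. at =plots/plots.pdf=. To open it (=xdg-open= is just one possible viewer):

#+BEGIN_SRC bash :eval never
# org-latex-export-to-pdf writes the PDF next to plots/plots.org
xdg-open plots/plots.pdf
#+END_SRC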

** Log -> CSV

logToCSV extracts the useful data (the =-METRICSLINE-= entries) from the logs
and puts them into =logs/data.csv=.

#+NAME: logToCSV
#+BEGIN_SRC bash :results none
csvOutput="logs/data.csv"

# First save csv header line
aLog=$(find logs/ -type f -name "*.org"|head -n 1)
metrics=$(cat $aLog|grep "\-METRICSLINE\-"|sed "s/-METRICSLINE-//g")
echo $metrics | awk '{for(i=1;i<=NF;i++){split($i,elem,":");printf(elem[1]);if(i<NF)printf(",");else{print("")}}}' > $csvOutput

# Second save all values
for logFile in $(find logs/ -type f -name "*.org")
do
    metrics=$(cat $logFile|grep "\-METRICSLINE\-"|sed "s/-METRICSLINE-//g")
    echo $metrics | awk '{for(i=1;i<=NF;i++){split($i,elem,":");printf(elem[2]);if(i<NF)printf(",");else{print("")}}}' >> $csvOutput
done
#+END_SRC
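
For a quick look at the extracted metrics from the shell (=column= and =less= are only used here for readability):

#+BEGIN_SRC bash :eval never
# Pretty-print the extracted metrics, one run per row
column -s ',' -t < logs/data.csv | less -S
#+END_SRC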