Debug, comment and add g5k energy script

Loic Guegan 2019-04-23 16:24:43 +02:00
parent 5720cf71f4
commit 39507e0bb5
2 changed files with 89 additions and 13 deletions


@@ -2,8 +2,8 @@
##### Parameters #####
delay=60 # Delay before starting the simulation (let the server's CPU power go back down)
nbSensors=30
nbSensorsFactor=3
nbSensors=30 # Number of sensors that will send requests to the server
nbSensorsFactor=3 # Total number of sensors is nbSensors*nbSensorsFactor
requestPerSensor=10 # Theoretical simulation time is $requestPerSensor/$sensorsRequestPerSec
sensorsRequestPerSec=10 # Number of requests per second (per sensor)
######################
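For a quick sanity check of the defaults above, the derived quantities work out as follows (a sketch, not part of the commit; the parallelism assumption is inferred from the formula in the comment):
totalSensors=$(( 30 * 3 ))        # nbSensors * nbSensorsFactor = 90 sensors
theoreticalTime=$(( 10 / 10 ))    # requestPerSensor / sensorsRequestPerSec = 1s (sensors presumably send in parallel)
echo "$totalSensors sensors, ~${theoreticalTime}s theoretical simulation time"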
@@ -14,12 +14,21 @@ log () {
echo -e "\033[0;34m$@\033[0m"
}
sshWait () {
log "Waiting for for an ssh connection to a vm ($1)"
error=255
until [ $error -eq 0 ]
do
ssh -q root@$1 'echo "Connected to $(hostname)"'
error=$?
sleep 4
done
}
if [ "$1" = "subscribe" ] && [ $# -eq 1 ]
then
# Subscribe
log "Subscribing..."
oarsub -l slash_22=1+{"virtual!='NO'"}/nodes=2 'sleep "10d"'
oarsub -l slash_22=1+{"virtual!='NO' AND cluster='nova'"}/nodes=2 'sleep "10d"' # One node sends requests to the other
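Not part of this commit, but worth noting: the later sub-commands use oarsh and $OAR_NODE_FILE, which are only available from inside the OAR job. With standard OAR tooling the reservation can be entered roughly like this (the job ID is a placeholder):
oarstat -u          # list your OAR jobs and their IDs
oarsub -C 1234567   # open a shell inside the reservation (1234567 is a placeholder job ID)
# from that shell, $OAR_NODE_FILE is set and the deploy/kill sub-commands can be run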
elif [ "$1" = "deploy" ] && [ $# -eq 1 ]
then
# Get machine mac address
@@ -44,21 +53,27 @@ then
oarsh $node qemu-img create -f qcow2 -o backing_file=/tmp/debian9-x64-base.qcow2 /tmp/img.qcow2
sleep 1 # Wait for fun
# Build the cloud-init iso (to get ssh access without a password)
log "Create $node cloud-init image"
oarsh $node cp /grid5000/virt-images/cloud-init-example.sh /tmp/
oarsh $node "cd /tmp && export cloud_init_key=\$(cat ~/.ssh/id_rsa.pub) && ./cloud-init-example.sh"
#oarsh $node "cd /tmp && sed -i \"s/public-keys/ssh_authorized_keys/g\" cloud-init-data/meta-data"
oarsh $node "cd /tmp && genisoimage -output cloud-init-data.iso -volid cidata -joliet -rock cloud-init-data/user-data cloud-init-data/meta-data"
# Refresh loop state
[ $isServer -eq 1 ] && { curMac=$serverMac; isServer=0; serverNode=$node; } || { curMac=$clientMac; finished=1; clientNode=$node; }
# Launch vm
log "Launch vm $node"
oarsh $node kvm -m 2048 -hda /tmp/img.qcow2 -netdev bridge,id=br0 -device virtio-net-pci,netdev=br0,id=nic1,mac=$curMac -cdrom /tmp/cloud-init-data.iso -display none -daemonize &
# Stop looping if we finish
[ $finished -eq 1 ] && break
done
log "Installing mysql on $serverIp"
onNode="ssh -q root@$serverIp"
onNode="ssh -q root@$serverIp" # For convenience
##### Server #####
sshWait $serverIp
# One apt-get update does not seem to be enough to get mysql-server
$onNode "apt-get update && apt-get update"
$onNode apt-get -y install mysql-server
@@ -69,7 +84,8 @@ then
# Then execute it
$onNode "mysql < /tmp/setup-mysql.sql"
# Switch to client
##### Client #####
sshWait $clientIp
onNode="ssh -q root@$clientIp"
$onNode "apt-get update && apt-get update"
$onNode apt-get -y install mysql-server
@@ -78,16 +94,29 @@ then
##### Start Simulation #####
log "Simulation will start in ${delay}s"
sleep $delay
echo "Simulation start at " $(date "+%s") >> $logFile
simStart=$(date "+%s")
echo "---------- Simulation start at $simStart" >> $logFile
$onNode python /tmp/clients.py $serverIp $nbSensors $nbSensorsFactor $requestPerSensor $sensorsRequestPerSec
echo "Simulation end at " $(date "+%s") >> $logFile
simEnd=$(date "+%s")
echo "Simulation parameters: serverNode:$serverNode serverIp:$serverIp serverMac:$serverMac clientNode:$clientNode clientNode:$clientNode clientMac:$clientMac delay:$delay nbSensors:$nbSensors nbSensorsFactor:$nbSensorsFactor requestPerSensors:$requestPerSensor sensorsRequestPerSec:$sensorsRequestPerSec" >> $logFile
echo "./recordEnergy.sh nova $serverNode $simStart $simEnd energy_${simStart}_${simEnd}.csv" >> $logFile
echo "---------- Simulation end at $simEnd" >> $logFile
log "Simulation end ! Please see $logFile for more infos"
##### End Simulation #####
##### Print some infos #####
log "Network Settings:"
log " - Server $serverIp $serverMac"
log " - Client $clientIp $clientMac"
log " - Server $serverNode, $serverIp, $serverMac"
log " - Client $clientNode, $clientIp, $clientMac"
log "Simulation Settings:"
log " - Simulation delay ${delay}s"
log " - Number of sensors $(( nbSensors * nbSensorsFactor))"
log " - Number of request per sensors $requestPerSensor"
log " - Number of request per seconds on eachsensors $sensorsRequestPerSec"
elif [ "$1" = "kill" ] && [ $# -eq 1 ]
then
##### Kill all kvm on the subscribed nodes #####
isServer=1
finished=0
for node in $(cat $OAR_NODE_FILE|uniq)
@@ -99,13 +128,19 @@ then
done
elif [ "$1" = "inspect" ] && [ $# -eq 2 ]
then
##### Show content of the database #####
mysql --host="$2" -u user --password="mysql" experiment -e "SELECT * FROM temperature;"
elif [ "$1" = "flush" ] && [ $# -eq 2 ]
then
##### Flush content of the temperature table #####
log "Cleaning database table..."
mysql --host="$2" -u user --password="mysql" experiment -e "TRUNCATE TABLE temperature;"
else
echo "Usage:"
echo " - $0 subscribe"
echo " - $0 deploy"
echo " - $0 kill"
echo " - $0 inspect <serverIP>"
echo " - $0 flush <serverIP>"
fi
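For reference, a typical end-to-end session might look like this (the script filename and the server VM IP are illustrative; neither is given in this diff):
# assuming the script above is saved as experiment.sh; 10.158.0.10 is a placeholder VM IP
./experiment.sh subscribe            # reserve the /22 subnet and the two nova nodes
./experiment.sh deploy               # boot the VMs, install MySQL and run the simulation
./experiment.sh inspect 10.158.0.10  # dump the temperature table on the server VM
./experiment.sh flush 10.158.0.10    # empty the temperature table between runs
./experiment.sh kill                 # stop every kvm process on the reserved nodes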

g5k/recordEnergy.sh (new file, 41 lines added)

@@ -0,0 +1,41 @@
#!/bin/bash
# Parse argument
[ $# != 5 ] && { echo "Usage: $0 <cluster-name> <node-name> <from> <to> <output-file>"; exit 1; }
# Init arguments
clusterName="$1"
nodeName="$2"
outputFile="$5"
wattmeter=$(curl -s https://api.grid5000.fr/stable/sites/lyon/clusters/${clusterName}/nodes/${nodeName}.json | jq -r '.sensors.power.via.pdu[0].uid')
port=$(curl -s https://api.grid5000.fr/stable/sites/lyon/pdus/${wattmeter}.json | jq -r '.ports|to_entries|map(select(.value=="'${nodeName}'"))[0].key')
energyEntry=$(expr 5 + $port) # Index of the CSV field that corresponds to this node's energy value
if [ -z "$wattmeter" ] || [ -z "$port" ]
then
echo -ne "\nCannot find energy information (wattmeter/port) for node $nodeName\n"
exit 1
fi
echo "Node ${nodeName} is connected on wattmeter ${wattmeter} on port ${port}"
# Fetch the energy data and save it in CSV format
from=$(date -d "@$3" "+%s")
to=$(date -d "@$4" "+%s")
echo "ts,energy" > $outputFile # Write CSV header
for time in $(seq $from 3600 $to)
do
# We need the .gz extension when the file is not from the current hour (older logs are compressed)
[ $(date -d "@$time" "+%Y-%m-%dT%H") != $(date "+%Y-%m-%dT%H") ] && ext='.gz' || ext=''
powerFilename=$(date -d "@$time" "+power.csv.%Y-%m-%dT%H${ext}")
url="http://wattmetre.lyon.grid5000.fr/data/${wattmeter}-log/${powerFilename}"
echo "- Fetching logs from ${url}"
# Fetch logs data
[ -n "$ext" ] && csvContent=$(curl -s "${url}" | zcat) || csvContent=$(curl -s "${url}")
# Extract the timestamp (field 3) and this node's power reading (field 5+port) for samples within [from,to]
toSave=$(echo "$csvContent" | awk -F, 'int($3)>='$from' && int($3)<='$to' {printf "%s,%s\n",$3,$'$energyEntry'}')
echo "$toSave" >> $outputFile # Save data in csv
done
echo "Done"