From 4f1b2ea492d3e19c81ab98f050618d437b6e9ec5 Mon Sep 17 00:00:00 2001 From: Loïc Guégan Date: Fri, 19 Sep 2025 13:19:02 +0200 Subject: Clean repo and debug setup.sh --- README.md | 7 +- analysis/days.R | 254 --------------------------------------------- analysis/in-situ.R | 257 ++++++++++++++++++++++++++++++++++++++++++++++ analysis/learning.R | 214 -------------------------------------- analysis/offline.R | 214 ++++++++++++++++++++++++++++++++++++++ simulations/.gitignore | 1 + simulations/Makefile | 4 +- simulations/libs/setup.sh | 6 +- 8 files changed, 483 insertions(+), 474 deletions(-) delete mode 100644 analysis/days.R create mode 100644 analysis/in-situ.R delete mode 100644 analysis/learning.R create mode 100644 analysis/offline.R diff --git a/README.md b/README.md index 52a99f5..7e02742 100644 --- a/README.md +++ b/README.md @@ -1,10 +1,13 @@ # loosely-policies-analytics ## Analysis folder -- learning.R: contains two major functions: +- offline.R: contains two major functions: - build_models: To generate K-fold cross-validation results (note that hyper-parameters for decisions tree is fixed (no validation set)) - generate_inputs: generate the inputs for the simulations experiments + the decision tree plots -- days.R: Implement the in-situ learning approach +- in-situ.R: Implement the in-situ learning approach (Figure 4a 4b and 4c) + - For figure 4a and 4b we train the model with increasing amount of data from previous results as if we were using one policy per day (see section IV.A) + - For figure 4c, delta is generated by comparing using each policies in round-robin (one per days to perform the training) + to each previous paper results with single policy only (see paper section IV.A) Todo: remove minbucket=1 (does not impact the results) diff --git a/analysis/days.R b/analysis/days.R deleted file mode 100644 index 93d478e..0000000 --- a/analysis/days.R +++ /dev/null @@ -1,254 +0,0 @@ -########## INFORMATIONS ########## -# This file is made to study 
online classification -# So, each pair (wireless,wakeupfor) has its classification models (knn and decision tree) -# Note that is the following error appears: object 'accuracy' not found -# it means you should toggle the boolean in the if condition in the code to generate the accuracy onbject -################################## - -library("tidyverse") -options(dplyr.summarise.inform = FALSE) -library("class") -library("rpart") -library("rpart.plot") -library("viridis") -library("MLmetrics") -library("latex2exp") - -## Simulation Parameters: -## simkey {baseline,extended,hint,hintandextended} -## wireless {lora,nbiot} -## wakeupfor {60s,180s} -## seed [1,200] -## node on[0,12] -## isSender {0,1} -## dataSize {1MB} - -## Metrics: -## energy [0,+inf) -## nDataRcv [0,+inf) - -nseed=200 -nwakeupfor=2 -nwireless=2 -nsimkey=4 -nsimulations=nseed*nwakeupfor*nwireless*nsimkey # Must be 3200 - -## Load data and prepare the data -data=suppressMessages(read_csv("../CCGRID2022.csv"))%>%distinct() # Note that in the data experiment wireless=="lora",seed==1,wakeupfor==60,simkey=="baseline" is present 2 times in the CSV file -data_seed=data%>%group_by(simkey,wireless,wakeupfor,seed)%>%summarize(energy=sum(energy),coverage=sum(nDataRcv))%>% - mutate(efficiency=energy/coverage)%>% - ungroup() - -## F1_Score of multiclass vectors -F1_Score2=function(truth, pred){ - result=sapply(c("baseline","extended","hint","hintandextended"),function(c){ - cur_truth=truth[truth==c] - cur_pred=pred[truth==c] - col=paste0("f1_",c) - score=F1_Score(cur_truth,cur_pred) - if(is.nan(score)){score=0} - list(tibble(!!col:=score)) - }) - do.call("cbind",result) -} - -## Down scale data -reduce_days=function(data,every=6){data%>%filter(((days/4) %% every) == 0)} - -## GGPlot Theme -th = function(option="D") {list(theme_bw(), - theme(legend.box.background = element_rect(fill = "white", color = "black",size=0.9), - strip.background =element_rect(fill="#F5F5F5")), - 
scale_color_viridis(discrete=TRUE,option=option,end=0.95), - scale_fill_viridis(discrete=TRUE,option=option,end=0.95))} - -## Train models -generate_accuracy_for=function(ignore_hint=FALSE,seed_max=200,attempts_max=2,wrl="lora",wuf=180) { - attempts=seq(1,attempts_max) - results=sapply(attempts,function(attempt){ - ## Prepare data for traning - set.seed(1+attempt) # Reproducibility - wireless_map=c("lora"=1,"nbiot"=2) - cur_data_seed=data_seed%>%filter(wakeupfor==wuf,wireless==wrl) - data_ml=cur_data_seed%>%select(-efficiency)%>%mutate(wireless=wireless_map[cur_data_seed$wireless]) - if(ignore_hint){ - data_ml=data_ml%>%filter(simkey!="hint") - } - train_set=data_ml%>%filter(seed<=seed_max)%>%select(-seed) # train data on seed_max*3 days - test_set=data_ml%>%select(-seed) # build test_sed - #print(paste0("Test set ",NROW(test_set))) - #print(paste0("Train set ",NROW(train_set))) - ## KNN training - knn_predictions=knn(train=train_set%>%select(-simkey),test=test_set%>%select(-simkey),cl=train_set$simkey,k=min(10,NROW(train_set))) - ## KNN analysis - knn_cont_table=table(knn_predictions,test_set$simkey) - knn_accuracy=(sum(diag(knn_cont_table)/sum(rowSums(knn_cont_table)))) - #print(knn_accuracy) - knn_prop_table=round(prop.table(knn_cont_table),digits=2) - knn_f1_score=F1_Score2(test_set$simkey,knn_predictions) - - ## Decision tree - tree=rpart( - simkey ~ wireless + wakeupfor + energy + coverage, - data=train_set, - method="class", - minsplit=60, - minbucket=1) - tree_predictions=predict(tree,newdata=test_set%>%select(-simkey),type="class") - tree_cont_table=table(tree_predictions,test_set$simkey) - tree_accuracy=(sum(diag(tree_cont_table)/sum(rowSums(tree_cont_table)))) - tree_prop_table=round(prop.table(tree_cont_table),digits=2) - tree_f1_score=F1_Score2(test_set$simkey,tree_predictions) - - ## Format data - result_data=tibble(seed_max=seed_max,model=c("knn","tree"),accuracy=c(knn_accuracy,tree_accuracy)) - 
result_data=cbind(result_data,rbind(knn_f1_score,tree_f1_score)) - list(result_data) - }) - ## Prints - results=do.call("rbind",results) - results%>%mutate(seed_max=seed_max,attempts_max=attempts_max,wireless=wrl,wakeupfor=wuf) - -} - -generate_accuracy_energy = function(wireless,wakeupfor,steps=1, accuracy=10,ignore_hint=FALSE){ - ## Setup variables - npolicies=4 - data_seed_for_energy=data_seed%>%ungroup() - if(ignore_hint){npolicies=npolicies-1;data_seed_for_energy=data_seed%>%filter(simkey!="hint")} - data_seed_for_energy=data_seed_for_energy%>%filter(wireless==!!wireless,wakeupfor==!!wakeupfor) - - ## Generate inputs - result=tibble() - result_energy=tibble() - for(i in seq(1,160,by=steps)){ # We stop at 80% of the data (this way test set is at least 20%) - print(paste("Step",i)) - acc=generate_accuracy_for(ignore_hint=ignore_hint,seed=i,attempts_max=accuracy,wrl=wireless,wuf=wakeupfor) - result_energy=rbind(result_energy,data_seed_for_energy%>%filter(seed<=i)%>%summarize(energy=sum(energy),seed_max=i,days=i*npolicies,setup=paste0(!!wireless," ",!!wakeupfor,"s"),wireless=!!wireless,wakeupfor=!!wakeupfor)) - result=rbind(result,acc) - } - list(accuracy=result%>%mutate(days=seed_max*npolicies), # Since 3 policies (since ignore_hint=TRUE) - energy=result_energy) -} - - -########## Generate accuracy, energy, F1-Score and coverage data ########## -if(F){ # Toggle to train - lora180=generate_accuracy_energy("lora",180) - lora60=generate_accuracy_energy("lora",60) - nbiot60=generate_accuracy_energy("nbiot",60) - nbiot180=generate_accuracy_energy("nbiot",180) - accuracy=rbind(lora60$accuracy,lora180$accuracy,nbiot60$accuracy,nbiot180$accuracy) - coverage=data_seed%>%group_by(wireless,wakeupfor,seed)%>%summarize(coverage=sum(coverage))%>%mutate(days=seed*4)%>%filter(days %in% !!lora60$energy$days)%>%select(-seed) - energy=rbind(lora60$energy, - lora180$energy, - nbiot60$energy, - nbiot180$energy)%>%left_join(coverage,by=c("wireless","wakeupfor","days")) -} - - 
-########## Build learning curve (Accuracy+F1_Scores) ########## -learning_curves=accuracy%>%group_by(wireless,wakeupfor,days,model)%>% - summarize( - ## Accuracy - mean_accuracy=mean(accuracy),sd_accuracy=sd(accuracy), - min_accuracy=min(accuracy),max_accuracy=max(accuracy), - ## F1-Score Baseline - mean_f1_baseline=mean(f1_baseline),sd_f1_baseline=sd(f1_baseline), - min_f1_baseline=min(f1_baseline),max_f1_baseline=max(f1_baseline), - ## F1-Score Hint - mean_f1_hint=mean(f1_hint),sd_f1_hint=sd(f1_hint),min_f1_hint=min(f1_hint), - max_f1_hint=max(f1_hint), - ## F1-Score Extended - mean_f1_extended=mean(f1_extended),sd_f1_extended=sd(f1_extended), - min_f1_extended=min(f1_extended),max_f1_extended=max(f1_extended), - ## F1-Score HintAndExtended - mean_f1_hintandextended=mean(f1_hintandextended),sd_f1_hintandextended=sd(f1_hintandextended), - min_f1_hintandextended=min(f1_hintandextended),max_f1_hintandextended=max(f1_hintandextended)) - - -ggplot(data=learning_curves%>%mutate(model=ifelse(model=="knn","KNN","DT")),aes(linetype=model))+ - geom_line(aes(days/30,mean_f1_baseline,color="Baseline"),size=1.2)+ - geom_line(aes(days/30,mean_f1_extended,color="Extended"),size=1.2)+ - geom_line(aes(days/30,mean_f1_hint,color="Hint"),size=1.2)+ - geom_line(aes(days/30,mean_f1_hintandextended,color="Hintandextended"),size=1.2)+labs(color="Classes colors",linetype="Model")+ - facet_wrap(~wireless+wakeupfor)+ - scale_x_continuous(breaks = seq(0, max(learning_curves$days/30)))+ - scale_y_continuous(breaks = seq(0, 1, by = 0.1))+ - th()+ - theme(panel.grid.minor = element_blank(), - legend.position = c(0.9,0.72), - legend.margin = margin(2,4,2,4), - legend.spacing=unit(-0.2,"cm"), - legend.box.margin=margin(1,1,1,1))+ - xlab("Training duration (months)")+ylab("Classes F1-Score") -ggsave("figures/months_f1-score.pdf",width=8.5,height=6) - - -## Plot Merge Accuracy -ggplot(data=learning_curves%>%mutate(model=ifelse(model=="knn","KNN","DT")), - aes(days/30,mean_accuracy))+ - 
geom_line(aes(linetype=model),size=1.2)+xlab("Training duration (months)")+ylab("Model overall accuracy (OAcc)")+labs(linetype="Models")+ - scale_x_continuous(breaks = seq(0, max(learning_curves$days/30)))+ - scale_y_continuous(breaks = seq(0, 1, by = 0.1))+ - facet_wrap(~wireless+wakeupfor)+expand_limits(x = 0, y = 0)+ - th()+ - theme(panel.grid.minor = element_blank(), - legend.position = c(0.38,0.68)) -ggsave("figures/months_accuracy.pdf",width=8.5,height=6) - - -########## Energy and delta with raw policies ########## -npolicies=4 -## First we extended the number of seed to cover the entire duration of the training -data_seed_energy=rbind(data_seed, - data_seed%>%mutate(seed=seed+200), - data_seed%>%mutate(seed=seed+400), - data_seed%>%mutate(seed=seed+600)) # Almost same as if each experiment run 4 times more seed seed -## Compute the cumulative energy of each policies in each configuration accross the $npolicies*200 days -data_seed_energy=data_seed_energy%>%group_by(wireless,wakeupfor,simkey)%>%mutate(energy=cumsum(energy),setup=paste0(wireless," ",wakeupfor,"s"),days=seed) -## Now sum coverage of each $npolicies days (like in the energy data frame) -data_seed_energy=data_seed_energy%>%group_by(wireless,wakeupfor,simkey)%>%mutate(grp=ceiling(days/npolicies))%>%group_by(wireless,wakeupfor,simkey,grp)%>%mutate(coverage=cumsum(coverage))%>%filter(days%%npolicies==0) -## Now filter the data -data_seed_energy=data_seed_energy%>%filter(days %in% !!energy$days) -## Compute the delta data -energy_coverage_delta=data_seed_energy%>% - full_join(energy,by=c("days","wireless","wakeupfor"),suffix=c("","_training")) -energy_coverage_delta=energy_coverage_delta%>%group_by(wireless,wakeupfor)%>%summarize(delta_energy=energy-energy_training,simkey=simkey,days=days,delta_coverage=(coverage-coverage_training)/npolicies,coverage=coverage/npolicies,coverage_training=coverage_training/npolicies) # delta_coverage divide by $npolicies because we want the average per day (coverage is 
measure every $npolicies days (round-robin of $npolicies policies)) - - -write("wireless,wakeupfor,policy,slope,intercept,delta_coverage,coverage,coverage_training,latex","figures/delta_energy_coverage.csv") -energy_coverage_delta%>%group_by(wireless,wakeupfor,simkey)%>%group_walk(function(data,grp){ - grp=as.list(grp) - reg=lm(delta_energy ~ days,data) - slope=round(as.numeric(reg$coefficients["days"]),digits=0) - intercept=round(as.numeric(reg$coefficients[1]),digits=0) - mean_delta_coverage=round(mean(data$delta_coverage),digits=1) - mean_coverage=round(mean(data$coverage),digits=1) - mean_coverage_training=round(mean(data$coverage_training),digits=1) - print(paste0("Wireless=",grp$wireless," Wakeupfor=",grp$wakeupfor," Policy=",grp$simkey," Slope=",slope," Intercept=",intercept," Delta Coverage=",mean_delta_coverage)) - write(paste(grp$wireless,grp$wakeupfor,grp$simkey,slope,intercept,mean_delta_coverage,mean_coverage,mean_coverage_training,paste0(r"("$\mathbf{s=)",slope,",c_p=",mean_coverage,",c_t=",mean_coverage_training,r"(}$")"),sep=","),"figures/delta_energy_coverage.csv",append=T) -}) - -ggplot(energy_coverage_delta,aes(days/30,delta_energy/1e3,color=simkey,shape=simkey))+ - geom_line(size=1.2)+ylab(TeX("Delta in $E_{total}$ (kJ)"))+xlab("Training duration (months)")+ - facet_wrap(~wireless+wakeupfor,scale="free")+ - th()+theme(legend.position=c(0.61,0.97))+labs(color="Classes colors") -ggsave("figures/delta_energy_training.pdf",height=6,width=10) - -ggplot(energy_coverage_delta,aes(days/30,delta_coverage,color=simkey))+ - geom_line(size=1.2)+ylab("Delta in network coverage")+xlab("Training duration (months)")+ - facet_wrap(~wireless+wakeupfor,scale="free")+ - th()+theme(legend.position="top")+labs(color="Classes colors") -ggsave("figures/delta_coverage_training.pdf",width=9) - -## ggplot(data=energy,aes(days/30,energy/1e6,group=setup,fill=setup))+ -## geom_bar(stat="identity",position="dodge")+ -## labs(fill="Wireless and Uptime")+ -## 
scale_x_continuous(breaks = seq(0, max(energy$days/30)))+
-## xlab("Training duration (months)")+ylab("Energy consumption (MJ)")+
-## th()+theme(legend.position=c(0.12,0.75))
-## ggsave("figures/days_energy.pdf",width=8.5,height=4)
-
-
-
diff --git a/analysis/in-situ.R b/analysis/in-situ.R
new file mode 100644
index 0000000..2d57666
--- /dev/null
+++ b/analysis/in-situ.R
@@ -0,0 +1,257 @@
+########## INFORMATIONS ##########
+# This file is made to study online classification
+# So, each pair (wireless,wakeupfor) has its classification models (knn and decision tree)
+# Note that if the following error appears: object 'accuracy' not found
+# it means you should toggle the boolean in the if condition in the code to generate the accuracy object
+##################################
+
+library("tidyverse")
+options(dplyr.summarise.inform = FALSE)
+library("class")
+library("rpart")
+library("rpart.plot")
+library("viridis")
+library("MLmetrics")
+library("latex2exp")
+
+## Simulation Parameters:
+## simkey {baseline,extended,hint,hintandextended}
+## wireless {lora,nbiot}
+## wakeupfor {60s,180s}
+## seed [1,200]
+## node on[0,12]
+## isSender {0,1}
+## dataSize {1MB}
+
+## Metrics:
+## energy [0,+inf)
+## nDataRcv [0,+inf)
+
+## WARNING: Goto line 138 first and set the boolean to T (populate the R environment with accuracy results) after which you can set it to FALSE (save time)
+
+nseed=200
+nwakeupfor=2
+nwireless=2
+nsimkey=4
+nsimulations=nseed*nwakeupfor*nwireless*nsimkey # Must be 3200
+
+## Load data and prepare the data
+data=suppressMessages(read_csv("../CCGRID2022.csv"))%>%distinct() # Note that in the data experiment wireless=="lora",seed==1,wakeupfor==60,simkey=="baseline" is present 2 times in the CSV file
+data_seed=data%>%group_by(simkey,wireless,wakeupfor,seed)%>%summarize(energy=sum(energy),coverage=sum(nDataRcv))%>%
+  mutate(efficiency=energy/coverage)%>%
+  ungroup()
+
+## F1_Score of multiclass vectors
+F1_Score2=function(truth, pred){
+
result=sapply(c("baseline","extended","hint","hintandextended"),function(c){ + cur_truth=truth[truth==c] + cur_pred=pred[truth==c] + col=paste0("f1_",c) + score=F1_Score(cur_truth,cur_pred) + if(is.nan(score)){score=0} + list(tibble(!!col:=score)) + }) + do.call("cbind",result) +} + +## Down scale data +reduce_days=function(data,every=6){data%>%filter(((days/4) %% every) == 0)} + +## GGPlot Theme +th = function(option="D") {list(theme_bw(), + theme(legend.box.background = element_rect(fill = "white", color = "black",size=0.9), + strip.background =element_rect(fill="#F5F5F5")), + scale_color_viridis(discrete=TRUE,option=option,end=0.95), + scale_fill_viridis(discrete=TRUE,option=option,end=0.95))} + +## Train models +generate_accuracy_for=function(ignore_hint=FALSE,seed_max=200,attempts_max=2,wrl="lora",wuf=180) { + attempts=seq(1,attempts_max) + results=sapply(attempts,function(attempt){ + ## Prepare data for traning + set.seed(1+attempt) # Reproducibility + wireless_map=c("lora"=1,"nbiot"=2) + cur_data_seed=data_seed%>%filter(wakeupfor==wuf,wireless==wrl) + data_ml=cur_data_seed%>%select(-efficiency)%>%mutate(wireless=wireless_map[cur_data_seed$wireless]) + if(ignore_hint){ + data_ml=data_ml%>%filter(simkey!="hint") + } + train_set=data_ml%>%filter(seed<=seed_max)%>%select(-seed) # train data on seed_max*3 days + test_set=data_ml%>%select(-seed) # build test_set + #print(paste0("Test set ",NROW(test_set))) + #print(paste0("Train set ",NROW(train_set))) + ## KNN training + knn_predictions=knn(train=train_set%>%select(-simkey),test=test_set%>%select(-simkey),cl=train_set$simkey,k=min(10,NROW(train_set))) + ## KNN analysis + knn_cont_table=table(knn_predictions,test_set$simkey) + knn_accuracy=(sum(diag(knn_cont_table)/sum(rowSums(knn_cont_table)))) + #print(knn_accuracy) + knn_prop_table=round(prop.table(knn_cont_table),digits=2) + knn_f1_score=F1_Score2(test_set$simkey,knn_predictions) + + ## Decision tree + tree=rpart( + simkey ~ wireless + wakeupfor + energy + 
coverage, + data=train_set, + method="class", + minsplit=60, + minbucket=1) + tree_predictions=predict(tree,newdata=test_set%>%select(-simkey),type="class") + tree_cont_table=table(tree_predictions,test_set$simkey) + tree_accuracy=(sum(diag(tree_cont_table)/sum(rowSums(tree_cont_table)))) + tree_prop_table=round(prop.table(tree_cont_table),digits=2) + tree_f1_score=F1_Score2(test_set$simkey,tree_predictions) + + ## Format data + result_data=tibble(seed_max=seed_max,model=c("knn","tree"),accuracy=c(knn_accuracy,tree_accuracy)) + result_data=cbind(result_data,rbind(knn_f1_score,tree_f1_score)) + list(result_data) + }) + ## Prints + results=do.call("rbind",results) + results%>%mutate(seed_max=seed_max,attempts_max=attempts_max,wireless=wrl,wakeupfor=wuf) + +} + +########## Train models from seed 0 to seed 160 (this code assumes that each day, a given policy is used) +generate_accuracy_energy = function(wireless,wakeupfor,steps=1, accuracy=10,ignore_hint=FALSE){ + ## Setup variables + npolicies=4 + data_seed_for_energy=data_seed%>%ungroup() + if(ignore_hint){npolicies=npolicies-1;data_seed_for_energy=data_seed%>%filter(simkey!="hint")} + data_seed_for_energy=data_seed_for_energy%>%filter(wireless==!!wireless,wakeupfor==!!wakeupfor) # Note !! 
do unquote, here we select the proper scenario + + ## Generate inputs + result=tibble() + result_energy=tibble() + for(i in seq(1,160,by=steps)){ # We stop at 80% of the data (this way test set is at least 20%) here 160 over 200 seeds + print(paste("Step",i)) + acc=generate_accuracy_for(ignore_hint=ignore_hint,seed=i,attempts_max=accuracy,wrl=wireless,wuf=wakeupfor) + result_energy=rbind(result_energy,data_seed_for_energy%>%filter(seed<=i)%>%summarize(energy=sum(energy),seed_max=i,days=i*npolicies,setup=paste0(!!wireless," ",!!wakeupfor,"s"),wireless=!!wireless,wakeupfor=!!wakeupfor)) # days=i*npolicies since 1 policy only per day is used (see paper section IV.A) + result=rbind(result,acc) + } + list(accuracy=result%>%mutate(days=seed_max*npolicies), # Since 1 policy per days see L130 + energy=result_energy) +} + + +########## Generate accuracy, energy, F1-Score and coverage data ########## +if(F){ # Toggle to train + lora180=generate_accuracy_energy("lora",180) + lora60=generate_accuracy_energy("lora",60) + nbiot60=generate_accuracy_energy("nbiot",60) + nbiot180=generate_accuracy_energy("nbiot",180) + accuracy=rbind(lora60$accuracy,lora180$accuracy,nbiot60$accuracy,nbiot180$accuracy) + coverage=data_seed%>%group_by(wireless,wakeupfor,seed)%>%summarize(coverage=sum(coverage))%>%mutate(days=seed*4)%>%filter(days %in% !!lora60$energy$days)%>%select(-seed) + energy=rbind(lora60$energy, + lora180$energy, + nbiot60$energy, + nbiot180$energy)%>%left_join(coverage,by=c("wireless","wakeupfor","days")) +} + + +########## Build learning curve (Accuracy+F1_Scores) ########## +learning_curves=accuracy%>%group_by(wireless,wakeupfor,days,model)%>% + summarize( + ## Accuracy + mean_accuracy=mean(accuracy),sd_accuracy=sd(accuracy), + min_accuracy=min(accuracy),max_accuracy=max(accuracy), + ## F1-Score Baseline + mean_f1_baseline=mean(f1_baseline),sd_f1_baseline=sd(f1_baseline), + min_f1_baseline=min(f1_baseline),max_f1_baseline=max(f1_baseline), + ## F1-Score Hint + 
mean_f1_hint=mean(f1_hint),sd_f1_hint=sd(f1_hint),min_f1_hint=min(f1_hint), + max_f1_hint=max(f1_hint), + ## F1-Score Extended + mean_f1_extended=mean(f1_extended),sd_f1_extended=sd(f1_extended), + min_f1_extended=min(f1_extended),max_f1_extended=max(f1_extended), + ## F1-Score HintAndExtended + mean_f1_hintandextended=mean(f1_hintandextended),sd_f1_hintandextended=sd(f1_hintandextended), + min_f1_hintandextended=min(f1_hintandextended),max_f1_hintandextended=max(f1_hintandextended)) + + +ggplot(data=learning_curves%>%mutate(model=ifelse(model=="knn","KNN","DT")),aes(linetype=model))+ + geom_line(aes(days/30,mean_f1_baseline,color="Baseline"),size=1.2)+ + geom_line(aes(days/30,mean_f1_extended,color="Extended"),size=1.2)+ + geom_line(aes(days/30,mean_f1_hint,color="Hint"),size=1.2)+ + geom_line(aes(days/30,mean_f1_hintandextended,color="Hintandextended"),size=1.2)+labs(color="Classes colors",linetype="Model")+ + facet_wrap(~wireless+wakeupfor)+ + scale_x_continuous(breaks = seq(0, max(learning_curves$days/30)))+ + scale_y_continuous(breaks = seq(0, 1, by = 0.1))+ + th()+ + theme(panel.grid.minor = element_blank(), + legend.position = c(0.9,0.72), + legend.margin = margin(2,4,2,4), + legend.spacing=unit(-0.2,"cm"), + legend.box.margin=margin(1,1,1,1))+ + xlab("Training duration (months)")+ylab("Classes F1-Score") +ggsave("figures/months_f1-score.pdf",width=8.5,height=6) + + +## Plot Merge Accuracy +ggplot(data=learning_curves%>%mutate(model=ifelse(model=="knn","KNN","DT")), + aes(days/30,mean_accuracy))+ + geom_line(aes(linetype=model),size=1.2)+xlab("Training duration (months)")+ylab("Model overall accuracy (OAcc)")+labs(linetype="Models")+ + scale_x_continuous(breaks = seq(0, max(learning_curves$days/30)))+ + scale_y_continuous(breaks = seq(0, 1, by = 0.1))+ + facet_wrap(~wireless+wakeupfor)+expand_limits(x = 0, y = 0)+ + th()+ + theme(panel.grid.minor = element_blank(), + legend.position = c(0.38,0.68)) +ggsave("figures/months_accuracy.pdf",width=8.5,height=6) + 
+
+########## Energy and delta with raw policies ##########
+npolicies=4
+## First we extended the number of seeds to cover the entire duration of the training
+data_seed_energy=rbind(data_seed,
+                       data_seed%>%mutate(seed=seed+200),
+                       data_seed%>%mutate(seed=seed+400),
+                       data_seed%>%mutate(seed=seed+600)) # Almost the same as if each experiment ran 4 times more seeds
+## Compute the cumulative energy of each policy in each configuration across the $npolicies*200 days
+data_seed_energy=data_seed_energy%>%group_by(wireless,wakeupfor,simkey)%>%mutate(energy=cumsum(energy),setup=paste0(wireless," ",wakeupfor,"s"),days=seed)
+## Now sum coverage of each $npolicies days (like in the energy data frame)
+data_seed_energy=data_seed_energy%>%group_by(wireless,wakeupfor,simkey)%>%mutate(grp=ceiling(days/npolicies))%>%group_by(wireless,wakeupfor,simkey,grp)%>%mutate(coverage=cumsum(coverage))%>%filter(days%%npolicies==0)
+## Now filter the data
+data_seed_energy=data_seed_energy%>%filter(days %in% !!energy$days)
+## Compute the delta data
+energy_coverage_delta=data_seed_energy%>%
+  full_join(energy,by=c("days","wireless","wakeupfor"),suffix=c("","_training"))
+energy_coverage_delta=energy_coverage_delta%>%group_by(wireless,wakeupfor)%>%summarize(delta_energy=energy-energy_training,simkey=simkey,days=days,delta_coverage=(coverage-coverage_training)/npolicies,coverage=coverage/npolicies,coverage_training=coverage_training/npolicies) # delta_coverage divided by $npolicies because we want the average per day (coverage is measured every $npolicies days (round-robin of $npolicies policies))
+
+
+write("wireless,wakeupfor,policy,slope,intercept,delta_coverage,coverage,coverage_training,latex","figures/delta_energy_coverage.csv")
+energy_coverage_delta%>%group_by(wireless,wakeupfor,simkey)%>%group_walk(function(data,grp){
+  grp=as.list(grp)
+  reg=lm(delta_energy ~ days,data)
+  slope=round(as.numeric(reg$coefficients["days"]),digits=0)
+
intercept=round(as.numeric(reg$coefficients[1]),digits=0) + mean_delta_coverage=round(mean(data$delta_coverage),digits=1) + mean_coverage=round(mean(data$coverage),digits=1) + mean_coverage_training=round(mean(data$coverage_training),digits=1) + print(paste0("Wireless=",grp$wireless," Wakeupfor=",grp$wakeupfor," Policy=",grp$simkey," Slope=",slope," Intercept=",intercept," Delta Coverage=",mean_delta_coverage)) + write(paste(grp$wireless,grp$wakeupfor,grp$simkey,slope,intercept,mean_delta_coverage,mean_coverage,mean_coverage_training,paste0(r"("$\mathbf{s=)",slope,",c_p=",mean_coverage,",c_t=",mean_coverage_training,r"(}$")"),sep=","),"figures/delta_energy_coverage.csv",append=T) +}) + +ggplot(energy_coverage_delta,aes(days/30,delta_energy/1e3,color=simkey,shape=simkey))+ + geom_line(size=1.2)+ylab(TeX("Delta in $E_{total}$ (kJ)"))+xlab("Training duration (months)")+ + facet_wrap(~wireless+wakeupfor,scale="free")+ + th()+theme(legend.position=c(0.61,0.97))+labs(color="Classes colors") +ggsave("figures/delta_energy_training.pdf",height=6,width=10) + +ggplot(energy_coverage_delta,aes(days/30,delta_coverage,color=simkey))+ + geom_line(size=1.2)+ylab("Delta in network coverage")+xlab("Training duration (months)")+ + facet_wrap(~wireless+wakeupfor,scale="free")+ + th()+theme(legend.position="top")+labs(color="Classes colors") +ggsave("figures/delta_coverage_training.pdf",width=9) + +## ggplot(data=energy,aes(days/30,energy/1e6,group=setup,fill=setup))+ +## geom_bar(stat="identity",position="dodge")+ +## labs(fill="Wireless and Uptime")+ +## scale_x_continuous(breaks = seq(0, max(energy$days/30)))+ +## xlab("Training duration (months)")+ylab("Energy consumption (MJ)")+ +## th()+theme(legend.position=c(0.12,0.75)) +## ggsave("figures/days_energy.pdf",width=8.5,height=4) + + + diff --git a/analysis/learning.R b/analysis/learning.R deleted file mode 100644 index da444d8..0000000 --- a/analysis/learning.R +++ /dev/null @@ -1,214 +0,0 @@ -library("tidyverse") 
-options(dplyr.summarise.inform = FALSE) -library("class") -library("rpart") -library("rpart.plot") -library("viridis") -library("MLmetrics") - -## Simulation Parameters: -## simkey {baseline,extended,hint,hintandextended} -## wireless {lora,nbiot} -## wakeupfor {60s,180s} -## seed [1,200] -## node on[0,12] -## isSender {0,1} -## dataSize {1MB} - -## Metrics: -## energy [0,+inf) -## nDataRcv [0,+inf) - -nseed=200 -nwakeupfor=2 -nwireless=2 -nsimkey=4 -nsimulations=nseed*nwakeupfor*nwireless*nsimkey # Must be 3200 - -## Load data -data=suppressMessages(read_csv("../CCGRID2022.csv"))%>%distinct() # Note that in the data experiment wireless=="lora",seed==1,wakeupfor==60,simkey=="baseline" is present 2 times in the CSV file -data_seed=data%>%group_by(simkey,wireless,wakeupfor,seed)%>%summarize(energy=sum(energy),coverage=sum(nDataRcv))%>% - mutate(efficiency=energy/coverage)%>% - ungroup() - - -F1_Score2=function(truth, pred){ - result=sapply(c("baseline","extended","hint","hintandextended"),function(c){ - cur_truth=truth[truth==c] - cur_pred=pred[truth==c] - col=paste0("f1_",c) - score=F1_Score(cur_truth,cur_pred) - if(is.nan(score)){score=0} - list(tibble(!!col:=score)) - }) - do.call("cbind",result) -} - -build_models=function(ignore_hint=TRUE){ - ## Prepare data for traning - set.seed(1) # Reproducibility - wireless_map=c("lora"=1,"nbiot"=2) - data_ml=data_seed%>%select(-efficiency,-seed)%>%mutate(wireless=wireless_map[data_seed$wireless]) - if(ignore_hint){ - data_ml=data_ml%>%filter(simkey!="hint") - } - train_set=data_ml%>%sample_frac(0.8) # 80% of the data - test_set=data_ml%>%suppressMessages(anti_join(train_set)) # 20% of the data - - ## KNN predict function - knn_fn=function(inputs){ - as.vector(knn(train=train_set%>%select(-simkey),test=inputs%>%select(-simkey),cl=train_set$simkey,k=10)) - } - - ## Decision tree - tree=rpart( - simkey ~ wireless + wakeupfor + energy + coverage, - data=train_set, - method="class", - minsplit=60, - minbucket=1) - ## Tree 
predict function - tree_fn=function(inputs){ - as.vector(predict(tree,newdata=inputs%>%select(-simkey),type="class")) - } - - ## Build models - models=list(predict_knn=knn_fn,predict_tree=tree_fn) - - ## Computer performances - perfs=sapply(seq(1,20),function(i){ - ## Prepare data for traning - set.seed(1) # Reproducibility - wireless_map=c("lora"=1,"nbiot"=2) - data_ml=data_seed%>%select(-efficiency,-seed)%>%mutate(wireless=wireless_map[data_seed$wireless]) - if(ignore_hint){ - data_ml=data_ml%>%filter(simkey!="hint") - } - train_set=data_ml%>%sample_frac(0.8) # 80% of the data - test_set=data_ml%>%suppressMessages(anti_join(train_set)) # 20% of the data - - ## KNN - knn_predictions=as.vector(knn(train=train_set%>%select(-simkey),test=test_set%>%select(-simkey),cl=train_set$simkey,k=10)) - ## Decision tree - tree=rpart( - simkey ~ wireless + wakeupfor + energy + coverage, - data=train_set, - method="class", - minsplit=60, - minbucket=1) - tree_predictions=as.vector(predict(tree,newdata=test_set%>%select(-simkey),type="class")) - - ## Prefs - f1_knn=F1_Score2(test_set$simkey,knn_predictions) - f1_tree=F1_Score2(test_set$simkey,tree_predictions) - accuracy_knn=sum(test_set$simkey==knn_predictions)/length(test_set$simkey) - accuracy_tree=sum(test_set$simkey==tree_predictions)/length(test_set$simkey) - list(cbind(tibble(model=c("knn","tree")),rbind(f1_knn,f1_tree),tibble(accuracy=c(accuracy_knn,accuracy_tree)))) - }) - perfs=do.call("rbind",perfs)%>%mutate_if(is.numeric, ~round(.,digits=2)) - perfs=perfs%>%group_by(model)%>%summarize( - f1_baseline=mean(f1_baseline), - f1_hint=mean(f1_hint), - f1_extended=mean(f1_extended), - f1_hintandextended=mean(f1_hintandextended), - accuracy=mean(accuracy)) - write.csv(perfs,paste0("figures/f1_scores_offline_ignoreHINT",ignore_hint,".csv"),quote=FALSE,row.names=FALSE) - - ## Return models - models -} - -generate_inputs=function(ignore_hint=FALSE) { - ## Prepare data for traning - set.seed(1) # Reproducibility - 
wireless_map=c("lora"=1,"nbiot"=2) - data_ml=data_seed%>%select(-efficiency,-seed)%>%mutate(wireless=wireless_map[data_seed$wireless]) - if(ignore_hint){ - data_ml=data_ml%>%filter(simkey!="hint") - } - train_set=data_ml%>%sample_frac(0.8) # 80% of the data - test_set=data_ml%>%anti_join(train_set) # 20% of the data - - ## KNN training - knn_predictions=knn(train=train_set%>%select(-simkey),test=test_set%>%select(-simkey),cl=train_set$simkey,k=10) - ## KNN analysis - knn_cont_table=table(knn_predictions,test_set$simkey) - knn_accuracy=round((sum(diag(knn_cont_table)/sum(rowSums(knn_cont_table))))*100) - knn_prop_table=round(prop.table(knn_cont_table),digits=2) - - ## Decision tree - tree=rpart( - simkey ~ wireless + wakeupfor + energy + coverage, - data=train_set, - method="class", - minsplit=60, - minbucket=1) - tree_predictions=predict(tree,newdata=test_set%>%select(-simkey),type="class") - tree_cont_table=table(tree_predictions,test_set$simkey) - tree_accuracy=(sum(diag(tree_cont_table)/sum(rowSums(tree_cont_table)))) - tree_prop_table=round(prop.table(tree_cont_table),digits=2) - - ## Elbow plot - elbow_data=lapply(seq(1,10),function(kvalue){ - knn_predictions=knn(train=train_set%>%select(-simkey),test=test_set%>%select(-simkey),cl=train_set$simkey,k=kvalue) - ## KNN analysis - knn_cont_table=table(knn_predictions,test_set$simkey) - knn_accuracy=(sum(diag(knn_cont_table)/sum(rowSums(knn_cont_table)))) - knn_prop_table=round(prop.table(knn_cont_table),digits=2) - tibble(k=kvalue,accuracy=knn_accuracy) - }) - elbow_data=do.call("rbind",elbow_data) - ggplot(data=elbow_data,aes(k,accuracy))+geom_point()+geom_line()+ggtitle(paste("K-elbow for with NoHint to",as.character(ignore_hint)))+ylim(c(0,1)) - ggsave(paste0("figures/knn_elbow_NoHintIs",as.character(ignore_hint),".pdf")) - - ## Prints - print(paste0("Accuracy: KNN=",knn_accuracy,"% CART=",tree_accuracy,"%")) - pdf(paste0("figures/tree_",as.character(ignore_hint),".pdf")) - 
tree_plot=rpart.plot(tree,box.palette=as.list(viridis::viridis(4,begin=0.48)),tweak=1.111) - silent_call=dev.off() - ## Notes: KNN accuracy jump to 76% and CART to 80% accuracy without the hint policy - - ## Generate simulation inputs - inputs=tibble( - wakeupfor = c(60,180,60,180), - wireless = c("lora", "lora", "nbiot", "nbiot")) - constraints=apply(inputs,1,function(row){ - wi=row["wireless"] - wa=as.numeric(row["wakeupfor"]) - ## First extract energy/coverage boundaries - min_energy=min((data_seed%>%filter(wireless==wi,wakeupfor==wa))$energy) - max_energy=max((data_seed%>%filter(wireless==wi,wakeupfor==wa))$energy) - min_coverage=min((data_seed%>%filter(wireless==wi,wakeupfor==wa))$coverage) - max_coverage=max((data_seed%>%filter(wireless==wi,wakeupfor==wa))$coverage) - ## Generate random points (10 per scenarios) - n=100 - current_inputs=tibble( - wireless=rep(wi,n), - wakeupfor=rep(wa,n), - energy_constraint=runif(n,min_energy,max_energy), - coverage_constraint=round(runif(n,min_coverage,max_coverage))) - predictions_knn=knn(train=train_set%>%select(-simkey),test=current_inputs%>% - rename(energy=energy_constraint,coverage=coverage_constraint)%>% - mutate(wireless=wireless_map[wireless]),cl=train_set$simkey,k=10) - predictions_tree=predict(tree,newdata=current_inputs%>% - rename(energy=energy_constraint,coverage=coverage_constraint)%>% - mutate(wireless=wireless_map[wireless]),type="class") - knn_final=tibble(cbind(current_inputs,tibble(simkey=predictions_knn,model="knn"))) - tree_final=tibble(cbind(current_inputs,tibble(simkey=predictions_tree,model="tree"))) - rbind(knn_final,tree_final) - }) - inputs=do.call("rbind",constraints)%>%distinct() - ## Dimension Energy/Coverage - ggplot(data_seed%>%mutate(wakeupfor=as.character(wakeupfor)), - aes(coverage,energy,color=simkey))+geom_point()+ - geom_point(data=inputs%>%mutate(wakeupfor=as.character(wakeupfor)),aes(coverage_constraint,energy_constraint),size=3,pch=5)+ - ggtitle("Dimension 
Energy/Coverage")+xlab("Coverage")+ylab("Sum of nodes energy consumption (J)")+ - facet_wrap(~wakeupfor+wireless,scale="free") - ggsave(paste0("figures/random_inputs_NoHintIs",as.character(ignore_hint),".pdf"),width=15) - write.csv(inputs,paste0("../inputs_NoHintIs",as.character(ignore_hint),".csv"),row.names=FALSE, quote=FALSE) - -} - -## Generate inputs -generate_inputs(FALSE) -generate_inputs(TRUE) diff --git a/analysis/offline.R b/analysis/offline.R new file mode 100644 index 0000000..da444d8 --- /dev/null +++ b/analysis/offline.R @@ -0,0 +1,214 @@ +library("tidyverse") +options(dplyr.summarise.inform = FALSE) +library("class") +library("rpart") +library("rpart.plot") +library("viridis") +library("MLmetrics") + +## Simulation Parameters: +## simkey {baseline,extended,hint,hintandextended} +## wireless {lora,nbiot} +## wakeupfor {60s,180s} +## seed [1,200] +## node on[0,12] +## isSender {0,1} +## dataSize {1MB} + +## Metrics: +## energy [0,+inf) +## nDataRcv [0,+inf) + +nseed=200 +nwakeupfor=2 +nwireless=2 +nsimkey=4 +nsimulations=nseed*nwakeupfor*nwireless*nsimkey # Must be 3200 + +## Load data +data=suppressMessages(read_csv("../CCGRID2022.csv"))%>%distinct() # Note that in the data experiment wireless=="lora",seed==1,wakeupfor==60,simkey=="baseline" is present 2 times in the CSV file +data_seed=data%>%group_by(simkey,wireless,wakeupfor,seed)%>%summarize(energy=sum(energy),coverage=sum(nDataRcv))%>% + mutate(efficiency=energy/coverage)%>% + ungroup() + + +F1_Score2=function(truth, pred){ + result=sapply(c("baseline","extended","hint","hintandextended"),function(c){ + cur_truth=truth[truth==c] + cur_pred=pred[truth==c] + col=paste0("f1_",c) + score=F1_Score(cur_truth,cur_pred) + if(is.nan(score)){score=0} + list(tibble(!!col:=score)) + }) + do.call("cbind",result) +} + +build_models=function(ignore_hint=TRUE){ + ## Prepare data for traning + set.seed(1) # Reproducibility + wireless_map=c("lora"=1,"nbiot"=2) + 
data_ml=data_seed%>%select(-efficiency,-seed)%>%mutate(wireless=wireless_map[data_seed$wireless]) + if(ignore_hint){ + data_ml=data_ml%>%filter(simkey!="hint") + } + train_set=data_ml%>%sample_frac(0.8) # 80% of the data + test_set=data_ml%>%suppressMessages(anti_join(train_set)) # 20% of the data + + ## KNN predict function + knn_fn=function(inputs){ + as.vector(knn(train=train_set%>%select(-simkey),test=inputs%>%select(-simkey),cl=train_set$simkey,k=10)) + } + + ## Decision tree + tree=rpart( + simkey ~ wireless + wakeupfor + energy + coverage, + data=train_set, + method="class", + minsplit=60, + minbucket=1) + ## Tree predict function + tree_fn=function(inputs){ + as.vector(predict(tree,newdata=inputs%>%select(-simkey),type="class")) + } + + ## Build models + models=list(predict_knn=knn_fn,predict_tree=tree_fn) + + ## Computer performances + perfs=sapply(seq(1,20),function(i){ + ## Prepare data for traning + set.seed(1) # Reproducibility + wireless_map=c("lora"=1,"nbiot"=2) + data_ml=data_seed%>%select(-efficiency,-seed)%>%mutate(wireless=wireless_map[data_seed$wireless]) + if(ignore_hint){ + data_ml=data_ml%>%filter(simkey!="hint") + } + train_set=data_ml%>%sample_frac(0.8) # 80% of the data + test_set=data_ml%>%suppressMessages(anti_join(train_set)) # 20% of the data + + ## KNN + knn_predictions=as.vector(knn(train=train_set%>%select(-simkey),test=test_set%>%select(-simkey),cl=train_set$simkey,k=10)) + ## Decision tree + tree=rpart( + simkey ~ wireless + wakeupfor + energy + coverage, + data=train_set, + method="class", + minsplit=60, + minbucket=1) + tree_predictions=as.vector(predict(tree,newdata=test_set%>%select(-simkey),type="class")) + + ## Prefs + f1_knn=F1_Score2(test_set$simkey,knn_predictions) + f1_tree=F1_Score2(test_set$simkey,tree_predictions) + accuracy_knn=sum(test_set$simkey==knn_predictions)/length(test_set$simkey) + accuracy_tree=sum(test_set$simkey==tree_predictions)/length(test_set$simkey) + 
list(cbind(tibble(model=c("knn","tree")),rbind(f1_knn,f1_tree),tibble(accuracy=c(accuracy_knn,accuracy_tree)))) + }) + perfs=do.call("rbind",perfs)%>%mutate_if(is.numeric, ~round(.,digits=2)) + perfs=perfs%>%group_by(model)%>%summarize( + f1_baseline=mean(f1_baseline), + f1_hint=mean(f1_hint), + f1_extended=mean(f1_extended), + f1_hintandextended=mean(f1_hintandextended), + accuracy=mean(accuracy)) + write.csv(perfs,paste0("figures/f1_scores_offline_ignoreHINT",ignore_hint,".csv"),quote=FALSE,row.names=FALSE) + + ## Return models + models +} + +generate_inputs=function(ignore_hint=FALSE) { + ## Prepare data for traning + set.seed(1) # Reproducibility + wireless_map=c("lora"=1,"nbiot"=2) + data_ml=data_seed%>%select(-efficiency,-seed)%>%mutate(wireless=wireless_map[data_seed$wireless]) + if(ignore_hint){ + data_ml=data_ml%>%filter(simkey!="hint") + } + train_set=data_ml%>%sample_frac(0.8) # 80% of the data + test_set=data_ml%>%anti_join(train_set) # 20% of the data + + ## KNN training + knn_predictions=knn(train=train_set%>%select(-simkey),test=test_set%>%select(-simkey),cl=train_set$simkey,k=10) + ## KNN analysis + knn_cont_table=table(knn_predictions,test_set$simkey) + knn_accuracy=round((sum(diag(knn_cont_table)/sum(rowSums(knn_cont_table))))*100) + knn_prop_table=round(prop.table(knn_cont_table),digits=2) + + ## Decision tree + tree=rpart( + simkey ~ wireless + wakeupfor + energy + coverage, + data=train_set, + method="class", + minsplit=60, + minbucket=1) + tree_predictions=predict(tree,newdata=test_set%>%select(-simkey),type="class") + tree_cont_table=table(tree_predictions,test_set$simkey) + tree_accuracy=(sum(diag(tree_cont_table)/sum(rowSums(tree_cont_table)))) + tree_prop_table=round(prop.table(tree_cont_table),digits=2) + + ## Elbow plot + elbow_data=lapply(seq(1,10),function(kvalue){ + knn_predictions=knn(train=train_set%>%select(-simkey),test=test_set%>%select(-simkey),cl=train_set$simkey,k=kvalue) + ## KNN analysis + 
knn_cont_table=table(knn_predictions,test_set$simkey) + knn_accuracy=(sum(diag(knn_cont_table)/sum(rowSums(knn_cont_table)))) + knn_prop_table=round(prop.table(knn_cont_table),digits=2) + tibble(k=kvalue,accuracy=knn_accuracy) + }) + elbow_data=do.call("rbind",elbow_data) + ggplot(data=elbow_data,aes(k,accuracy))+geom_point()+geom_line()+ggtitle(paste("K-elbow for with NoHint to",as.character(ignore_hint)))+ylim(c(0,1)) + ggsave(paste0("figures/knn_elbow_NoHintIs",as.character(ignore_hint),".pdf")) + + ## Prints + print(paste0("Accuracy: KNN=",knn_accuracy,"% CART=",tree_accuracy,"%")) + pdf(paste0("figures/tree_",as.character(ignore_hint),".pdf")) + tree_plot=rpart.plot(tree,box.palette=as.list(viridis::viridis(4,begin=0.48)),tweak=1.111) + silent_call=dev.off() + ## Notes: KNN accuracy jump to 76% and CART to 80% accuracy without the hint policy + + ## Generate simulation inputs + inputs=tibble( + wakeupfor = c(60,180,60,180), + wireless = c("lora", "lora", "nbiot", "nbiot")) + constraints=apply(inputs,1,function(row){ + wi=row["wireless"] + wa=as.numeric(row["wakeupfor"]) + ## First extract energy/coverage boundaries + min_energy=min((data_seed%>%filter(wireless==wi,wakeupfor==wa))$energy) + max_energy=max((data_seed%>%filter(wireless==wi,wakeupfor==wa))$energy) + min_coverage=min((data_seed%>%filter(wireless==wi,wakeupfor==wa))$coverage) + max_coverage=max((data_seed%>%filter(wireless==wi,wakeupfor==wa))$coverage) + ## Generate random points (10 per scenarios) + n=100 + current_inputs=tibble( + wireless=rep(wi,n), + wakeupfor=rep(wa,n), + energy_constraint=runif(n,min_energy,max_energy), + coverage_constraint=round(runif(n,min_coverage,max_coverage))) + predictions_knn=knn(train=train_set%>%select(-simkey),test=current_inputs%>% + rename(energy=energy_constraint,coverage=coverage_constraint)%>% + mutate(wireless=wireless_map[wireless]),cl=train_set$simkey,k=10) + predictions_tree=predict(tree,newdata=current_inputs%>% + 
rename(energy=energy_constraint,coverage=coverage_constraint)%>% + mutate(wireless=wireless_map[wireless]),type="class") + knn_final=tibble(cbind(current_inputs,tibble(simkey=predictions_knn,model="knn"))) + tree_final=tibble(cbind(current_inputs,tibble(simkey=predictions_tree,model="tree"))) + rbind(knn_final,tree_final) + }) + inputs=do.call("rbind",constraints)%>%distinct() + ## Dimension Energy/Coverage + ggplot(data_seed%>%mutate(wakeupfor=as.character(wakeupfor)), + aes(coverage,energy,color=simkey))+geom_point()+ + geom_point(data=inputs%>%mutate(wakeupfor=as.character(wakeupfor)),aes(coverage_constraint,energy_constraint),size=3,pch=5)+ + ggtitle("Dimension Energy/Coverage")+xlab("Coverage")+ylab("Sum of nodes energy consumption (J)")+ + facet_wrap(~wakeupfor+wireless,scale="free") + ggsave(paste0("figures/random_inputs_NoHintIs",as.character(ignore_hint),".pdf"),width=15) + write.csv(inputs,paste0("../inputs_NoHintIs",as.character(ignore_hint),".csv"),row.names=FALSE, quote=FALSE) + +} + +## Generate inputs +generate_inputs(FALSE) +generate_inputs(TRUE) diff --git a/simulations/.gitignore b/simulations/.gitignore index a5fd42a..89d1606 100644 --- a/simulations/.gitignore +++ b/simulations/.gitignore @@ -1,6 +1,7 @@ simulator libs/simgrid libs/rapidjson +libs/*.tar* compile_commands.json platform.xml scenarios diff --git a/simulations/Makefile b/simulations/Makefile index 3ed792d..8702e66 100644 --- a/simulations/Makefile +++ b/simulations/Makefile @@ -7,7 +7,7 @@ CC := g++ $(addprefix -L , $(LIBS)) $(addprefix -I , $(INCLUDES)) all: $(EXEC) $(basename $(notdir $(SCENARIOS))) $(EXEC): $(filter-out $(SCENARIOS), $(wildcard src/*)) - $(CC) -lsimgrid $^ -o $@ + $(CC) $^ -lsimgrid -o $@ $(basename $(notdir $(SCENARIOS))): $(SCENARIOS) $(CC) $^ -o $@ @@ -16,4 +16,4 @@ run: $(EXEC) export LD_LIBRARY_PATH=$(addprefix :, $(LIBS)) && ./$(EXEC) 10 --cfg=network/bandwidth-factor:1.05 --cfg=network/model:CM02 -–cfg=network/crosstraffic:0 clean: - -rm $(EXEC) $(basename 
$(notdir $(SCENARIOS))) \ No newline at end of file + -rm $(EXEC) $(basename $(notdir $(SCENARIOS))) diff --git a/simulations/libs/setup.sh b/simulations/libs/setup.sh index 395a11e..14705f6 100755 --- a/simulations/libs/setup.sh +++ b/simulations/libs/setup.sh @@ -2,8 +2,10 @@ set -e -[ ! -e "simgrid" ] && git clone https://framagit.org/simgrid/simgrid -[ ! -e "rapidjson"] && git clone https://github.com/Tencent/rapidjson +[ ! -e "simgrid-v3.35.tar.bz2" ] && wget "https://framagit.org/simgrid/simgrid/-/archive/v3.35/simgrid-v3.35.tar.bz2?ref_type=tags" -O "simgrid-v3.35.tar.bz2" +[ ! -e "simgrid" ] && tar -xf "simgrid-v3.35.tar.bz2" && mv "simgrid-v3.35" simgrid + +[ ! -e "rapidjson" ] && git clone --depth 1 https://github.com/Tencent/rapidjson cd simgrid mkdir -p build -- cgit v1.2.3