From fb934ed98e65ec70ab07bd2cae694ef6c14cbde9 Mon Sep 17 00:00:00 2001
From: Michael Dietze
Date: Thu, 6 Apr 2017 19:39:55 -0400
Subject: [PATCH 001/771] tree ring: X * time-varying interaction term
(untested)
---
modules/data.land/R/InventoryGrowthFusion.R | 25 ++++++++++++++++-----
1 file changed, 20 insertions(+), 5 deletions(-)
diff --git a/modules/data.land/R/InventoryGrowthFusion.R b/modules/data.land/R/InventoryGrowthFusion.R
index b7e1032062a..c1a75358530 100644
--- a/modules/data.land/R/InventoryGrowthFusion.R
+++ b/modules/data.land/R/InventoryGrowthFusion.R
@@ -118,7 +118,7 @@ model{
if(FALSE){
## DEV TESTING FOR X, polynomial X, and X interactions
- fixed <- "X + X^3 + X*bob + bob + dia"
+ fixed <- "X + X^3 + X*bob + bob + dia + X*Tmin[t]"
}
## Design matrix
if (is.null(fixed)) {
@@ -157,12 +157,27 @@ model{
covX <- strsplit(X.terms[i],"*",fixed=TRUE)[[1]]
covX <- covX[-which(toupper(covX)=="X")] ## remove X from terms
if(covX %in% colnames(cov.data)){ ## covariate present
- if(!(covX %in% names(data))){
- ## add cov variables to data object
- data[[covX]] <- cov.data[,covX]
- }
+
+ ##is covariate fixed or time varying?
+ tvar <- grep("[t]",covX,fixed=TRUE)
+ if(tvar){
+ covX <- sub("[t]","",covX,fixed = TRUE)
+ if(!(covX %in% names(data))){
+ ## add cov variables to data object
+ data[[covX]] <- time_varying[[covX]]
+ }
+ covX <- paste0(covX,"[i,t]")
+ } else {
+ ## variable is fixed
+ if(!(covX %in% names(data))){
+ ## add cov variables to data object
+ data[[covX]] <- cov.data[,covX]
+ }
+ } ## end fixed or time varying
+
myBeta <- paste0("betaX_",covX)
Xformula <- paste0(myBeta,"*x[i,t-1]*",covX,"[i]")
+
} else {
## covariate absent
print("covariate absent from covariate data:", covX)
From 23743cc2f0ed3df2b1a60a7be7fa21a8f7061d6d Mon Sep 17 00:00:00 2001
From: Michael Dietze
Date: Thu, 6 Apr 2017 19:58:29 -0400
Subject: [PATCH 002/771] tree ring: bug fix in variable checking on X*fixed.
Start of timevar * fixed
---
modules/data.land/R/InventoryGrowthFusion.R | 50 ++++++++++++++++-----
1 file changed, 40 insertions(+), 10 deletions(-)
diff --git a/modules/data.land/R/InventoryGrowthFusion.R b/modules/data.land/R/InventoryGrowthFusion.R
index c1a75358530..22ee5a8dc5b 100644
--- a/modules/data.land/R/InventoryGrowthFusion.R
+++ b/modules/data.land/R/InventoryGrowthFusion.R
@@ -156,7 +156,6 @@ model{
covX <- strsplit(X.terms[i],"*",fixed=TRUE)[[1]]
covX <- covX[-which(toupper(covX)=="X")] ## remove X from terms
- if(covX %in% colnames(cov.data)){ ## covariate present
##is covariate fixed or time varying?
tvar <- grep("[t]",covX,fixed=TRUE)
@@ -169,20 +168,21 @@ model{
covX <- paste0(covX,"[i,t]")
} else {
## variable is fixed
- if(!(covX %in% names(data))){
- ## add cov variables to data object
- data[[covX]] <- cov.data[,covX]
+ if(covX %in% colnames(cov.data)){ ## covariate present
+ if(!(covX %in% names(data))){
+ ## add cov variables to data object
+ data[[covX]] <- cov.data[,covX]
+ }
+ } else {
+ ## covariate absent
+ print("covariate absent from covariate data:", covX)
}
+
} ## end fixed or time varying
myBeta <- paste0("betaX_",covX)
Xformula <- paste0(myBeta,"*x[i,t-1]*",covX,"[i]")
- } else {
- ## covariate absent
- print("covariate absent from covariate data:", covX)
- }
-
} else if(length(grep("^",X.terms[i],fixed=TRUE))==1){ ## POLYNOMIAL
powX <- strsplit(X.terms[i],"^",fixed=TRUE)[[1]]
powX <- powX[-which(toupper(powX)=="X")] ## remove X from terms
@@ -254,11 +254,41 @@ model{
## parse equation into variable names
t_vars <- gsub(" ","",unlist(strsplit(time_varying,"+",fixed=TRUE))) ## split on +, remove whitespace
## check for interaction terms
- it_vars <- grep(pattern = "*",x=t_vars,fixed = TRUE)
+ it_vars <- t_vars[grep(pattern = "*",x=t_vars,fixed = TRUE)]
+ t_vars <- t_vars[!(tvars == it_vars)]
+
## need to deal with interactions with fixed variables
## will get really nasty if interactions are with catagorical variables
## need to create new data matrices on the fly
+ for(i in seq_along(it_vars)){
+
+ ##is covariate fixed or time varying?
+ covX <- strsplit(it_vars[i],"*",fixed=TRUE)[[1]]
+ tvar1 <- grep("[t]",covX[1],fixed=TRUE)
+ tvar2 <- grep("[t]",covX[2],fixed=TRUE)
+
+ if(tvar){
+ covX <- sub("[t]","",covX,fixed = TRUE)
+ if(!(covX %in% names(data))){
+ ## add cov variables to data object
+ data[[covX]] <- time_varying[[covX]]
+ }
+ covX <- paste0(covX,"[i,t]")
+ } else {
+ ## variable is fixed
+ if(!(covX %in% names(data))){
+ ## add cov variables to data object
+ data[[covX]] <- cov.data[,covX]
+ }
+ } ## end fixed or time varying
+
+ myBeta <- paste0("betaX_",covX)
+ Xformula <- paste0(myBeta,"*x[i,t-1]*",covX,"[i]")
+
+ }
+
+
## loop over variables
for(j in seq_along(t_vars)){
tvar <- t_vars[j]
From 4491fdb2e63ba8b796e7e07f7b3bb9fd906f3cd9 Mon Sep 17 00:00:00 2001
From: Michael Dietze
Date: Thu, 6 Apr 2017 20:00:23 -0400
Subject: [PATCH 003/771] tree ring: X*time-var -> change tvar to logical
---
modules/data.land/R/InventoryGrowthFusion.R | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/modules/data.land/R/InventoryGrowthFusion.R b/modules/data.land/R/InventoryGrowthFusion.R
index 22ee5a8dc5b..7c01b7505c8 100644
--- a/modules/data.land/R/InventoryGrowthFusion.R
+++ b/modules/data.land/R/InventoryGrowthFusion.R
@@ -158,7 +158,7 @@ model{
covX <- covX[-which(toupper(covX)=="X")] ## remove X from terms
##is covariate fixed or time varying?
- tvar <- grep("[t]",covX,fixed=TRUE)
+ tvar <- length(grep("[t]",covX,fixed=TRUE)) > 0
if(tvar){
covX <- sub("[t]","",covX,fixed = TRUE)
if(!(covX %in% names(data))){
From d85747c471c88ef5ee8b671302484d557b2e5331 Mon Sep 17 00:00:00 2001
From: Michael Dietze
Date: Thu, 6 Apr 2017 20:15:35 -0400
Subject: [PATCH 004/771] tree-rings: refined time_var interaction terms
---
modules/data.land/R/InventoryGrowthFusion.R | 44 ++++++++++++++-------
1 file changed, 29 insertions(+), 15 deletions(-)
diff --git a/modules/data.land/R/InventoryGrowthFusion.R b/modules/data.land/R/InventoryGrowthFusion.R
index 7c01b7505c8..2bc304f18ff 100644
--- a/modules/data.land/R/InventoryGrowthFusion.R
+++ b/modules/data.land/R/InventoryGrowthFusion.R
@@ -250,6 +250,7 @@ model{
if (is.null(time_data)) {
print("time_varying formula provided but time_data is absent:", time_varying)
}
+ Xt.priors <- ""
## parse equation into variable names
t_vars <- gsub(" ","",unlist(strsplit(time_varying,"+",fixed=TRUE))) ## split on +, remove whitespace
@@ -265,28 +266,39 @@ model{
##is covariate fixed or time varying?
covX <- strsplit(it_vars[i],"*",fixed=TRUE)[[1]]
- tvar1 <- grep("[t]",covX[1],fixed=TRUE)
- tvar2 <- grep("[t]",covX[2],fixed=TRUE)
-
- if(tvar){
- covX <- sub("[t]","",covX,fixed = TRUE)
- if(!(covX %in% names(data))){
+ tvar <- length(grep("[t]",covX[1],fixed=TRUE)) > 0
+ tvar[2] <- length(grep("[t]",covX[2],fixed=TRUE)) > 0
+ myBeta <- "beta_"
+ for(j in 1:2){
+ if(j == 2) myBeta <- paste0(myBeta,"_")
+ if(tvar[j]){
+ covX[j] <- sub("[t]","",covX[j],fixed = TRUE)
+ if(!(covX[j] %in% names(data))){
## add cov variables to data object
- data[[covX]] <- time_varying[[covX]]
+ data[[covX[j]]] <- time_varying[[covX[j]]]
}
- covX <- paste0(covX,"[i,t]")
+ myBeta <- paste0(myBeta,covX[j])
+ covX[j] <- paste0(covX[j],"[i,t]")
} else {
## variable is fixed
- if(!(covX %in% names(data))){
+ if(!(covX[j] %in% names(data))){
## add cov variables to data object
- data[[covX]] <- cov.data[,covX]
+ data[[covX[j]]] <- cov.data[,covX[j]]
}
+ myBeta <- paste0(myBeta,covX[j])
+ covX[j] <- paste0(covX[j],"[i]")
} ## end fixed or time varying
- myBeta <- paste0("betaX_",covX)
- Xformula <- paste0(myBeta,"*x[i,t-1]*",covX,"[i]")
-
- }
+ } ## end building beta
+
+ ## append to process model formula
+ Pformula <- paste(Pformula,
+ paste0(myBeta,"*",covX[1],"*",covX[2]))
+
+ ## priors
+ Xt.priors <- paste0(Xt.priors,
+ " ",myBeta,"~dnorm(0,0.001)\n")
+ } ## end time-varying interaction terms
## loop over variables
@@ -308,7 +320,9 @@ model{
out.variables <- c(out.variables, paste0("beta", tvar))
}
## build prior
- Xt.priors <- paste0(" beta", t_vars, "~dnorm(0,0.001)", collapse = "\n")
+ Xt.priors <- paste0(Xt.priors,
+ paste0(" beta", t_vars, "~dnorm(0,0.001)", collapse = "\n")
+ )
TreeDataFusionMV <- sub(pattern = "## TIME VARYING BETAS", Xt.priors, TreeDataFusionMV)
} ## END time varying covariates
From de66d879701973701f699535cb620154b92c1b17 Mon Sep 17 00:00:00 2001
From: Michael Dietze
Date: Fri, 14 Apr 2017 10:42:22 -0400
Subject: [PATCH 005/771] Debugging tree-ring time-varying interaction terms
---
modules/data.land/R/InventoryGrowthFusion.R | 12 ++++++------
1 file changed, 6 insertions(+), 6 deletions(-)
diff --git a/modules/data.land/R/InventoryGrowthFusion.R b/modules/data.land/R/InventoryGrowthFusion.R
index 2bc304f18ff..1926d745780 100644
--- a/modules/data.land/R/InventoryGrowthFusion.R
+++ b/modules/data.land/R/InventoryGrowthFusion.R
@@ -114,7 +114,7 @@ model{
## Substitute into code
TreeDataFusionMV <- sub(pattern = "## RANDOM EFFECT TAUS", Rpriors, TreeDataFusionMV)
TreeDataFusionMV <- gsub(pattern = "## RANDOM_EFFECTS", Reffects, TreeDataFusionMV)
- }
+ } ### END RANDOM EFFECTS
if(FALSE){
## DEV TESTING FOR X, polynomial X, and X interactions
@@ -163,7 +163,7 @@ model{
covX <- sub("[t]","",covX,fixed = TRUE)
if(!(covX %in% names(data))){
## add cov variables to data object
- data[[covX]] <- time_varying[[covX]]
+ data[[covX]] <- time_data[[covX]]
}
covX <- paste0(covX,"[i,t]")
} else {
@@ -203,7 +203,7 @@ model{
## add to out.variables
out.variables <- c(out.variables, myBeta)
- }
+ } ## END LOOP OVER X TERMS
## create priors
TreeDataFusionMV <- sub(pattern = "## ENDOGENOUS BETAS", Xpriors, TreeDataFusionMV)
@@ -219,7 +219,7 @@ model{
##Center the covariate data
Xf.center <- apply(Xf, 2, mean, na.rm = TRUE)
Xf <- t(t(Xf) - Xf.center)
- }
+ } ## end fixed effects parsing
## build formula in JAGS syntax
if (!is.null(Xf)) {
@@ -256,7 +256,7 @@ model{
t_vars <- gsub(" ","",unlist(strsplit(time_varying,"+",fixed=TRUE))) ## split on +, remove whitespace
## check for interaction terms
it_vars <- t_vars[grep(pattern = "*",x=t_vars,fixed = TRUE)]
- t_vars <- t_vars[!(tvars == it_vars)]
+ t_vars <- t_vars[!(t_vars == it_vars)]
## need to deal with interactions with fixed variables
## will get really nasty if interactions are with catagorical variables
@@ -293,7 +293,7 @@ model{
## append to process model formula
Pformula <- paste(Pformula,
- paste0(myBeta,"*",covX[1],"*",covX[2]))
+ paste0(" + ",myBeta,"*",covX[1],"*",covX[2]))
## priors
Xt.priors <- paste0(Xt.priors,
From 48b62e14478d0af92dee19d7ca2afcd25f8662c6 Mon Sep 17 00:00:00 2001
From: Michael Dietze
Date: Fri, 14 Apr 2017 12:32:39 -0400
Subject: [PATCH 006/771] tree rings: additional debugging of time varying
interactions
---
modules/data.land/R/InventoryGrowthFusion.R | 24 ++++++++++++++-------
1 file changed, 16 insertions(+), 8 deletions(-)
diff --git a/modules/data.land/R/InventoryGrowthFusion.R b/modules/data.land/R/InventoryGrowthFusion.R
index 1926d745780..3b45d88f41f 100644
--- a/modules/data.land/R/InventoryGrowthFusion.R
+++ b/modules/data.land/R/InventoryGrowthFusion.R
@@ -92,6 +92,8 @@ model{
data[[length(data)+1]] <- as.numeric(as.factor(as.character(cov.data[,r_var[j]]))) ## multiple conversions to eliminate gaps
names(data)[length(data)] <- r_var[j]
}
+ if(any(duplicated(names(data)))){PEcAn.utils::logger.error("duplicated variable at r_var",names(data))}
+
nr[j] <- max(as.numeric(data[[r_var[j]]]))
}
index <- paste0("[",index,"]")
@@ -154,6 +156,7 @@ model{
Xformula <- NULL
if(length(grep("*",X.terms[i],fixed = TRUE)) == 1){ ## INTERACTION
+ myIndex <- "[i]"
covX <- strsplit(X.terms[i],"*",fixed=TRUE)[[1]]
covX <- covX[-which(toupper(covX)=="X")] ## remove X from terms
@@ -165,7 +168,10 @@ model{
## add cov variables to data object
data[[covX]] <- time_data[[covX]]
}
- covX <- paste0(covX,"[i,t]")
+ if(any(duplicated(names(data)))){PEcAn.utils::logger.error("duplicated variable at covX",names(data))}
+
+# covX <- paste0(covX,"[i,t-1]")
+ myIndex <- "[i,t-1]"
} else {
## variable is fixed
if(covX %in% colnames(cov.data)){ ## covariate present
@@ -173,6 +179,7 @@ model{
## add cov variables to data object
data[[covX]] <- cov.data[,covX]
}
+ if(any(duplicated(names(data)))){PEcAn.utils::logger.error("duplicated variable at covX2",names(data))}
} else {
## covariate absent
print("covariate absent from covariate data:", covX)
@@ -181,7 +188,7 @@ model{
} ## end fixed or time varying
myBeta <- paste0("betaX_",covX)
- Xformula <- paste0(myBeta,"*x[i,t-1]*",covX,"[i]")
+ Xformula <- paste0(myBeta,"*x[i,t-1]*",covX,myIndex)
} else if(length(grep("^",X.terms[i],fixed=TRUE))==1){ ## POLYNOMIAL
powX <- strsplit(X.terms[i],"^",fixed=TRUE)[[1]]
@@ -239,6 +246,8 @@ model{
out.variables <- c(out.variables, paste0("beta", Xf.names))
}
+ if(any(duplicated(names(data)))){PEcAn.utils::logger.error("duplicated variable at Xf",names(data))}
+
if(FALSE){
## DEVEL TESTING FOR TIME VARYING
time_varying <- "TminJuly + PrecipDec + TminJuly*PrecipDec"
@@ -305,13 +314,12 @@ model{
for(j in seq_along(t_vars)){
tvar <- t_vars[j]
- ## grab from the list of data matrices
- dtmp <- time_data[[tvar]]
+ if(!(tvar %in% names(data))){
+ ## add cov variables to data object
+ data[[tvar]] <- time_data[[tvar]]
+ }
+ if(any(duplicated(names(data)))){PEcAn.utils::logger.error("duplicated variable at tvar",names(data))}
- ## insert data into JAGS inputs
- data[[length(data)+1]] <- dtmp
- names(data)[length(data)] <- tvar
-
## append to process model formula
Pformula <- paste(Pformula,
paste0("+ beta", tvar, "*",tvar,"[i,t]"))
From 90179755cd5cd581828820f08f744d4105a9a1f1 Mon Sep 17 00:00:00 2001
From: Michael Dietze
Date: Fri, 14 Apr 2017 19:57:38 -0400
Subject: [PATCH 007/771] tree-ring: add time-var interaction to output
tracking variables
---
modules/data.land/R/InventoryGrowthFusion.R | 4 ++++
1 file changed, 4 insertions(+)
diff --git a/modules/data.land/R/InventoryGrowthFusion.R b/modules/data.land/R/InventoryGrowthFusion.R
index 3b45d88f41f..37f7d6ed5b3 100644
--- a/modules/data.land/R/InventoryGrowthFusion.R
+++ b/modules/data.land/R/InventoryGrowthFusion.R
@@ -307,6 +307,10 @@ model{
## priors
Xt.priors <- paste0(Xt.priors,
" ",myBeta,"~dnorm(0,0.001)\n")
+
+ ## add to list of varibles JAGS is tracking
+ out.variables <- c(out.variables, myBeta)
+
} ## end time-varying interaction terms
From e0e763f6d65d84665d96eaee3aeb2be959f8d9ec Mon Sep 17 00:00:00 2001
From: Michael Dietze
Date: Thu, 20 Apr 2017 17:42:40 -0400
Subject: [PATCH 008/771] Sitegroup to MultiSettings: bugfix to query of id vs
site_id
---
settings/R/createMultisiteMultiSettings.r | 6 +++---
1 file changed, 3 insertions(+), 3 deletions(-)
diff --git a/settings/R/createMultisiteMultiSettings.r b/settings/R/createMultisiteMultiSettings.r
index 28f13baf81e..ea4dea2ed91 100644
--- a/settings/R/createMultisiteMultiSettings.r
+++ b/settings/R/createMultisiteMultiSettings.r
@@ -25,13 +25,13 @@
#'
#' @example examples/examples.MultiSite.MultiSettings.r
createSitegroupMultiSettings = function(templateSettings, sitegroupId, nSite, con=NULL, params=templateSettings$database$bety) {
- query <- paste("SELECT id FROM sitegroups_sites WHERE sitegroup_id =", sitegroupId)
+ query <- paste("SELECT site_id FROM sitegroups_sites WHERE sitegroup_id =", sitegroupId)
allSites <- PEcAn.DB::db.query(query, con=con, params=params)
if(missing(nSite))
- siteIds <- allSites$id
+ siteIds <- allSites$site_id
else
- siteIds <- sample(allSites$id, nSite, replace=FALSE)
+ siteIds <- sample(allSites$site_id, nSite, replace=FALSE)
settings <- createMultiSiteSettings(templateSettings, siteIds)
}
From 69cbb61a1901f0ea4cc43160539e07e78a63b4b3 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sun, 21 May 2017 13:02:52 -0500
Subject: [PATCH 009/771] Experimenting with ggplotly for interactiveness
---
shiny/workflowPlot/helper.R | 14 +++++++++++++
shiny/workflowPlot/server.R | 40 +++++++++++++++++++++++++++----------
shiny/workflowPlot/ui.R | 13 ++++++++----
3 files changed, 52 insertions(+), 15 deletions(-)
create mode 100644 shiny/workflowPlot/helper.R
diff --git a/shiny/workflowPlot/helper.R b/shiny/workflowPlot/helper.R
new file mode 100644
index 00000000000..a014b844a8a
--- /dev/null
+++ b/shiny/workflowPlot/helper.R
@@ -0,0 +1,14 @@
+checkAndDownload<-function(packageNames) {
+ for(packageName in packageNames) {
+ if(!isInstalled(packageName)) {
+ install.packages(packageName,repos="http://lib.stat.cmu.edu/R/CRAN")
+ }
+ library(packageName,character.only=TRUE,quietly=TRUE,verbose=FALSE)
+ }
+}
+isInstalled <- function(mypkg){
+ is.element(mypkg, installed.packages()[,1])
+}
+
+checkAndDownload(c('plotly','scales'))
+# testVal = 5
\ No newline at end of file
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index 1f4d31e2117..11ee7c1f271 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -3,8 +3,9 @@ library(PEcAn.DB)
library(shiny)
library(ncdf4)
library(ggplot2)
-
-
+source('helper.R')
+require(plotly)
+library(scales)
# Define server logic
server <- shinyServer(function(input, output, session) {
bety <- betyConnect()
@@ -30,6 +31,9 @@ server <- shinyServer(function(input, output, session) {
var_names <- reactive({
run_ids <- get_run_ids(bety, workflow_id())
var_names <- get_var_names(bety, workflow_id(), run_ids[1])
+ # Removing the variables "Year" and "FracJulianDay" from the Variable Name input in the app
+ removeVarNames = c('Year','FracJulianDay')
+ var_names <-var_names[!var_names %in% removeVarNames]
return(var_names)
})
observe({
@@ -52,8 +56,14 @@ server <- shinyServer(function(input, output, session) {
ranges$y <- NULL
}
})
-
- output$outputPlot <- renderPlot({
+ # If want to render text
+ output$info <- renderText({
+ paste0(input$variable_name)
+ # paste0(testVal)
+ # paste0("x=", input$plot_dblclick$x, "\ny=", input$plot_dblclick$y)
+ })
+
+ output$outputPlot <- renderPlotly({
workflow_id <- isolate(input$workflow_id)
run_id <- isolate(input$run_id)
var_name <- input$variable_name
@@ -83,16 +93,24 @@ server <- shinyServer(function(input, output, session) {
xlab <- if (is.null(ranges$x)) "Time" else paste(ranges$x, collapse=" - ")
# plot result
print(ranges$x)
- plt <- ggplot(data.frame(dates, vals), aes(x=dates, y=vals)) +
- geom_point(aes(color="Model output")) +
+ dates = as.Date(dates)
+ df = data.frame(dates, vals)
+ # df$dates = as.factor(df$dates)
+
+ plt <- ggplot(df, aes(x=dates, y=vals)) +
+ # geom_point(aes(color="Model output")) +
+ geom_point() +
# geom_smooth(aes(fill = "Spline fit")) +
- coord_cartesian(xlim = ranges$x, ylim = ranges$y) +
- scale_y_continuous(labels=fancy_scientific) +
+ # coord_cartesian(xlim = ranges$x, ylim = ranges$y) +
+ # scale_y_continuous(labels=fancy_scientific) +
labs(title=title, x=xlab, y=ylab) +
scale_color_manual(name = "", values = "black") +
- scale_fill_manual(name = "", values = "grey50")
- plot(plt)
- add_icon()
+ scale_fill_manual(name = "", values = "grey50")
+ # theme(axis.text.x = element_text(angle = -90))
+
+ plt<-ggplotly(plt)
+ # plot(plt)
+ # add_icon()
}
}
})
diff --git a/shiny/workflowPlot/ui.R b/shiny/workflowPlot/ui.R
index 5f7c5962ec7..8b54949a691 100644
--- a/shiny/workflowPlot/ui.R
+++ b/shiny/workflowPlot/ui.R
@@ -1,4 +1,5 @@
library(shiny)
+source('helper.R')
# Define UI
ui <- shinyUI(fluidPage(
@@ -12,10 +13,14 @@ ui <- shinyUI(fluidPage(
selectInput("variable_name", "Variable Name", "")
),
mainPanel(
- plotOutput("outputPlot",
- brush = brushOpts(id = "plot_brush",
- resetOnNew = TRUE),
- dblclick = "plot_dblclick")
+ plotlyOutput("outputPlot"
+ ## brushOpts and dblclick not supported by plotly
+ # brush = brushOpts(id = "plot_brush",
+ # resetOnNew = TRUE),
+ # dblclick = "plot_dblclick"
+ )
+ # Checking variable names
+ ,verbatimTextOutput("info")
)
)
))
From 716735b4bb8705fa0b59f03233d7ca954875568d Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Thu, 25 May 2017 19:30:03 +0530
Subject: [PATCH 010/771] Added the Dockerfile to generate the
Containers new file: Dockerfile
---
Dockerfile | 40 ++++++++++++++++++++++++++++++++++++++++
1 file changed, 40 insertions(+)
create mode 100644 Dockerfile
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000000..a61c62a9bd4
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,40 @@
+FROM ubuntu:16.04
+MAINTAINER amanskywalker (ak47su30ac@gmail.com)
+
+# expose port 80 for the web interface
+EXPOSE 80
+
+# expose port 22 for ssh maintance
+EXPOSE 22
+
+# updated ppa's
+RUN echo "deb http://cran.rstudio.com/bin/linux/ubuntu xenial/" > /etc/apt/sources.list.d/R.list &&\
+ apt-key adv --keyserver keyserver.ubuntu.com --recv-keys E084DAB9
+
+# copy the installation script inside the container
+ADD docker/ /build
+
+# Run the OS System setup script
+RUN chmod 750 /build/system_services.sh
+RUN /build/system_services.sh
+
+# Set script mod +x for preprocessors
+RUN chmod 750 /build/*
+
+# run update machine to update machine
+RUN /build/update_machine.sh
+
+# run inatall packages to install required packages
+RUN /build/install_packages.sh
+
+# run install R to install R packages
+RUN /build/install_R.sh
+
+# run install pecan to install pecan cores
+RUN /build/install_pecan.sh
+
+# Clean up APT when done.
+RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+
+# startup
+CMD ["/sbin/my_init"]
From 4c4fbf777968fb49d1ddc2dbf418fb92f21e480d Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Thu, 25 May 2017 19:30:50 +0530
Subject: [PATCH 011/771] Added the Dockerfile supporting Scripts to
install PEcAn and needed packages
---
docker/bin/my_init | 189 +++++++++++++++++++++++++++
docker/config/cron_log_config | 5 +
docker/install_R.sh | 32 +++++
docker/install_packages.sh | 86 ++++++++++++
docker/install_pecan.sh | 38 ++++++
docker/install_pecan_preprocessor.sh | 36 +++++
docker/runit/cron | 3 +
docker/runit/cron_log | 3 +
docker/system_services.sh | 90 +++++++++++++
docker/update_machine.sh | 30 +++++
10 files changed, 512 insertions(+)
create mode 100644 docker/bin/my_init
create mode 100644 docker/config/cron_log_config
create mode 100644 docker/install_R.sh
create mode 100644 docker/install_packages.sh
create mode 100644 docker/install_pecan.sh
create mode 100644 docker/install_pecan_preprocessor.sh
create mode 100644 docker/runit/cron
create mode 100644 docker/runit/cron_log
create mode 100644 docker/system_services.sh
create mode 100644 docker/update_machine.sh
diff --git a/docker/bin/my_init b/docker/bin/my_init
new file mode 100644
index 00000000000..af87c28c78b
--- /dev/null
+++ b/docker/bin/my_init
@@ -0,0 +1,189 @@
+#! /bin/bash
+export LC_ALL=C TERM="xterm"
+
+trap 'shutdown_runit_services' INT TERM
+
+# variables
+env > /etc/envvars
+temp_var=$@
+FILES=/etc/container_environment/*
+runsvdir_PID=
+
+# functions
+
+# importing the environment variables from image
+import_envvars () {
+
+ clear_existing_environment=${1:-true}
+ override_existing_environment=${2:-true}
+
+ for file in $FILES
+ do
+ FILE=`basename $file`
+ if [ $override_existing_environment = true ] || !( env | grep -q $FILE)
+ then
+ export eval $FILE=`cat $file`
+ fi
+ done
+}
+
+# exporting the environment variables to the image
+export_envvars () {
+ to_dir=${1:-true}
+ no_record="HOME USER GROUP UID GID SHELL SHLVL PWD"
+ # to begin .json and.sh files
+ echo -n "{" > /etc/container_environment.json
+ echo -n "" > /etc/container_environment.sh
+ # saving variables into file. individual file by variable.
+ env | while read -r line
+ do
+ a=`expr index "$line" \=`
+ b=$((a-1))
+ file_name=${line:0:$b}
+ file_val=${line:$a}
+ if echo "$no_record" | grep -q "$file_name"
+ then
+ continue
+ else
+ # write to files
+ if [ $to_dir = true ] ; then echo $file_val > /etc/container_environment/$file_name ; fi
+ # write to .sh file
+ echo "export" $file_name"='"$file_val"'" >> /etc/container_environment.sh
+ # write to .json file
+ echo -n "\""$file_name"\":\""$file_val"\"," >> /etc/container_environment.json
+ fi
+ done
+ # adding } to closed the .json file
+ echo -e "\b}" >> /etc/container_environment.json
+}
+
+# function to run the command
+run_command () {
+ if [ -x $1 ]; then
+ echo >&2 "*** Running: $1"
+ $1
+ retval=$?
+ if [ $retval != 0 ];
+ then
+ echo >&2 "*** Failed with return value: $retval"
+ exit $retval
+ else
+ import_envvars
+ export_envvars false
+ fi
+ fi
+}
+
+# function to run the startup scripts
+run_startup_files() {
+ # running /etc/my_init.d/
+ echo "Starting pre-service scritps in /etc/my_init.d"
+ for script in /etc/my_init.d/*
+ do
+ run_command $script
+ done
+
+ echo "starting rc.local scritps"
+ run_command /etc/rc.local
+}
+
+
+# function to start corn jobs
+start_runit () {
+ echo "Booting runit daemon..."
+ /usr/bin/runsvdir -P /etc/service 'log:.........................................................................................................' &
+ runsvdir_PID=$!
+ echo "Process runsvdir running with PID $runsvdir_PID"
+}
+
+# function to shutdown corn jobs
+shutdown_runit_services() {
+ # need to check if runit service is runnung before shutdown ..
+ echo "Begin shutting down runit services..."
+ /usr/bin/sv down /etc/service/*
+ # need to give some time and check if service is down if time greater than allow them force exit
+ count=1
+ while [ $(/usr/bin/sv status /etc/service/* | grep -c "^run:") != 0 ]
+ do
+ sleep 1
+ count=`expr $count + 1`
+ if [ $count -gt 10 ]; then break ; fi
+ done
+ exit 0
+}
+
+# message to echo things to user
+message () {
+ echo "usage: my_init [-h|--help] [--skip-startup-files] [--skip-runit] [-- MAIN_COMMAND ]"
+ echo "optional arguments:"
+ echo " -h, --help show this help message and exit"
+ echo " --skip-startup-files Skip running /etc/my_init.d/* and /etc/rc.local"
+ echo " --skip-runit Do not run runit services"
+ echo " --quiet Only print warnings and errors"
+}
+
+# import & export env
+import_envvars false false
+export_envvars
+
+
+# condition for --help
+if [ `echo $temp_var | grep -c "\-\-help" ` -gt 0 ] || [ `echo $temp_var | grep -c "\-h" ` -gt 0 ] ; then
+ message
+ exit 0
+fi
+
+# condition for --quiet
+if ! [ `echo $temp_var | grep -c "\-\-quiet" ` -gt 0 ] ; then
+ :
+ else
+ temp_var=$(echo $temp_var|sed "s/--quiet//")
+ echo "--quiet still need to be implememted"
+fi
+
+# condition for --skip-startup-files
+if ! [ `echo $temp_var | grep -c "\-\-skip-startup-files" ` -gt 0 ] ; then
+ run_startup_files
+ else
+ temp_var=$(echo $temp_var|sed "s/--skip-startup-files//")
+fi
+
+# condition for --skip-runit
+if ! [ `echo $temp_var | grep -c "\-\-skip-runit" ` -gt 0 ] ; then
+ start_runit
+ else
+ temp_var=$(echo $temp_var|sed "s/--skip-runit//")
+ if [ `echo $temp_var | grep -c "\-\- " ` -gt 0 ] ; then
+ temp_var=$(echo $temp_var|sed "s/--//")
+ exec $temp_var
+ exit 0
+ else
+ echo "Need to add command to do something: -- command"
+ echo
+ message
+ exit 0
+ fi
+fi
+
+if [ `echo $temp_var | grep -c "\-\- " ` -gt 0 ] ; then
+temp_var=$(echo $temp_var|sed "s/--//")
+ if ! [ "$temp_var" = "" ] ; then
+ # need to check if all service are online before executing command
+ count=1
+ while [ $(/sbin/sv status /etc/service/* | grep -c "^down:") != 0 ]
+ do
+ sleep 1
+ count=`expr $count + 1`
+ if [ $count -gt 10 ]; then break ; fi
+ done
+ exec $temp_var
+ shutdown_runit_services
+ else
+ echo "Need to add command to do something: -- command "
+ echo
+ message
+ shutdown_runit_services
+ fi
+fi
+
+wait
diff --git a/docker/config/cron_log_config b/docker/config/cron_log_config
new file mode 100644
index 00000000000..786ed9b34ce
--- /dev/null
+++ b/docker/config/cron_log_config
@@ -0,0 +1,5 @@
+s100000
+n5
+N3
+t86400
+!logwatcher
diff --git a/docker/install_R.sh b/docker/install_R.sh
new file mode 100644
index 00000000000..1abf6c760c3
--- /dev/null
+++ b/docker/install_R.sh
@@ -0,0 +1,32 @@
+#!/bin/bash
+
+. /build/install_pecan_preprocessor.sh
+
+echo "######################################################################"
+echo "R"
+echo "######################################################################"
+if [ -z "${R_LIBS_USER}" ]; then
+ echo 'export R_LIBS_USER=${HOME}/R/library' >> ${HOME}/.bashrc
+ echo 'R_LIBS_USER=${HOME}/R/library' >> ${HOME}/.Renviron
+ export export R_LIBS_USER=${HOME}/R/library
+ mkdir -p ${R_LIBS_USER}
+
+ case "$OS_VERSION" in
+ RH_*)
+ echo 'export PATH=${PATH}:/usr/pgsql-9.5/bin' >> ${HOME}/.bashrc
+ export PATH=${PATH}:/usr/pgsql-9.5/bin
+ ;;
+ esac
+fi
+echo 'if(!"devtools" %in% installed.packages()) install.packages("devtools", repos="http://cran.rstudio.com/")' | R --vanilla
+echo 'if(!"udunits2" %in% installed.packages()) install.packages("udunits2", configure.args=c(udunits2="--with-udunits2-include=/usr/include/udunits2"), repo="http://cran.rstudio.com")' | R --vanilla
+
+# packages for BrownDog shiny app
+echo 'if(!"leaflet" %in% installed.packages()) install.packages("leaflet", repos="http://cran.rstudio.com/")' | R --vanilla
+echo 'if(!"RJSONIO" %in% installed.packages()) install.packages("RJSONIO", repos="http://cran.rstudio.com/")' | R --vanilla
+
+#echo 'update.packages(repos="http://cran.rstudio.com/", ask=FALSE)' | R --vanilla
+echo 'x <- rownames(old.packages(repos="http://cran.rstudio.com/")); update.packages(repos="http://cran.rstudio.com/", ask=FALSE, oldPkgs=x[!x %in% "rgl"])' | R --vanilla
+
+#echo 'update.packages(repos="http://cran.rstudio.com/", ask=FALSE)' | R --vanilla
+echo 'x <- rownames(old.packages(repos="http://cran.rstudio.com/")); update.packages(repos="http://cran.rstudio.com/", ask=FALSE, oldPkgs=x[!x %in% "rgl"])' | R --vanilla
diff --git a/docker/install_packages.sh b/docker/install_packages.sh
new file mode 100644
index 00000000000..a89e4110e02
--- /dev/null
+++ b/docker/install_packages.sh
@@ -0,0 +1,86 @@
+#!/bin/bash
+
+. /build/install_pecan_preprocessor.sh
+
+echo "######################################################################"
+echo "SETTING UP REPOS"
+echo "######################################################################"
+case "$OS_VERSION" in
+ RH_5)
+ yum install -y wget
+ wget -O /etc/yum.repos.d/cornell.repo http://download.opensuse.org/repositories/home:cornell_vrdc/CentOS_CentOS-6/home:cornell_vrdc.repo
+ rpm -Uvh http://download.fedoraproject.org/pub/epel/5/x86_64/epel-release-5-4.noarch.rpm
+ ;;
+ RH_6)
+ yum install -y wget
+ wget -O /etc/yum.repos.d/cornell.repo http://download.opensuse.org/repositories/home:cornell_vrdc/CentOS_CentOS-6/home:cornell_vrdc.repo
+ yum -y localinstall https://download.postgresql.org/pub/repos/yum/9.5/redhat/rhel-6-x86_64/pgdg-centos95-9.5-2.noarch.rpm
+ rpm -Uvh http://download.fedoraproject.org/pub/epel/6/x86_64/epel-release-6-8.noarch.rpm
+ ;;
+ RH_7)
+ yum install -y wget
+ wget -O /etc/yum.repos.d/cornell.repo wget http://download.opensuse.org/repositories/home:cornell_vrdc/CentOS_7/home:cornell_vrdc.repo
+ yum -y localinstall https://download.postgresql.org/pub/repos/yum/9.5/redhat/rhel-7-x86_64/pgdg-centos95-9.5-2.noarch.rpm
+ rpm -Uvh http://download.fedoraproject.org/pub/epel/7/x86_64/e/epel-release-7-6.noarch.rpm
+ setsebool -P httpd_can_network_connect 1
+ ;;
+ Ubuntu)
+ # if [ ! -e /etc/apt/sources.list.d/R.list ]; then
+ # sh -c 'echo "deb http://cran.rstudio.com/bin/linux/ubuntu `lsb_release -s -c`/" > /etc/apt/sources.list.d/R.list'
+ # apt-key adv --keyserver keyserver.ubuntu.com --recv E084DAB9
+ # fi
+ if [ ! -e /etc/apt/sources.list.d/ruby.list ]; then
+ sh -c 'echo "deb http://ppa.launchpad.net/brightbox/ruby-ng/ubuntu xenial main" > /etc/apt/sources.list.d/ruby.list'
+ apt-key adv --keyserver keyserver.ubuntu.com --recv C3173AA6
+ fi
+ # if [ ! -e /etc/apt/sources.list.d/pgdg.list ]; then
+ # sh -c 'echo "deb http://apt.postgresql.org/pub/repos/apt `lsb_release -s -c`-pgdg main" > /etc/apt/sources.list.d/pgdg.list'
+ # wget --quiet -O - https://www.postgresql.org/media/keys/ACCC4CF8.asc | apt-key add -
+ # fi
+ apt-get -qq -y update
+ ;;
+esac
+
+echo "######################################################################"
+echo "INSTALLING PACKAGES"
+echo "######################################################################"
+case "$OS_VERSION" in
+ RH_*)
+ yum install -y git R gfortran openssl-devel
+ yum install -y openmpi openmpi-devel netcdf netcdf-openmpi netcdf-devel netcdf-openmpi-devel netcdf-fortran-devel netcdf-fortran-openmpi
+ ln -s /usr/lib64/openmpi/bin/mpicc /usr/bin/mpicc
+ ln -s /usr/lib64/openmpi/bin/mpif90 /usr/bin/mpif90
+ # for ED
+ yum install -y hdf5-openmpi-devel
+ # for LPJ-GUESS
+ yum install -y cmake
+ # for DALEC
+ yum install -y gsl-devel liblas-devel lapack-devel
+ # for PEcAn
+ yum install -y ed libpng-devel libpng12-devel libjpeg-turbo-devel jags4 jags4-devel python-devel udunits2-devel gdal-devel proj-devel proj-devel proj-nad proj-epsg libxml2-devel udunits2-devel gmp-devel
+ # for PostgreSQL
+ yum install -y postgresql95-server postgresql95-devel postgis2_95
+ # web gui
+ yum install -y httpd php php-pgsql php-xml
+ ;;
+ Ubuntu)
+ apt-get -y install build-essential gfortran git r-base-core r-base r-base-dev jags liblapack-dev libnetcdf-dev netcdf-bin bc libcurl4-gnutls-dev curl udunits-bin libudunits2-dev libgmp-dev python-dev libgdal1-dev libproj-dev expect
+ apt-get -y install openmpi-bin libopenmpi-dev
+ apt-get -y install libgsl0-dev libssl-dev
+ #
+ apt-get -y install r-cran-ggplot2
+ # for maeswrap
+ apt-get -y install r-cran-rgl
+ # for R doc
+ apt-get -y install texinfo texlive-latex-base texlive-latex-extra texlive-fonts-recommended
+ # ruby
+ apt-get -y install ruby2.1 ruby2.1-dev
+ # for LPJ-GUESS
+ apt-get -y install cmake
+ # for PostgreSQL
+ # apt-get -y install libdbd-pgsql postgresql-9.5 postgresql-client-9.5 libpq-dev postgresql-9.5-postgis-2.2 postgresql-9.5-postgis-scripts
+ # for web gui
+ apt-get -y install apache2 libapache2-mod-php7.0 php7.0 libapache2-mod-passenger php7.0-xml php-ssh2 php7.0-pgsql
+ # Ubuntu 14.04 php5-pgsql libapache2-mod-php5 php5 and no php-xml
+ ;;
+esac
diff --git a/docker/install_pecan.sh b/docker/install_pecan.sh
new file mode 100644
index 00000000000..ffd6a30c128
--- /dev/null
+++ b/docker/install_pecan.sh
@@ -0,0 +1,38 @@
+#!/bin/bash
+
+. /build/install_pecan_preprocessor.sh
+
+echo "######################################################################"
+echo "PECAN"
+echo "######################################################################"
+if [ ! -e ${HOME}/pecan ]; then
+ cd
+ git clone https://github.com/PecanProject/pecan.git
+fi
+cd ${HOME}/pecan
+git pull
+make
+
+ curl -o /var/www/html/pecan.pdf https://www.gitbook.com/download/pdf/book/pecan/pecan-documentation
+ rm /var/www/html/index.html
+ ln -s ${HOME}/pecan/documentation/index_vm.html /var/www/html/index.html
+if [ ! -e ${HOME}/pecan/web/config.php ]; then
+ sed -e "s#browndog_url=.*#browndog_url=\"${BROWNDOG_URL}\";#" \
+ -e "s#browndog_username=.*#browndog_username=\"${BROWNDOG_USERNAME}\";#" \
+ -e "s#browndog_password=.*#browndog_password=\"${BROWNDOG_PASSWORD}\";#" \
+ -e "s#googleMapKey=.*#googleMapKey=\"${GOOGLE_MAP_KEY}\";#" \
+ -e "s/carya/$USER/g" ${HOME}/pecan/web/config.example.php > ${HOME}/pecan/web/config.php
+fi
+
+if [ ! -e ${HTTP_CONF}/pecan.conf ]; then
+ cat > /tmp/pecan.conf << EOF
+Alias /pecan ${HOME}/pecan/web
+
+ DirectoryIndex index.php
+ Options +ExecCGI
+ Require all granted
+
+EOF
+ cp /tmp/pecan.conf ${HTTP_CONF}/pecan.conf
+ rm /tmp/pecan.conf
+fi
diff --git a/docker/install_pecan_preprocessor.sh b/docker/install_pecan_preprocessor.sh
new file mode 100644
index 00000000000..c92a67378a1
--- /dev/null
+++ b/docker/install_pecan_preprocessor.sh
@@ -0,0 +1,36 @@
+#!/bin/bash
+
+set -e
+
+#if [ "`whoami`" == "root" ]; then
+# echo "Don't run this script as root"
+# exit -1
+#fi
+
+# configuration
+BROWNDOG_URL="http://dap.ncsa.illinois.edu:8184/convert/";
+BROWNDOG_USERNAME="";
+BROWNDOG_PASSWORD="";
+
+GOOGLE_MAP_KEY=""
+
+#SETUP_VM=""
+#SETUP_PALEON=""
+#REBUILD=""
+
+# commented out might need it later for communication purpose
+#RSTUDIO_SERVER="1.0.136"
+#SHINY_SERVER="1.5.3.838"
+
+if [ -e $(dirname $0)/install_pecan.config ]; then
+ . $(dirname $0)/install_pecan.config
+fi
+
+if [ -e /etc/redhat-release ]; then
+ OS_VERSION="RH_$( sed -r 's/.* ([0-9]+)\..*/\1/' /etc/redhat-release )"
+ HTTP_CONF="/etc/httpd/conf.d/"
+ chmod o+x ${HOME}
+else
+ OS_VERSION="Ubuntu"
+ HTTP_CONF="/etc/apache2/conf-available/"
+fi
diff --git a/docker/runit/cron b/docker/runit/cron
new file mode 100644
index 00000000000..e36fbfd51ec
--- /dev/null
+++ b/docker/runit/cron
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec /usr/sbin/cron -f
diff --git a/docker/runit/cron_log b/docker/runit/cron_log
new file mode 100644
index 00000000000..c0c15e1ca78
--- /dev/null
+++ b/docker/runit/cron_log
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec chpst -u nobody svlogd -tt /var/log/cron/
diff --git a/docker/system_services.sh b/docker/system_services.sh
new file mode 100644
index 00000000000..81088dcad13
--- /dev/null
+++ b/docker/system_services.sh
@@ -0,0 +1,90 @@
+#!/bin/bash
+export LC_ALL=C
+export DEBIAN_FRONTEND=noninteractive
+minimal_apt_get_install='apt-get install -y --no-install-recommends'
+
+## temporarily disable dpkg fsync to make building faster.
+if [[ ! -e /etc/dpkg/dpkg.cfg.d/docker-apt-speedup ]]; then
+ echo force-unsafe-io > /etc/dpkg/dpkg.cfg.d/docker-apt-speedup
+fi
+
+## prevent initramfs updates from trying to run grub and lilo.
+export INITRD=no
+mkdir -p /etc/container_environment
+echo -n no > /etc/container_environment/INITRD
+
+## enable Ubuntu Universe and Multiverse.
+sed -i 's/^#\s*\(deb.*universe\)$/\1/g' /etc/apt/sources.list
+sed -i 's/^#\s*\(deb.*multiverse\)$/\1/g' /etc/apt/sources.list
+apt-get update
+
+## fix some issues with APT packages.
+dpkg-divert --local --rename --add /sbin/initctl
+ln -sf /bin/true /sbin/initctl
+
+## replace the 'ischroot' tool to make it always return true.
+dpkg-divert --local --rename --add /usr/bin/ischroot
+ln -sf /bin/true /usr/bin/ischroot
+
+## upgrade all packages.
+apt-get dist-upgrade -y --no-install-recommends
+
+## install HTTPS support for APT.
+$minimal_apt_get_install apt-utils apt-transport-https ca-certificates language-pack-en
+
+## fix locale.
+locale-gen en_US.UTF-8
+update-locale LANG=en_US.UTF-8 LC_CTYPE=en_US.UTF-8 LANGUAGE=en_US:en LC_ALL=en_US.UTF-8
+echo -n en_US.UTF-8 > /etc/container_environment/LANG
+echo -n en_US.UTF-8 > /etc/container_environment/LC_CTYPE
+echo -n en_US:en > /etc/container_environment/LANGUAGE
+echo -n en_US.UTF-8 > /etc/container_environment/LC_ALL
+
+## install init process.
+cp /build/bin/my_init /sbin/
+chmod 750 /sbin/my_init
+mkdir -p /etc/my_init.d
+mkdir -p /etc/container_environment
+touch /etc/container_environment.sh
+touch /etc/container_environment.json
+chmod 700 /etc/container_environment
+
+groupadd -g 8377 docker_env
+chown :docker_env /etc/container_environment.sh /etc/container_environment.json
+chmod 640 /etc/container_environment.sh /etc/container_environment.json
+ln -s /etc/container_environment.sh /etc/profile.d/
+echo ". /etc/container_environment.sh" >> /root/.bashrc
+
+## install runit.
+$minimal_apt_get_install runit cron
+
+## install cron daemon.
+mkdir -p /etc/service/cron
+mkdir -p /var/log/cron
+chmod 600 /etc/crontabs
+cp /build/runit/cron /etc/service/cron/run
+cp /build/config/cron_log_config /var/log/cron/config
+chown -R nobody /var/log/cron
+chmod +x /etc/service/cron/run
+
+## remove useless cron entries.
+rm -f /etc/cron.daily/standard
+rm -f /etc/cron.daily/upstart
+rm -f /etc/cron.daily/dpkg
+rm -f /etc/cron.daily/password
+rm -f /etc/cron.weekly/fstrim
+
+## often used tools.
+$minimal_apt_get_install curl less nano psmisc wget
+
+## fix other small problem.
+rm /bin/sh
+ln -s /bin/bash /bin/sh
+echo `. /etc/lsb-release; echo ${DISTRIB_CODENAME/*, /}` >> /etc/container_environment/DISTRIB_CODENAME
+
+## cleanup
+apt-get clean
+rm -rf /build
+rm -rf /tmp/* /var/tmp/*
+rm -rf /var/lib/apt/lists/*
+rm -f /etc/dpkg/dpkg.cfg.d/02apt-speedup
diff --git a/docker/update_machine.sh b/docker/update_machine.sh
new file mode 100644
index 00000000000..4f7709df5ef
--- /dev/null
+++ b/docker/update_machine.sh
@@ -0,0 +1,30 @@
+#!/bin/bash
+
+. /build/install_pecan_preprocessor.sh
+
+# actual install/update
+echo "######################################################################"
+echo "UPDATING MACHINE"
+echo "######################################################################"
+mkdir /home/carya/
+chmod 755 /home/carya/
+case "$OS_VERSION" in
+ RH_*)
+ yum update -y
+ if [ "$SETUP_VM" != "" ]; then
+ sed -i -e "s/^127.0.0.1 .*\$/127.0.0.1 ${HOSTNAME}.pecan ${HOSTNAME} localhost localhost.localdomain localhost4 localhost4.localdomain4/" /etc/hosts
+ fi
+ ;;
+ Ubuntu)
+ apt-get -qq -y update
+ apt-get -y dist-upgrade
+ apt-get -y purge --auto-remove
+ if [ "$SETUP_VM" != "" ]; then
+ sed -i -e "s/^127.0.0.1 .*\$/127.0.0.1 ${HOSTNAME}.pecan ${HOSTNAME} localhost/" /etc/hosts
+ fi
+ ;;
+ *)
+ echo "Unknown OS"
+ exit 1
+ ;;
+esac
From a37e01e209a8664676f4d69956da125f324e0027 Mon Sep 17 00:00:00 2001
From: "Shawn P. Serbin"
Date: Sat, 27 May 2017 10:25:40 -0400
Subject: [PATCH 012/771] Some cleanup
---
models/fates/inst/template.job | 2 --
1 file changed, 2 deletions(-)
diff --git a/models/fates/inst/template.job b/models/fates/inst/template.job
index 855212c2e66..cd5c5af83c6 100644
--- a/models/fates/inst/template.job
+++ b/models/fates/inst/template.job
@@ -25,8 +25,6 @@ export GFORTRAN_UNBUFFERED_PRECONNECTED=yes
## Seem to be stuck having to build a new case. Will try and avoid this in the future
cd ${SCRIPTROOT}
echo "*** Run create_newcase ***"
- #./create_newcase -case @CASEDIR@ -res CLM_USRDAT -compset ICLM45ED -mach eddi -compiler gnu
- #./create_newcase -case @CASEDIR@ -res 1x1_brazil -compset ICLM45ED -mach eddi -compiler gnu -project pecan
echo @MACHINE@
./create_newcase -case @CASEDIR@ -res 1x1_brazil -compset ICLM45ED -mach @MACHINE@ -compiler @COMPILER@ -project @PROJECT@
From 214d6b8b0beb3c34e1c9a03bb60b64d01bd72853 Mon Sep 17 00:00:00 2001
From: "Shawn P. Serbin"
Date: Mon, 29 May 2017 10:24:56 -0400
Subject: [PATCH 013/771] Updated template.job. Works on modex
---
models/fates/inst/template.job | 26 +++++++++++++++++++-------
1 file changed, 19 insertions(+), 7 deletions(-)
diff --git a/models/fates/inst/template.job b/models/fates/inst/template.job
index cd5c5af83c6..6174570327e 100644
--- a/models/fates/inst/template.job
+++ b/models/fates/inst/template.job
@@ -25,9 +25,17 @@ export GFORTRAN_UNBUFFERED_PRECONNECTED=yes
## Seem to be stuck having to build a new case. Will try and avoid this in the future
cd ${SCRIPTROOT}
echo "*** Run create_newcase ***"
+ echo " ----- Case details:"
+ echo @CASEDIR@
+ #echo @RES@
+ #echo @COMPSET@
echo @MACHINE@
+ echo @COMPILER@
+ echo @PROJECT@
+ echo "--------------------------"
./create_newcase -case @CASEDIR@ -res 1x1_brazil -compset ICLM45ED -mach @MACHINE@ -compiler @COMPILER@ -project @PROJECT@
-
+ #./create_newcase -case @CASEDIR@ -res @RES@ -compset @COMPSET -mach @MACHINE@ -compiler @COMPILER@ -project @PROJECT@
+
cd "@RUNDIR@"
## RECURSIVELY COPY/SYMLINK REFERENCE INPUTS DIRECTORY (DIN_LOC_ROOT)
@@ -72,8 +80,9 @@ export GFORTRAN_UNBUFFERED_PRECONNECTED=yes
## ENV_BUILD update configurations
./xmlchange -file env_build.xml -id CIME_OUTPUT_ROOT -val @CASEDIR@
- ./xmlchange -file env_build.xml -id EXEROOT -val @BLD@
- ./xmlchange -file env_build.xml -id BUILD_COMPLETE -val TRUE
+ #./xmlchange -file env_build.xml -id EXEROOT -val @BLD@
+ ./xmlchange -file env_build.xml -id EXEROOT -val @CASEDIR@/bld
+ #./xmlchange -file env_build.xml -id BUILD_COMPLETE -val TRUE
## DATES -> ENV_RUN
./xmlchange -file env_run.xml -id RUNDIR -val @CASEDIR@/run
@@ -81,7 +90,7 @@ export GFORTRAN_UNBUFFERED_PRECONNECTED=yes
./xmlchange -file env_run.xml -id STOP_OPTION -val ndays
./xmlchange -file env_run.xml -id STOP_N -val @STOP_N@
- ## SITE INFO --> DOMAIN FILE
+
rm @INDIR@/share/domains/domain.clm/*
ln -s @RUNDIR@/domain.lnd.@SITE_NAME@.nc @INDIR@/share/domains/domain.clm/
@@ -111,10 +120,10 @@ EOF
echo "*** Run case.setup ***"
./case.setup
- ## ADDITIONAL MODS THAT ARE JUST ASSOCIATED WITH REFCASE
+ ## ADDITIONAL MODS THAT ARE JUST ASSOCIATED WITH REFCASE - removed 'NEP' 'NPP_column'
cat >> user_nl_clm << EOF
hist_empty_htapes = .true.
-hist_fincl1='EFLX_LH_TOT','TSOI_10CM','QVEGT','NEP','GPP','AR','ED_bleaf','ED_biomass','NPP_column','NPP','MAINT_RESP','GROWTH_RESP'
+hist_fincl1='EFLX_LH_TOT','TSOI_10CM','QVEGT','GPP','AR','ED_bleaf','ED_biomass','NPP','MAINT_RESP','GROWTH_RESP'
hist_mfilt = 8760
hist_nhtfrq = -1
EOF
@@ -124,7 +133,9 @@ EOF
#EOF
echo "*** Run case.build ***"
+ sleep 10
./case.build
+ #./case.build --sharedlib-only
## RUN
echo "*** Run ***"
@@ -145,7 +156,8 @@ EOF
ln -s @RUNDIR@/datm.streams.txt.PEcAn_met .
fi
- "@BINARY@"
+ #"@BINARY@" # EDITED BY SPS
+ "@CASEDIR@/bld/cesm.exe" # edited for testing
STATUS=$?
From bfa8bffa907e6bcd27c3be39fed2a809ed40eb0a Mon Sep 17 00:00:00 2001
From: "Shawn P. Serbin"
Date: Mon, 29 May 2017 10:52:00 -0400
Subject: [PATCH 014/771] Update to models/fates/R/model2netcdf.FATES.R to
 isolate the variable outputting NaNs
---
models/fates/R/model2netcdf.FATES.R | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/models/fates/R/model2netcdf.FATES.R b/models/fates/R/model2netcdf.FATES.R
index 1c9dcf8924a..ed3768e4b38 100644
--- a/models/fates/R/model2netcdf.FATES.R
+++ b/models/fates/R/model2netcdf.FATES.R
@@ -98,10 +98,12 @@ model2netcdf.FATES <- function(outdir) {
xyt <- list(lon, lat, t)
### build netCDF data
+ ## !! TODO: ADD MORE OUTPUTS HERE
out <- NULL
out <- var_update(out,"AR","AutoResp","kgC m-2 s-1")
out <- var_update(out,"GPP","GPP","kgC m-2 s-1")
- out <- var_update(out,"NPP_column","NPP","kgC m-2 s-1")
+ out <- var_update(out,"NPP","NPP","kgC m-2 s-1")
+ #out <- var_update(out,"NPP_column","NPP","kgC m-2 s-1") #!! RKnox suggested using NPP not NPP_column
#out <- var_update(out,"NEP","NEE","kgC m-2 s-1") # !!temporarily disabling NEE. See https://github.com/NGEET/ed-clm/issues/154
out <- var_update(out,"EFLX_LH_TOT","Qle","W m-2")
out <- var_update(out,"QVEGT","TVeg","mm s-1") ## equiv to std of kg m-2 s but don't trust udunits to get right
From 330bff647bda1e5534342f5517bb8ed57f06de8e Mon Sep 17 00:00:00 2001
From: "Shawn P. Serbin"
Date: Mon, 29 May 2017 11:13:10 -0400
Subject: [PATCH 015/771] Quick add back of comment
---
models/fates/inst/template.job | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/models/fates/inst/template.job b/models/fates/inst/template.job
index 6174570327e..27943f3487d 100644
--- a/models/fates/inst/template.job
+++ b/models/fates/inst/template.job
@@ -90,7 +90,7 @@ export GFORTRAN_UNBUFFERED_PRECONNECTED=yes
./xmlchange -file env_run.xml -id STOP_OPTION -val ndays
./xmlchange -file env_run.xml -id STOP_N -val @STOP_N@
-
+ ## SITE INFO --> DOMAIN FILE
rm @INDIR@/share/domains/domain.clm/*
ln -s @RUNDIR@/domain.lnd.@SITE_NAME@.nc @INDIR@/share/domains/domain.clm/
From 1232952efb4c472c9c855d0a1e291b86361b1f1d Mon Sep 17 00:00:00 2001
From: "Shawn P. Serbin"
Date: Tue, 30 May 2017 09:01:56 -0400
Subject: [PATCH 016/771] A few more tweaks to template.job. Still need to get
this to work without re-building model each time
---
models/fates/inst/template.job | 12 +++++++-----
1 file changed, 7 insertions(+), 5 deletions(-)
diff --git a/models/fates/inst/template.job b/models/fates/inst/template.job
index 27943f3487d..7ed2e9b05fd 100644
--- a/models/fates/inst/template.job
+++ b/models/fates/inst/template.job
@@ -27,11 +27,11 @@ export GFORTRAN_UNBUFFERED_PRECONNECTED=yes
echo "*** Run create_newcase ***"
echo " ----- Case details:"
echo @CASEDIR@
- #echo @RES@
- #echo @COMPSET@
- echo @MACHINE@
- echo @COMPILER@
- echo @PROJECT@
+ #echo "Res: @RES@ "
+ #echo "Compset: @COMPSET@ "
+ echo "Machine: @MACHINE@ "
+ echo "Compiler: @COMPILER@ "
+ echo "Project_name: @PROJECT@ "
echo "--------------------------"
./create_newcase -case @CASEDIR@ -res 1x1_brazil -compset ICLM45ED -mach @MACHINE@ -compiler @COMPILER@ -project @PROJECT@
#./create_newcase -case @CASEDIR@ -res @RES@ -compset @COMPSET -mach @MACHINE@ -compiler @COMPILER@ -project @PROJECT@
@@ -139,6 +139,8 @@ EOF
## RUN
echo "*** Run ***"
+ now=`date`
+ echo "Simulation start: $now"
cd run
mkdir timing
echo `pwd`
From 5fa6e79eac0dc966feeb3f2aa5fab2255263cd3a Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sat, 3 Jun 2017 21:48:29 -0500
Subject: [PATCH 017/771] Refactoring shiny code to load all variables at once.
Also allow models from different run and workflow ids
---
shiny/workflowPlot/helper.R | 4 +-
shiny/workflowPlot/server.R | 211 +++++++++++++++++++++++++++---------
shiny/workflowPlot/ui.R | 10 +-
3 files changed, 172 insertions(+), 53 deletions(-)
diff --git a/shiny/workflowPlot/helper.R b/shiny/workflowPlot/helper.R
index a014b844a8a..9390b6d30b7 100644
--- a/shiny/workflowPlot/helper.R
+++ b/shiny/workflowPlot/helper.R
@@ -9,6 +9,6 @@ checkAndDownload<-function(packageNames) {
isInstalled <- function(mypkg){
is.element(mypkg, installed.packages()[,1])
}
-
-checkAndDownload(c('plotly','scales'))
+checkAndDownload(c('plotly','scales','dplyr'))
+# devtools::install_github('hadley/ggplot2')
# testVal = 5
\ No newline at end of file
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index 11ee7c1f271..06307552dcd 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -4,48 +4,48 @@ library(shiny)
library(ncdf4)
library(ggplot2)
source('helper.R')
-require(plotly)
+library(plotly)
library(scales)
+library(dplyr)
# Define server logic
server <- shinyServer(function(input, output, session) {
+ # options(shiny.trace=TRUE)
bety <- betyConnect()
-
+ # bety <- betyConnect('/home/carya/pecan/web/config.php')
ranges <- reactiveValues(x = NULL, y = NULL)
-
print("RESTART")
# set the workflow id(s)
ids <- get_workflow_ids(bety, session)
- updateSelectizeInput(session, "workflow_id", choices=ids)
+ # updateSelectizeInput(session, "workflow_id", choices=ids)
+ observe({
+ updateSelectizeInput(session, "workflow_id", choices=ids)
+ })
workflow_id <- reactive({
req(input$workflow_id)
workflow_id <- input$workflow_id
})
-
# update the run_ids if user changes workflow
run_ids <- reactive(get_run_ids(bety, workflow_id()))
observe({
updateSelectizeInput(session, "run_id", choices=run_ids())
})
-
# update variables if user changes run
var_names <- reactive({
run_ids <- get_run_ids(bety, workflow_id())
var_names <- get_var_names(bety, workflow_id(), run_ids[1])
# Removing the variables "Year" and "FracJulianDay" from the Variable Name input in the app
- removeVarNames = c('Year','FracJulianDay')
+ removeVarNames <- c('Year','FracJulianDay')
var_names <-var_names[!var_names %in% removeVarNames]
return(var_names)
})
observe({
updateSelectizeInput(session, "variable_name", choices=var_names())
})
-
observe({
ignore <- input$variable_name
ranges$x <- NULL
ranges$y <- NULL
})
-
observeEvent(input$plot_dblclick, {
brush <- input$plot_brush
if (!is.null(brush)) {
@@ -59,61 +59,174 @@ server <- shinyServer(function(input, output, session) {
# If want to render text
output$info <- renderText({
paste0(input$variable_name)
- # paste0(testVal)
+ # paste0(run_ids(),length(run_ids()),ids)
+ # ,session$clientData$url_search)
# paste0("x=", input$plot_dblclick$x, "\ny=", input$plot_dblclick$y)
})
-
- output$outputPlot <- renderPlotly({
- workflow_id <- isolate(input$workflow_id)
- run_id <- isolate(input$run_id)
- var_name <- input$variable_name
- if (workflow_id != "" && run_id != "" && var_name != "") {
- workflow <- collect(workflow(bety, workflow_id))
- if(nrow(workflow) > 0) {
- outputfolder <- file.path(workflow$folder, 'out', run_id)
- files <- list.files(outputfolder, "*.nc$", full.names=TRUE)
- dates <- NA
- vals <- NA
- title <- var_name
- ylab <- ""
- for(file in files) {
- nc <- nc_open(file)
- var <- ncdf4::ncatt_get(nc, var_name)
- #sw <- if ('Swdown' %in% names(nc$var)) ncdf4::ncvar_get(nc, 'Swdown') else TRUE
- sw <- TRUE
- title <- var$long_name
- ylab <- var$units
- x <- ncdays2date(ncdf4::ncvar_get(nc, 'time'), ncdf4::ncatt_get(nc, 'time'))
- y <- ncdf4::ncvar_get(nc, var_name)
- b <- !is.na(x) & !is.na(y) & sw != 0
- dates <- if(is.na(dates)) x[b] else c(dates, x[b])
- vals <- if(is.na(vals)) y[b] else c(vals, y[b])
- ncdf4::nc_close(nc)
+ workFlowData <-reactive({
+ # workflow_id = 99000000077
+ # run_id = 99000000002
+ # var_name = var_names
+ globalDF <- data.frame()
+ for(workflow_id in ids){
+ run_ids <- get_run_ids(bety,workflow_id)
+ for(run_id in run_ids){
+ var_names <- get_var_names(bety, workflow_id, run_id)
+ removeVarNames <- c('Year','FracJulianDay')
+ var_names <-var_names[!var_names %in% removeVarNames]
+ # if (workflow_id != "" && run_id != "" && var_name != "") {
+ workflow <- collect(workflow(bety, workflow_id))
+ if(nrow(workflow) > 0) {
+ outputfolder <- file.path(workflow$folder, 'out', run_id)
+ files <- list.files(outputfolder, "*.nc$", full.names=TRUE)
+ for(file in files) {
+ nc <- nc_open(file)
+ for(var_name in var_names){
+ dates <- NA
+ vals <- NA
+ title <- var_name
+ ylab <- ""
+ var <- ncdf4::ncatt_get(nc, var_name)
+ #sw <- if ('Swdown' %in% names(nc$var)) ncdf4::ncvar_get(nc, 'Swdown') else TRUE
+ sw <- TRUE
+ if(!is.null(var$long_name)){
+ title <- var$long_name
+ }
+ if(!is.null(var$units)){
+ ylab <- var$units
+ }
+ x <- ncdays2date(ncdf4::ncvar_get(nc, 'time'), ncdf4::ncatt_get(nc, 'time'))
+ y <- ncdf4::ncvar_get(nc, var_name)
+ b <- !is.na(x) & !is.na(y) & sw != 0
+ dates <- if(is.na(dates)) x[b] else c(dates, x[b])
+ dates <- as.Date(dates)
+ vals <- if(is.na(vals)) y[b] else c(vals, y[b])
+ xlab <- "Time"
+ # xlab <- if (is.null(ranges$x)) "Time" else paste(ranges$x, collapse=" - ")
+ valuesDF <- data.frame(dates,vals)
+ metaDF <- data.frame(workflow_id,run_id,title,xlab,ylab,var_name)
+ # metaDF1<-metaDF[rep(seq_len(nrow(valuesDF))),]
+ currentDF = cbind(valuesDF,metaDF)
+ globalDF<-rbind(globalDF,currentDF)
+ }
+ ncdf4::nc_close(nc)
+ }
}
- xlab <- if (is.null(ranges$x)) "Time" else paste(ranges$x, collapse=" - ")
- # plot result
- print(ranges$x)
- dates = as.Date(dates)
- df = data.frame(dates, vals)
- # df$dates = as.factor(df$dates)
-
- plt <- ggplot(df, aes(x=dates, y=vals)) +
+ }
+ }
+ globalDF$title = as.character(globalDF$title)
+ globalDF$xlab = as.character(globalDF$xlab)
+ globalDF$ylab = as.character(globalDF$ylab)
+ globalDF$var_name = as.character(globalDF$var_name)
+ return(globalDF)
+ })
+ output$outputPlot <- renderPlotly({
+ # workflow_id <- isolate(input$workflow_id)
+ # run_id <- isolate(input$run_id)
+ # var_name <- input$variable_name
+ # if (workflow_id != "" && run_id != "" && var_name != "") {
+ # workflow <- collect(workflow(bety, workflow_id))
+ # if(nrow(workflow) > 0) {
+ # outputfolder <- file.path(workflow$folder, 'out', run_id)
+ # files <- list.files(outputfolder, "*.nc$", full.names=TRUE)
+ # dates <- NA
+ # vals <- NA
+ # title <- var_name
+ # ylab <- ""
+ # for(file in files) {
+ # nc <- nc_open(file)
+ # var <- ncdf4::ncatt_get(nc, var_name)
+ # #sw <- if ('Swdown' %in% names(nc$var)) ncdf4::ncvar_get(nc, 'Swdown') else TRUE
+ # sw <- TRUE
+ # title <- var$long_name
+ # ylab <- var$units
+ # x <- ncdays2date(ncdf4::ncvar_get(nc, 'time'), ncdf4::ncatt_get(nc, 'time'))
+ # y <- ncdf4::ncvar_get(nc, var_name)
+ # b <- !is.na(x) & !is.na(y) & sw != 0
+ # dates <- if(is.na(dates)) x[b] else c(dates, x[b])
+ # vals <- if(is.na(vals)) y[b] else c(vals, y[b])
+ # ncdf4::nc_close(nc)
+ # }
+ # xlab <- if (is.null(ranges$x)) "Time" else paste(ranges$x, collapse=" - ")
+ # # plot result
+ # print(ranges$x)
+ # dates <- as.Date(dates)
+ # df <- data.frame(dates, vals)
+ # df <- workFlowData(input$workflow_id,input$run_id,input$variable_names)
+ masterDF<-workFlowData()
+ output$info1 <- renderText({
+ paste0(nrow(masterDF))
+ })
+ validate(
+ need(input$workflow_id, 'Found workflow id'),
+ need(input$run_id, 'Run id detected'),
+ need(input$variable_name, 'Please wait! Loading data')
+ )
+ masterDF$var_name = as.character(masterDF$var_name)
+ # masterDF$var_name = as.factor(masterDF$var_name)
+ # df1<-subset(masterDF,var_name==var_name)
+ df<-masterDF %>%
+ dplyr::filter(workflow_id == input$workflow_id &
+ run_id == input$run_id &
+ var_name == input$variable_name) %>%
+ dplyr::select(dates,vals)
+ title<-unique(df$title)[1]
+ xlab<-unique(df$xlab)[1]
+ ylab<-unique(df$ylab)[1]
+ output$info2 <- renderText({
+ paste0(nrow(df))
+ # paste0(typeof(title))
+ })
+ output$info3 <- renderText({
+ paste0('xlab')
+ # paste0(typeof(title))
+ })
+
+ # df1<-masterDF %>% filter(masterDF$var_name %in% var_name)
+ # workflow_id %in% workflow_id)
+ # & run_id == run_id & var_name == var_name)
+ # df<-masterDF %>% dplyr::filter(workflow_id == input$workflow_id)
+ plt <- ggplot(df, aes(x=dates, y=vals)) +
# geom_point(aes(color="Model output")) +
geom_point() +
# geom_smooth(aes(fill = "Spline fit")) +
# coord_cartesian(xlim = ranges$x, ylim = ranges$y) +
# scale_y_continuous(labels=fancy_scientific) +
- labs(title=title, x=xlab, y=ylab) +
+ labs(title=title, x=xlab, y=ylab) +
+ # labs(title=unique(df$title)[1], x=unique(df$xlab)[1], y=unique(df$ylab)[1]) +
scale_color_manual(name = "", values = "black") +
scale_fill_manual(name = "", values = "grey50")
# theme(axis.text.x = element_text(angle = -90))
-
plt<-ggplotly(plt)
# plot(plt)
# add_icon()
- }
- }
+ # }
+ # }
})
+
+# Shiny server closes here
})
+# global_df<-data.frame()
+# for(variable in var_names){
+# local_df<-data.frame()
+# for(file in files){
+# nc <-nc_open(file)
+# var <- ncdf4::ncatt_get(nc, var_name)
+# #sw <- if ('Swdown' %in% names(nc$var)) ncdf4::ncvar_get(nc, 'Swdown') else TRUE
+# sw <- TRUE
+# title <- var$long_name
+# ylab <- var$units
+# x <- ncdays2date(ncdf4::ncvar_get(nc, 'time'), ncdf4::ncatt_get(nc, 'time'))
+# y <- ncdf4::ncvar_get(nc, var_name)
+# b <- !is.na(x) & !is.na(y) & sw != 0
+# dates <- if(is.na(dates)) x[b] else c(dates, x[b])
+# vals <- if(is.na(vals)) y[b] else c(vals, y[b])
+# local_df<-rbind(local_df,data.frame(dates,vals,title,ylab,variable))
+# }
+# global_df<-rbind(global_df,local_df)
+# }
+
+# runApp(port=6480, launch.browser=FALSE)
+
# runApp(port=5658, launch.browser=FALSE)
diff --git a/shiny/workflowPlot/ui.R b/shiny/workflowPlot/ui.R
index 8b54949a691..7cc7ba427ac 100644
--- a/shiny/workflowPlot/ui.R
+++ b/shiny/workflowPlot/ui.R
@@ -11,6 +11,9 @@ ui <- shinyUI(fluidPage(
selectInput("workflow_id", "Workflow ID", c()),
selectInput("run_id", "Run ID", c()),
selectInput("variable_name", "Variable Name", "")
+ # selectInput("workflow_id", "Workflow ID", c(99000000077)),
+ # selectInput("run_id", "Run ID", c(99000000002)),
+ # selectInput("variable_name", "Variable Name", c("AutoResp","GPP"))
),
mainPanel(
plotlyOutput("outputPlot"
@@ -18,9 +21,12 @@ ui <- shinyUI(fluidPage(
# brush = brushOpts(id = "plot_brush",
# resetOnNew = TRUE),
# dblclick = "plot_dblclick"
- )
+ ),
# Checking variable names
- ,verbatimTextOutput("info")
+ verbatimTextOutput("info"),
+ verbatimTextOutput("info1"),
+ verbatimTextOutput("info2"),
+ verbatimTextOutput("info3")
)
)
))
From fa390dc84293e121c81622f1ec3329148f095cb3 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Wed, 7 Jun 2017 22:54:50 +0530
Subject: [PATCH 018/771] Commented out the cleanup steps, as they were
 removing useful files that are needed in later build stages modified:
 docker/system_services.sh
---
docker/system_services.sh | 10 +++++-----
1 file changed, 5 insertions(+), 5 deletions(-)
diff --git a/docker/system_services.sh b/docker/system_services.sh
index 81088dcad13..9e991c011b0 100644
--- a/docker/system_services.sh
+++ b/docker/system_services.sh
@@ -83,8 +83,8 @@ ln -s /bin/bash /bin/sh
echo `. /etc/lsb-release; echo ${DISTRIB_CODENAME/*, /}` >> /etc/container_environment/DISTRIB_CODENAME
## cleanup
-apt-get clean
-rm -rf /build
-rm -rf /tmp/* /var/tmp/*
-rm -rf /var/lib/apt/lists/*
-rm -f /etc/dpkg/dpkg.cfg.d/02apt-speedup
+# apt-get clean
+# rm -rf /build
+# rm -rf /tmp/* /var/tmp/*
+# rm -rf /var/lib/apt/lists/*
+# rm -f /etc/dpkg/dpkg.cfg.d/02apt-speedup
From be83943a0be124b7aace4be88e9dba1195f78ce6 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Wed, 7 Jun 2017 22:56:20 +0530
Subject: [PATCH 019/771] Added the Volume Mounting point
modified: Dockerfile
---
Dockerfile | 10 ++++++----
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index a61c62a9bd4..f5ad9d89695 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -14,13 +14,12 @@ RUN echo "deb http://cran.rstudio.com/bin/linux/ubuntu xenial/" > /etc/apt/sourc
# copy the installation script inside the container
ADD docker/ /build
+# Set script mod +x for preprocessors
+RUN chmod 750 /build/*.sh
+
# Run the OS System setup script
-RUN chmod 750 /build/system_services.sh
RUN /build/system_services.sh
-# Set script mod +x for preprocessors
-RUN chmod 750 /build/*
-
# run update machine to update machine
RUN /build/update_machine.sh
@@ -36,5 +35,8 @@ RUN /build/install_pecan.sh
# Clean up APT when done.
RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+# Mounting pecan data volume
+VOLUME /home/skywalker/pecandata:/pecandata
+
# startup
CMD ["/sbin/my_init"]
From c8bade7e13e8240036274f4d165fc5d4d5009633 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Thu, 8 Jun 2017 21:10:22 +0530
Subject: [PATCH 020/771] Added Docker Compose file; only supports
 postgresql and PEcAn core so far new file: docker-compose.yml
---
docker-compose.yml | 24 ++++++++++++++++++++++++
1 file changed, 24 insertions(+)
create mode 100644 docker-compose.yml
diff --git a/docker-compose.yml b/docker-compose.yml
new file mode 100644
index 00000000000..9d99d4ca851
--- /dev/null
+++ b/docker-compose.yml
@@ -0,0 +1,24 @@
+version: '3'
+
+networks:
+ net1:
+ driver: bridge
+
+services:
+ postgresql:
+ image: 'postgres:latest'
+ networks:
+ - net1
+ ports:
+ - '5432:5432'
+
+ pecan-image:
+ build:
+ context: .
+ dockerfile: Dockerfile
+ networks:
+ - net1
+ ports:
+ - '8787:8787'
+ volumes:
+ - /home/skywalker/pecandata:/pecandata
From a57cf0a6c4a1987465dcb3a8abcfa761c819a587 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 10 Jun 2017 18:20:41 +0530
Subject: [PATCH 021/771] Added Bety in Compose file
Added bety in docker-compose.yml
Added SIPNET (with installation script) as default testing Model in Dockerfile
Added PostgreSQL setup script
---
Dockerfile | 10 ++++-----
docker-compose.yml | 12 +++++++++++
docker/install_sipnet.sh | 27 ++++++++++++++++++++++++
docker/setup_postgresql.sh | 43 ++++++++++++++++++++++++++++++++++++++
4 files changed, 87 insertions(+), 5 deletions(-)
create mode 100644 docker/install_sipnet.sh
create mode 100644 docker/setup_postgresql.sh
diff --git a/Dockerfile b/Dockerfile
index f5ad9d89695..c4698cff828 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
FROM ubuntu:16.04
-MAINTAINER amanskywalker (ak47su30ac@gmail.com)
+MAINTAINER Aman Kumar (ak47su30ac@gmail.com)
# expose port 80 for the web interface
EXPOSE 80
@@ -32,11 +32,11 @@ RUN /build/install_R.sh
# run install pecan to install pecan cores
RUN /build/install_pecan.sh
-# Clean up APT when done.
-RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*
+# run install sipnet to install SIPNET (default testing Model)
+RUN /build/install_sipnet.sh
-# Mounting pecan data volume
-VOLUME /home/skywalker/pecandata:/pecandata
+# Clean up APT when done.
+RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /build/*
# startup
CMD ["/sbin/my_init"]
diff --git a/docker-compose.yml b/docker-compose.yml
index 9d99d4ca851..5f4c6bcd798 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -12,7 +12,17 @@ services:
ports:
- '5432:5432'
+ bety:
+ image: 'pecan/bety:latest'
+ networks:
+ - net1
+ ports:
+ - '3000:3000'
+
pecan-image:
+ depends_on:
+ - postgresql
+ - bety
build:
context: .
dockerfile: Dockerfile
@@ -20,5 +30,7 @@ services:
- net1
ports:
- '8787:8787'
+ - '22:22'
+ - '80:80'
volumes:
- /home/skywalker/pecandata:/pecandata
diff --git a/docker/install_sipnet.sh b/docker/install_sipnet.sh
new file mode 100644
index 00000000000..2034f81a9f5
--- /dev/null
+++ b/docker/install_sipnet.sh
@@ -0,0 +1,27 @@
+echo "######################################################################"
+echo "SIPNET"
+echo "######################################################################"
+if [ ! -e ${HOME}/sipnet_unk ]; then
+ cd
+ curl -o sipnet_unk.tar.gz http://isda.ncsa.illinois.edu/~kooper/PEcAn/models/sipnet_unk.tar.gz
+ tar zxf sipnet_unk.tar.gz
+ rm sipnet_unk.tar.gz
+fi
+cd ${HOME}/sipnet_unk/
+make clean
+make
+sudo cp sipnet /usr/local/bin/sipnet.runk
+make clean
+
+if [ ! -e ${HOME}/sipnet_r136 ]; then
+ cd
+ curl -o sipnet_r136.tar.gz http://isda.ncsa.illinois.edu/~kooper/EBI/sipnet_r136.tar.gz
+ tar zxf sipnet_r136.tar.gz
+ rm sipnet_r136.tar.gz
+ sed -i 's#$(LD) $(LIBLINKS) \(.*\)#$(LD) \1 $(LIBLINKS)#' ${HOME}/sipnet_r136/Makefile
+fi
+cd ${HOME}/sipnet_r136/
+make clean
+make
+sudo cp sipnet /usr/local/bin/sipnet.r136
+make clean
diff --git a/docker/setup_postgresql.sh b/docker/setup_postgresql.sh
new file mode 100644
index 00000000000..3e0250a3263
--- /dev/null
+++ b/docker/setup_postgresql.sh
@@ -0,0 +1,43 @@
+echo "######################################################################"
+echo "POSTGRES"
+echo "######################################################################"
+# ADD export PATH=${PATH}:/usr/pgsql-9.5/bin
+# ADD exclude=postgresql* to /etc/yum.repos.d/CentOS-Base.repo or /etc/yum/pluginconf.d/rhnplugin.conf
+# SEE https://wiki.postgresql.org/wiki/YUM_Installation#Configure_your_YUM_repository
+case "$OS_VERSION" in
+ RH_5)
+ echo "No PostgreSQL configuration (yet) for RedHat 5"
+ exit 1
+ ;;
+ RH_6)
+ sudo service postgresql-9.5 initdb
+ sudo sh -c 'if ! grep -Fq "bety" /var/lib/pgsql/9.5/data/pg_hba.conf ; then
+ sed -i "/# TYPE/ a\
+local all bety trust\n\
+host all bety 127.0.0.1/32 trust\n\
+host all bety ::1/128 trust" /var/lib/pgsql/9.5/data/pg_hba.conf
+ fi'
+ chkconfig postgresql-9.5 on
+ sudo service postgresql-9.5 start
+ ;;
+ RH_7)
+ sudo /usr/pgsql-9.5/bin/postgresql95-setup initdb
+ sudo sh -c 'if ! grep -Fq "bety" /var/lib/pgsql/9.5/data/pg_hba.conf ; then
+ sed -i "/# TYPE/ a\
+local all bety trust\n\
+host all bety 127.0.0.1/32 trust\n\
+host all bety ::1/128 trust" /var/lib/pgsql/9.5/data/pg_hba.conf
+ fi'
+ sudo systemctl enable postgresql-9.5.service
+ sudo systemctl start postgresql-9.5.service
+ ;;
+ Ubuntu)
+ sudo sh -c 'if ! grep -Fq "bety" /etc/postgresql/9.5/main/pg_hba.conf ; then
+ sed -i "/# TYPE/ a\
+local all bety trust\n\
+host all bety 127.0.0.1/32 trust\n\
+host all bety ::1/128 trust" /etc/postgresql/9.5/main/pg_hba.conf
+fi'
+ sudo service postgresql restart
+ ;;
+esac
From ae0da51e6655af64baa3df6515a524fee9ceb1ea Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sun, 11 Jun 2017 11:06:25 -0500
Subject: [PATCH 022/771] Code formatting related comments. Adding action
button to ui
---
shiny/workflowPlot/server.R | 47 +++++++++++--------------------------
shiny/workflowPlot/ui.R | 2 ++
2 files changed, 16 insertions(+), 33 deletions(-)
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index 06307552dcd..75d2864988e 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -102,22 +102,24 @@ server <- shinyServer(function(input, output, session) {
dates <- as.Date(dates)
vals <- if(is.na(vals)) y[b] else c(vals, y[b])
xlab <- "Time"
+ # Not required to change xlab by ranges. Using ggplotly.
# xlab <- if (is.null(ranges$x)) "Time" else paste(ranges$x, collapse=" - ")
valuesDF <- data.frame(dates,vals)
metaDF <- data.frame(workflow_id,run_id,title,xlab,ylab,var_name)
+ # Populating metaDF as same length of values DF
# metaDF1<-metaDF[rep(seq_len(nrow(valuesDF))),]
- currentDF = cbind(valuesDF,metaDF)
- globalDF<-rbind(globalDF,currentDF)
+ currentDF <- cbind(valuesDF,metaDF)
+ globalDF <- rbind(globalDF,currentDF)
}
ncdf4::nc_close(nc)
}
}
}
}
- globalDF$title = as.character(globalDF$title)
- globalDF$xlab = as.character(globalDF$xlab)
- globalDF$ylab = as.character(globalDF$ylab)
- globalDF$var_name = as.character(globalDF$var_name)
+ globalDF$title <- as.character(globalDF$title)
+ globalDF$xlab <- as.character(globalDF$xlab)
+ globalDF$ylab <- as.character(globalDF$ylab)
+ globalDF$var_name <- as.character(globalDF$var_name)
return(globalDF)
})
output$outputPlot <- renderPlotly({
@@ -153,7 +155,7 @@ server <- shinyServer(function(input, output, session) {
# dates <- as.Date(dates)
# df <- data.frame(dates, vals)
# df <- workFlowData(input$workflow_id,input$run_id,input$variable_names)
- masterDF<-workFlowData()
+ masterDF <- workFlowData()
output$info1 <- renderText({
paste0(nrow(masterDF))
})
@@ -162,17 +164,17 @@ server <- shinyServer(function(input, output, session) {
need(input$run_id, 'Run id detected'),
need(input$variable_name, 'Please wait! Loading data')
)
- masterDF$var_name = as.character(masterDF$var_name)
+ masterDF$var_name <- as.character(masterDF$var_name)
# masterDF$var_name = as.factor(masterDF$var_name)
# df1<-subset(masterDF,var_name==var_name)
- df<-masterDF %>%
+ df <- masterDF %>%
dplyr::filter(workflow_id == input$workflow_id &
run_id == input$run_id &
var_name == input$variable_name) %>%
dplyr::select(dates,vals)
- title<-unique(df$title)[1]
- xlab<-unique(df$xlab)[1]
- ylab<-unique(df$ylab)[1]
+ title <- unique(df$title)[1]
+ xlab <- unique(df$xlab)[1]
+ ylab <- unique(df$ylab)[1]
output$info2 <- renderText({
paste0(nrow(df))
# paste0(typeof(title))
@@ -207,26 +209,5 @@ server <- shinyServer(function(input, output, session) {
# Shiny server closes here
})
-# global_df<-data.frame()
-# for(variable in var_names){
-# local_df<-data.frame()
-# for(file in files){
-# nc <-nc_open(file)
-# var <- ncdf4::ncatt_get(nc, var_name)
-# #sw <- if ('Swdown' %in% names(nc$var)) ncdf4::ncvar_get(nc, 'Swdown') else TRUE
-# sw <- TRUE
-# title <- var$long_name
-# ylab <- var$units
-# x <- ncdays2date(ncdf4::ncvar_get(nc, 'time'), ncdf4::ncatt_get(nc, 'time'))
-# y <- ncdf4::ncvar_get(nc, var_name)
-# b <- !is.na(x) & !is.na(y) & sw != 0
-# dates <- if(is.na(dates)) x[b] else c(dates, x[b])
-# vals <- if(is.na(vals)) y[b] else c(vals, y[b])
-# local_df<-rbind(local_df,data.frame(dates,vals,title,ylab,variable))
-# }
-# global_df<-rbind(global_df,local_df)
-# }
-
# runApp(port=6480, launch.browser=FALSE)
-
# runApp(port=5658, launch.browser=FALSE)
diff --git a/shiny/workflowPlot/ui.R b/shiny/workflowPlot/ui.R
index 7cc7ba427ac..644a820fb35 100644
--- a/shiny/workflowPlot/ui.R
+++ b/shiny/workflowPlot/ui.R
@@ -10,7 +10,9 @@ ui <- shinyUI(fluidPage(
sidebarPanel(
selectInput("workflow_id", "Workflow ID", c()),
selectInput("run_id", "Run ID", c()),
+ actionButton("go", "Load Data"),
selectInput("variable_name", "Variable Name", "")
+
# selectInput("workflow_id", "Workflow ID", c(99000000077)),
# selectInput("run_id", "Run ID", c(99000000002)),
# selectInput("variable_name", "Variable Name", c("AutoResp","GPP"))
From 8151d89b81d03b7184a9fd0436a26f71ad7b2513 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Mon, 12 Jun 2017 10:41:24 -0500
Subject: [PATCH 023/771] Multiple workflow and run ids
---
shiny/workflowPlot/server.R | 69 ++++++++++++++++++++++++++++++-------
shiny/workflowPlot/ui.R | 9 +++--
2 files changed, 64 insertions(+), 14 deletions(-)
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index 75d2864988e..b3d1ccf901f 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -15,28 +15,73 @@ server <- shinyServer(function(input, output, session) {
ranges <- reactiveValues(x = NULL, y = NULL)
print("RESTART")
# set the workflow id(s)
- ids <- get_workflow_ids(bety, session)
- # updateSelectizeInput(session, "workflow_id", choices=ids)
- observe({
- updateSelectizeInput(session, "workflow_id", choices=ids)
- })
+ # Retrieving all workflow ids.
+ # Creating a new function here so that we wont have to modify the original one.
+ # Ideally the get_workflow_ids function in db/R/query.dplyr.R should take a flag to check
+ # if we want to load all workflow ids.
+ get_all_workflow_ids <- function(bety) {
+ ids <- workflows(bety, ensemble = TRUE) %>% distinct(workflow_id) %>% collect %>%
+ .[["workflow_id"]] %>% sort(decreasing = TRUE)
+ return(ids)
+ }
+ # get_workflow_ids
+ ids <- get_all_workflow_ids(bety)
+ # ids <- get_all_workflow_ids(bety, session)
+ updateSelectizeInput(session, "workflow_id", choices=ids)
+ # Removing observe here as we want to load workflow ids first
+ # observe({
+ # updateSelectizeInput(session, "workflow_id", choices=ids)
+ # })
workflow_id <- reactive({
req(input$workflow_id)
workflow_id <- input$workflow_id
})
# update the run_ids if user changes workflow
- run_ids <- reactive(get_run_ids(bety, workflow_id()))
+ # run_ids <- reactive(get_run_ids(bety, workflow_id()))
+ run_ids <- reactive({
+ w_ids <- input$workflow_id
+ run_id_list <- c()
+ for(w_id in w_ids){
+ r_ids <- get_run_ids(bety, w_id)
+ for(r_id in r_ids){
+ list_item <- paste0('workflow ',w_id,', run ',r_id)
+ run_id_list <- c(run_id_list,list_item)
+ }
+ }
+ return(run_id_list)
+ })
+ parse_workflowID_runID_from_input <- function(run_id_string){
+ id_list <- c()
+ split_string <- strsplit(run_id_string,',')[[1]]
+ # run_id_string: 'workflow' workflow_ID, 'run' run_id
+ wID <- as.numeric(strsplit(split_string[1],' ')[[1]][2])
+ runID <- as.numeric(strsplit(split_string[2],' ')[[1]][2])
+ id_list <- c(id_list,wID)
+ id_list <- c(id_list,runID)
+ # c(workflow_id,run_id)
+ return(id_list)
+ }
observe({
updateSelectizeInput(session, "run_id", choices=run_ids())
})
# update variables if user changes run
+ get_var_names_for_ID <- function(bety,wID,runID){
+ var_names <- get_var_names(bety, wID, runID)
+ return(var_names)
+ }
var_names <- reactive({
- run_ids <- get_run_ids(bety, workflow_id())
- var_names <- get_var_names(bety, workflow_id(), run_ids[1])
+ # run_ids <- get_run_ids(bety, workflow_id())
+ # var_names <- get_var_names(bety, workflow_id(), run_ids[1])
# Removing the variables "Year" and "FracJulianDay" from the Variable Name input in the app
- removeVarNames <- c('Year','FracJulianDay')
- var_names <-var_names[!var_names %in% removeVarNames]
- return(var_names)
+
+ # run_ids <- input$run_id[1]
+ # # for(rID in run_ids){
+ # id_list <- parse_workflowID_runID_from_input(run_ids)
+ # # var_names <- get_var_names_for_ID(bety,id_list[1],id_list[2])
+ # # # }
+ # removeVarNames <- c('Year','FracJulianDay')
+ # var_names <-var_names[!var_names %in% removeVarNames]
+ # return(id_list)
})
observe({
updateSelectizeInput(session, "variable_name", choices=var_names())
@@ -63,7 +108,7 @@ server <- shinyServer(function(input, output, session) {
# ,session$clientData$url_search)
# paste0("x=", input$plot_dblclick$x, "\ny=", input$plot_dblclick$y)
})
- workFlowData <-reactive({
+ workFlowData <-eventReactive(input$go,{
# workflow_id = 99000000077
# run_id = 99000000002
# var_name = var_names
diff --git a/shiny/workflowPlot/ui.R b/shiny/workflowPlot/ui.R
index 644a820fb35..739b17be01b 100644
--- a/shiny/workflowPlot/ui.R
+++ b/shiny/workflowPlot/ui.R
@@ -8,9 +8,14 @@ ui <- shinyUI(fluidPage(
sidebarLayout(
sidebarPanel(
- selectInput("workflow_id", "Workflow ID", c()),
- selectInput("run_id", "Run ID", c()),
+ # helpText(),
+ p("Please select the workflow ID to continue. You can select multiple IDs"),
+ selectizeInput("workflow_id", "Mutliple Workflow IDs", c(),multiple=TRUE),
+ p("Please select the run ID. You can select multiple IDs"),
+ selectizeInput("run_id", "Mutliple Run IDs", c(),multiple=TRUE),
actionButton("go", "Load Data"),
+ selectInput("workflow_id_selected", "Workflow ID", c()),
+ selectInput("run_id_selected", "Run ID", c()),
selectInput("variable_name", "Variable Name", "")
# selectInput("workflow_id", "Workflow ID", c(99000000077)),
From b171ff5756f4ed874028c64d07dccb678b3b7314 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Tue, 13 Jun 2017 19:21:42 +0530
Subject: [PATCH 024/771] Removed the exposed ports from the Dockerfile
Removed the exposed ports 80 and 22 as they are not needed for now
---
Dockerfile | 6 ------
1 file changed, 6 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index c4698cff828..7106d144857 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,12 +1,6 @@
FROM ubuntu:16.04
MAINTAINER Aman Kumar (ak47su30ac@gmail.com)
-# expose port 80 for the web interface
-EXPOSE 80
-
-# expose port 22 for ssh maintance
-EXPOSE 22
-
# updated ppa's
RUN echo "deb http://cran.rstudio.com/bin/linux/ubuntu xenial/" > /etc/apt/sources.list.d/R.list &&\
apt-key adv --keyserver keyserver.ubuntu.com --recv-keys E084DAB9
From 5e79026eedc8f4bb2d087e392fcdb49892f7b3d0 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Tue, 13 Jun 2017 20:51:59 +0530
Subject: [PATCH 025/771] Minor fixes modified: docker-compose.yml
---
docker-compose.yml | 8 +++++---
1 file changed, 5 insertions(+), 3 deletions(-)
diff --git a/docker-compose.yml b/docker-compose.yml
index 5f4c6bcd798..e38529fe6fd 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -5,8 +5,8 @@ networks:
driver: bridge
services:
- postgresql:
- image: 'postgres:latest'
+ postgres:
+ image: 'mdillon/postgis:9.6'
networks:
- net1
ports:
@@ -18,10 +18,12 @@ services:
- net1
ports:
- '3000:3000'
+ link:
+ - postgres:pg
pecan-image:
depends_on:
- - postgresql
+ - postgres
- bety
build:
context: .
From 9e3815d1e8455192b8c046c321491d19b50e0d4b Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Wed, 14 Jun 2017 22:38:36 +0530
Subject: [PATCH 026/771] Removed pecan web code from the core
Removed the pecan web code from the core in docker/install_pecan.sh
Renamed the pecan-image service to pecan-core in docker-compose.yml
---
docker-compose.yml | 4 +---
docker/install_pecan.sh | 24 ------------------------
2 files changed, 1 insertion(+), 27 deletions(-)
diff --git a/docker-compose.yml b/docker-compose.yml
index e38529fe6fd..c9e8a0198d9 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -21,7 +21,7 @@ services:
link:
- postgres:pg
- pecan-image:
+ pecan-core:
depends_on:
- postgres
- bety
@@ -32,7 +32,5 @@ services:
- net1
ports:
- '8787:8787'
- - '22:22'
- - '80:80'
volumes:
- /home/skywalker/pecandata:/pecandata
diff --git a/docker/install_pecan.sh b/docker/install_pecan.sh
index ffd6a30c128..a56ef18f1cd 100644
--- a/docker/install_pecan.sh
+++ b/docker/install_pecan.sh
@@ -12,27 +12,3 @@ fi
cd ${HOME}/pecan
git pull
make
-
- curl -o /var/www/html/pecan.pdf https://www.gitbook.com/download/pdf/book/pecan/pecan-documentation
- rm /var/www/html/index.html
- ln -s ${HOME}/pecan/documentation/index_vm.html /var/www/html/index.html
-if [ ! -e ${HOME}/pecan/web/config.php ]; then
- sed -e "s#browndog_url=.*#browndog_url=\"${BROWNDOG_URL}\";#" \
- -e "s#browndog_username=.*#browndog_username=\"${BROWNDOG_USERNAME}\";#" \
- -e "s#browndog_password=.*#browndog_password=\"${BROWNDOG_PASSWORD}\";#" \
- -e "s#googleMapKey=.*#googleMapKey=\"${GOOGLE_MAP_KEY}\";#" \
- -e "s/carya/$USER/g" ${HOME}/pecan/web/config.example.php > ${HOME}/pecan/web/config.php
-fi
-
-if [ ! -e ${HTTP_CONF}/pecan.conf ]; then
- cat > /tmp/pecan.conf << EOF
-Alias /pecan ${HOME}/pecan/web
-
- DirectoryIndex index.php
- Options +ExecCGI
- Require all granted
-
-EOF
- cp /tmp/pecan.conf ${HTTP_CONF}/pecan.conf
- rm /tmp/pecan.conf
-fi
From 3b6b0caab6831e5dee7d6baccefd69e4b0b771cd Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Wed, 14 Jun 2017 23:08:10 +0530
Subject: [PATCH 027/771] Removed the pecan-web installation packages
Removed the pecan-web installation packages to reduce the size of the image
Typo fix in docker-compose.yml
---
docker-compose.yml | 2 +-
docker/install_packages.sh | 2 +-
docker/install_pecan_preprocessor.sh | 26 +++++++++++++-------------
3 files changed, 15 insertions(+), 15 deletions(-)
diff --git a/docker-compose.yml b/docker-compose.yml
index c9e8a0198d9..0be09c4d9af 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -18,7 +18,7 @@ services:
- net1
ports:
- '3000:3000'
- link:
+ links:
- postgres:pg
pecan-core:
diff --git a/docker/install_packages.sh b/docker/install_packages.sh
index a89e4110e02..c665a887caf 100644
--- a/docker/install_packages.sh
+++ b/docker/install_packages.sh
@@ -80,7 +80,7 @@ case "$OS_VERSION" in
# for PostgreSQL
# apt-get -y install libdbd-pgsql postgresql-9.5 postgresql-client-9.5 libpq-dev postgresql-9.5-postgis-2.2 postgresql-9.5-postgis-scripts
# for web gui
- apt-get -y install apache2 libapache2-mod-php7.0 php7.0 libapache2-mod-passenger php7.0-xml php-ssh2 php7.0-pgsql
+ # apt-get -y install apache2 libapache2-mod-php7.0 php7.0 libapache2-mod-passenger php7.0-xml php-ssh2 php7.0-pgsql
# Ubuntu 14.04 php5-pgsql libapache2-mod-php5 php5 and no php-xml
;;
esac
diff --git a/docker/install_pecan_preprocessor.sh b/docker/install_pecan_preprocessor.sh
index c92a67378a1..e53311b38ad 100644
--- a/docker/install_pecan_preprocessor.sh
+++ b/docker/install_pecan_preprocessor.sh
@@ -8,11 +8,11 @@ set -e
#fi
# configuration
-BROWNDOG_URL="http://dap.ncsa.illinois.edu:8184/convert/";
-BROWNDOG_USERNAME="";
-BROWNDOG_PASSWORD="";
-
-GOOGLE_MAP_KEY=""
+# BROWNDOG_URL="http://dap.ncsa.illinois.edu:8184/convert/";
+# BROWNDOG_USERNAME="";
+# BROWNDOG_PASSWORD="";
+#
+# GOOGLE_MAP_KEY=""
#SETUP_VM=""
#SETUP_PALEON=""
@@ -26,11 +26,11 @@ if [ -e $(dirname $0)/install_pecan.config ]; then
. $(dirname $0)/install_pecan.config
fi
-if [ -e /etc/redhat-release ]; then
- OS_VERSION="RH_$( sed -r 's/.* ([0-9]+)\..*/\1/' /etc/redhat-release )"
- HTTP_CONF="/etc/httpd/conf.d/"
- chmod o+x ${HOME}
-else
- OS_VERSION="Ubuntu"
- HTTP_CONF="/etc/apache2/conf-available/"
-fi
+# if [ -e /etc/redhat-release ]; then
+# OS_VERSION="RH_$( sed -r 's/.* ([0-9]+)\..*/\1/' /etc/redhat-release )"
+# HTTP_CONF="/etc/httpd/conf.d/"
+# chmod o+x ${HOME}
+# else
+# OS_VERSION="Ubuntu"
+# HTTP_CONF="/etc/apache2/conf-available/"
+# fi
From 04198245c9cd0a43414cdedd84d07dfe1a074a82 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Wed, 14 Jun 2017 23:17:14 +0530
Subject: [PATCH 028/771] Hard-coded a fresh git clone of the repo instead of
 pulling when it is already present
---
docker/install_pecan.sh | 8 ++------
1 file changed, 2 insertions(+), 6 deletions(-)
diff --git a/docker/install_pecan.sh b/docker/install_pecan.sh
index a56ef18f1cd..43528dc6399 100644
--- a/docker/install_pecan.sh
+++ b/docker/install_pecan.sh
@@ -5,10 +5,6 @@
echo "######################################################################"
echo "PECAN"
echo "######################################################################"
-if [ ! -e ${HOME}/pecan ]; then
- cd
- git clone https://github.com/PecanProject/pecan.git
-fi
-cd ${HOME}/pecan
-git pull
+git clone https://github.com/PecanProject/pecan.git
+cd pecan/
make
From 85e902de4ba5f943deb8f83985aee21d8af9ecd4 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Wed, 14 Jun 2017 23:22:22 +0530
Subject: [PATCH 029/771] Added OS version
---
docker/install_pecan_preprocessor.sh | 16 ++++++++--------
1 file changed, 8 insertions(+), 8 deletions(-)
diff --git a/docker/install_pecan_preprocessor.sh b/docker/install_pecan_preprocessor.sh
index e53311b38ad..60b62c1810d 100644
--- a/docker/install_pecan_preprocessor.sh
+++ b/docker/install_pecan_preprocessor.sh
@@ -26,11 +26,11 @@ if [ -e $(dirname $0)/install_pecan.config ]; then
. $(dirname $0)/install_pecan.config
fi
-# if [ -e /etc/redhat-release ]; then
-# OS_VERSION="RH_$( sed -r 's/.* ([0-9]+)\..*/\1/' /etc/redhat-release )"
-# HTTP_CONF="/etc/httpd/conf.d/"
-# chmod o+x ${HOME}
-# else
-# OS_VERSION="Ubuntu"
-# HTTP_CONF="/etc/apache2/conf-available/"
-# fi
+if [ -e /etc/redhat-release ]; then
+ OS_VERSION="RH_$( sed -r 's/.* ([0-9]+)\..*/\1/' /etc/redhat-release )"
+ HTTP_CONF="/etc/httpd/conf.d/"
+ chmod o+x ${HOME}
+else
+ OS_VERSION="Ubuntu"
+ HTTP_CONF="/etc/apache2/conf-available/"
+fi
From 1fb2695281a71f1d1cd6b918b7a036eb79b2cb99 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Fri, 16 Jun 2017 10:50:34 +0530
Subject: [PATCH 030/771] Minor fix in install_pecan.sh
Added depends_on bety service in docker-compose.yml
---
docker-compose.yml | 2 ++
docker/install_pecan.sh | 9 +++++++--
2 files changed, 9 insertions(+), 2 deletions(-)
diff --git a/docker-compose.yml b/docker-compose.yml
index 0be09c4d9af..c4d4c2eb6c9 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -13,6 +13,8 @@ services:
- '5432:5432'
bety:
+ depends_on:
+ - postgres
image: 'pecan/bety:latest'
networks:
- net1
diff --git a/docker/install_pecan.sh b/docker/install_pecan.sh
index 43528dc6399..e848c17ff93 100644
--- a/docker/install_pecan.sh
+++ b/docker/install_pecan.sh
@@ -5,6 +5,11 @@
echo "######################################################################"
echo "PECAN"
echo "######################################################################"
-git clone https://github.com/PecanProject/pecan.git
-cd pecan/
+if [ ! -e ${HOME}/pecan ]; then
+ cd
+ git clone https://github.com/PecanProject/pecan.git
+fi
+cd ${HOME}/pecan
+git pull
+mkdir .install
make
From 241a36381798e30eadae5c74967e136681805adf Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Fri, 16 Jun 2017 19:57:51 +0530
Subject: [PATCH 031/771] Added web GUI
Added Dockerfile_pecan_web to build the web GUI image; also added docker/install_pecan_web.sh as a script to install the web GUI
---
Dockerfile_pecan_web | 23 +++++++++++++++++++
docker/install_pecan_web.sh | 46 +++++++++++++++++++++++++++++++++++++
2 files changed, 69 insertions(+)
create mode 100644 Dockerfile_pecan_web
create mode 100644 docker/install_pecan_web.sh
diff --git a/Dockerfile_pecan_web b/Dockerfile_pecan_web
new file mode 100644
index 00000000000..a098331f034
--- /dev/null
+++ b/Dockerfile_pecan_web
@@ -0,0 +1,23 @@
+FROM amanskywalker/pecan-dev:latest
+MAINTAINER Aman Kumar (ak47su30ac@gmail.com)
+
+# copy the installation script inside the container
+ADD docker/ /build
+
+# Set script mod +x for preprocessors
+RUN chmod 750 /build/*.sh
+
+# Run the OS System setup script
+RUN /build/system_services.sh
+
+# run update machine to update machine
+RUN /build/update_machine.sh
+
+# install pecan web
+RUN /build/install_pecan_web.sh
+
+# Clean up APT when done.
+RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /build/*
+
+# startup
+CMD ["/sbin/my_init"]
diff --git a/docker/install_pecan_web.sh b/docker/install_pecan_web.sh
new file mode 100644
index 00000000000..666d2917d7c
--- /dev/null
+++ b/docker/install_pecan_web.sh
@@ -0,0 +1,46 @@
+#!/bin/bash
+
+. /build/install_pecan_preprocessor.sh
+
+echo "######################################################################"
+echo "PECAN-WEB"
+echo "######################################################################"
+
+#configuration
+
+BROWNDOG_URL="http://dap.ncsa.illinois.edu:8184/convert/";
+BROWNDOG_USERNAME="";
+BROWNDOG_PASSWORD="";
+
+GOOGLE_MAP_KEY=""
+
+echo "Intalling php and apache2"
+
+# for web gui
+apt-get -y install apache2 libapache2-mod-php7.0 php7.0 libapache2-mod-passenger php7.0-xml php-ssh2 php7.0-pgsql
+
+echo "Setting up web gui"
+sudo curl -o /var/www/html/pecan.pdf https://www.gitbook.com/download/pdf/book/pecan/pecan-documentation
+sudo rm /var/www/html/index.html
+sudo ln -s ${HOME}/pecan/documentation/index_vm.html /var/www/html/index.html
+
+if [ ! -e ${HOME}/pecan/web/config.php ]; then
+ sed -e "s#browndog_url=.*#browndog_url=\"${BROWNDOG_URL}\";#" \
+ -e "s#browndog_username=.*#browndog_username=\"${BROWNDOG_USERNAME}\";#" \
+ -e "s#browndog_password=.*#browndog_password=\"${BROWNDOG_PASSWORD}\";#" \
+ -e "s#googleMapKey=.*#googleMapKey=\"${GOOGLE_MAP_KEY}\";#" \
+ -e "s/carya/$USER/g" ${HOME}/pecan/web/config.example.php > ${HOME}/pecan/web/config.php
+fi
+
+if [ ! -e ${HTTP_CONF}/pecan.conf ]; then
+ cat > /tmp/pecan.conf << EOF
+Alias /pecan ${HOME}/pecan/web
+
+ DirectoryIndex index.php
+ Options +ExecCGI
+ Require all granted
+
+EOF
+ sudo cp /tmp/pecan.conf ${HTTP_CONF}/pecan.conf
+ rm /tmp/pecan.conf
+fi
From 2a6f3c522b15abf9a05ba13ec90595cd3ea2ee20 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sat, 17 Jun 2017 08:09:26 -0500
Subject: [PATCH 032/771] UI related changes. Working on server.R
---
shiny/workflowPlot/server.R | 124 ++++++++++++++++++------------------
shiny/workflowPlot/ui.R | 12 ++--
2 files changed, 68 insertions(+), 68 deletions(-)
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index b3d1ccf901f..52d6df8f8ef 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -20,8 +20,8 @@ server <- shinyServer(function(input, output, session) {
# Ideally the get_workflow_ids function in db/R/query.dplyr.R should take a flag to check
# if we want to load all workflow ids.
get_all_workflow_ids <- function(bety) {
- ids <- workflows(bety, ensemble = TRUE) %>% distinct(workflow_id) %>% collect %>%
- .[["workflow_id"]] %>% sort(decreasing = TRUE)
+ ids <- workflows(bety, ensemble = TRUE) %>% distinct(workflow_id) %>% collect %>%
+ .[["workflow_id"]] %>% sort(decreasing = TRUE)
return(ids)
}
# get_workflow_ids
@@ -70,18 +70,18 @@ server <- shinyServer(function(input, output, session) {
return(var_names)
}
var_names <- reactive({
- # run_ids <- get_run_ids(bety, workflow_id())
- # var_names <- get_var_names(bety, workflow_id(), run_ids[1])
- # Removing the variables "Year" and "FracJulianDay" from the Variable Name input in the app
-
- # run_ids <- input$run_id[1]
- # # for(rID in run_ids){
- # id_list <- parse_workflowID_runID_from_input(run_ids)
- # # var_names <- get_var_names_for_ID(bety,id_list[1],id_list[2])
- # # # }
- # removeVarNames <- c('Year','FracJulianDay')
- # var_names <-var_names[!var_names %in% removeVarNames]
- # return(id_list)
+ # run_ids <- get_run_ids(bety, workflow_id())
+ # var_names <- get_var_names(bety, workflow_id(), run_ids[1])
+ # Removing the variables "Year" and "FracJulianDay" from the Variable Name input in the app
+
+ # run_ids <- input$run_id[1]
+ # # for(rID in run_ids){
+ # id_list <- parse_workflowID_runID_from_input(run_ids)
+ # # var_names <- get_var_names_for_ID(bety,id_list[1],id_list[2])
+ # # # }
+ # removeVarNames <- c('Year','FracJulianDay')
+ # var_names <-var_names[!var_names %in% removeVarNames]
+ # return(id_list)
})
observe({
updateSelectizeInput(session, "variable_name", choices=var_names())
@@ -199,59 +199,59 @@ server <- shinyServer(function(input, output, session) {
# print(ranges$x)
# dates <- as.Date(dates)
# df <- data.frame(dates, vals)
- # df <- workFlowData(input$workflow_id,input$run_id,input$variable_names)
- masterDF <- workFlowData()
- output$info1 <- renderText({
- paste0(nrow(masterDF))
- })
- validate(
- need(input$workflow_id, 'Found workflow id'),
- need(input$run_id, 'Run id detected'),
- need(input$variable_name, 'Please wait! Loading data')
- )
- masterDF$var_name <- as.character(masterDF$var_name)
- # masterDF$var_name = as.factor(masterDF$var_name)
- # df1<-subset(masterDF,var_name==var_name)
- df <- masterDF %>%
- dplyr::filter(workflow_id == input$workflow_id &
+ # df <- workFlowData(input$workflow_id,input$run_id,input$variable_names)
+ masterDF <- workFlowData()
+ output$info1 <- renderText({
+ paste0(nrow(masterDF))
+ })
+ validate(
+ need(input$workflow_id, 'Found workflow id'),
+ need(input$run_id, 'Run id detected'),
+ need(input$variable_name, 'Please wait! Loading data')
+ )
+ masterDF$var_name <- as.character(masterDF$var_name)
+ # masterDF$var_name = as.factor(masterDF$var_name)
+ # df1<-subset(masterDF,var_name==var_name)
+ df <- masterDF %>%
+ dplyr::filter(workflow_id == input$workflow_id &
run_id == input$run_id &
var_name == input$variable_name) %>%
- dplyr::select(dates,vals)
- title <- unique(df$title)[1]
- xlab <- unique(df$xlab)[1]
- ylab <- unique(df$ylab)[1]
- output$info2 <- renderText({
- paste0(nrow(df))
- # paste0(typeof(title))
- })
- output$info3 <- renderText({
- paste0('xlab')
- # paste0(typeof(title))
- })
-
- # df1<-masterDF %>% filter(masterDF$var_name %in% var_name)
- # workflow_id %in% workflow_id)
- # & run_id == run_id & var_name == var_name)
- # df<-masterDF %>% dplyr::filter(workflow_id == input$workflow_id)
- plt <- ggplot(df, aes(x=dates, y=vals)) +
- # geom_point(aes(color="Model output")) +
- geom_point() +
-# geom_smooth(aes(fill = "Spline fit")) +
- # coord_cartesian(xlim = ranges$x, ylim = ranges$y) +
- # scale_y_continuous(labels=fancy_scientific) +
- labs(title=title, x=xlab, y=ylab) +
- # labs(title=unique(df$title)[1], x=unique(df$xlab)[1], y=unique(df$ylab)[1]) +
- scale_color_manual(name = "", values = "black") +
- scale_fill_manual(name = "", values = "grey50")
- # theme(axis.text.x = element_text(angle = -90))
- plt<-ggplotly(plt)
- # plot(plt)
- # add_icon()
+ dplyr::select(dates,vals)
+ title <- unique(df$title)[1]
+ xlab <- unique(df$xlab)[1]
+ ylab <- unique(df$ylab)[1]
+ output$info2 <- renderText({
+ paste0(nrow(df))
+ # paste0(typeof(title))
+ })
+ output$info3 <- renderText({
+ paste0('xlab')
+ # paste0(typeof(title))
+ })
+
+ # df1<-masterDF %>% filter(masterDF$var_name %in% var_name)
+ # workflow_id %in% workflow_id)
+ # & run_id == run_id & var_name == var_name)
+ # df<-masterDF %>% dplyr::filter(workflow_id == input$workflow_id)
+ plt <- ggplot(df, aes(x=dates, y=vals)) +
+ # geom_point(aes(color="Model output")) +
+ geom_point() +
+ # geom_smooth(aes(fill = "Spline fit")) +
+ # coord_cartesian(xlim = ranges$x, ylim = ranges$y) +
+ # scale_y_continuous(labels=fancy_scientific) +
+ labs(title=title, x=xlab, y=ylab) +
+ # labs(title=unique(df$title)[1], x=unique(df$xlab)[1], y=unique(df$ylab)[1]) +
+ scale_color_manual(name = "", values = "black") +
+ scale_fill_manual(name = "", values = "grey50")
+ # theme(axis.text.x = element_text(angle = -90))
+ plt<-ggplotly(plt)
+ # plot(plt)
+ # add_icon()
# }
# }
})
-
-# Shiny server closes here
+
+ # Shiny server closes here
})
# runApp(port=6480, launch.browser=FALSE)
diff --git a/shiny/workflowPlot/ui.R b/shiny/workflowPlot/ui.R
index 739b17be01b..cc2880bb7b2 100644
--- a/shiny/workflowPlot/ui.R
+++ b/shiny/workflowPlot/ui.R
@@ -5,7 +5,7 @@ source('helper.R')
ui <- shinyUI(fluidPage(
# Application title
titlePanel("Workflow Plots"),
-
+
sidebarLayout(
sidebarPanel(
# helpText(),
@@ -24,11 +24,11 @@ ui <- shinyUI(fluidPage(
),
mainPanel(
plotlyOutput("outputPlot"
- ## brushOpts and dblclick not supported by plotly
- # brush = brushOpts(id = "plot_brush",
- # resetOnNew = TRUE),
- # dblclick = "plot_dblclick"
- ),
+ ## brushOpts and dblclick not supported by plotly
+ # brush = brushOpts(id = "plot_brush",
+ # resetOnNew = TRUE),
+ # dblclick = "plot_dblclick"
+ ),
# Checking variable names
verbatimTextOutput("info"),
verbatimTextOutput("info1"),
From 50cb285b73402d06d5e892cc317e1cbeb7bdaf52 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 17 Jun 2017 20:16:10 +0530
Subject: [PATCH 033/771] Added override for HOME variable and a few web
 code fixes
---
docker/install_packages.sh | 1 +
docker/install_pecan_preprocessor.sh | 4 ++++
docker/install_pecan_web.sh | 12 ++++++++----
docker/install_sipnet.sh | 4 ++++
4 files changed, 17 insertions(+), 4 deletions(-)
diff --git a/docker/install_packages.sh b/docker/install_packages.sh
index c665a887caf..6379e8a9ecd 100644
--- a/docker/install_packages.sh
+++ b/docker/install_packages.sh
@@ -79,6 +79,7 @@ case "$OS_VERSION" in
apt-get -y install cmake
# for PostgreSQL
# apt-get -y install libdbd-pgsql postgresql-9.5 postgresql-client-9.5 libpq-dev postgresql-9.5-postgis-2.2 postgresql-9.5-postgis-scripts
+ apt-get -y install postgresql-client-9.4
# for web gui
# apt-get -y install apache2 libapache2-mod-php7.0 php7.0 libapache2-mod-passenger php7.0-xml php-ssh2 php7.0-pgsql
# Ubuntu 14.04 php5-pgsql libapache2-mod-php5 php5 and no php-xml
diff --git a/docker/install_pecan_preprocessor.sh b/docker/install_pecan_preprocessor.sh
index 60b62c1810d..d629d880fb3 100644
--- a/docker/install_pecan_preprocessor.sh
+++ b/docker/install_pecan_preprocessor.sh
@@ -7,6 +7,10 @@ set -e
# exit -1
#fi
+# overiding environment variables
+
+export HOME='/home/carya/'
+
# configuration
# BROWNDOG_URL="http://dap.ncsa.illinois.edu:8184/convert/";
# BROWNDOG_USERNAME="";
diff --git a/docker/install_pecan_web.sh b/docker/install_pecan_web.sh
index 666d2917d7c..fb45bb3be5d 100644
--- a/docker/install_pecan_web.sh
+++ b/docker/install_pecan_web.sh
@@ -20,9 +20,9 @@ echo "Intalling php and apache2"
apt-get -y install apache2 libapache2-mod-php7.0 php7.0 libapache2-mod-passenger php7.0-xml php-ssh2 php7.0-pgsql
echo "Setting up web gui"
-sudo curl -o /var/www/html/pecan.pdf https://www.gitbook.com/download/pdf/book/pecan/pecan-documentation
-sudo rm /var/www/html/index.html
-sudo ln -s ${HOME}/pecan/documentation/index_vm.html /var/www/html/index.html
+curl -o /var/www/html/pecan.pdf https://www.gitbook.com/download/pdf/book/pecan/pecan-documentation
+rm /var/www/html/index.html
+ln -s ${HOME}/pecan/documentation/index_vm.html /var/www/html/index.html
if [ ! -e ${HOME}/pecan/web/config.php ]; then
sed -e "s#browndog_url=.*#browndog_url=\"${BROWNDOG_URL}\";#" \
@@ -41,6 +41,10 @@ Alias /pecan ${HOME}/pecan/web
Require all granted
EOF
- sudo cp /tmp/pecan.conf ${HTTP_CONF}/pecan.conf
+ cp /tmp/pecan.conf ${HTTP_CONF}/pecan.conf
rm /tmp/pecan.conf
fi
+
+a2enconf pecan.conf
+
+services apache2 restart
diff --git a/docker/install_sipnet.sh b/docker/install_sipnet.sh
index 2034f81a9f5..2417c41aad0 100644
--- a/docker/install_sipnet.sh
+++ b/docker/install_sipnet.sh
@@ -1,3 +1,7 @@
+
+
+. /build/install_pecan_preprocessor.sh
+
echo "######################################################################"
echo "SIPNET"
echo "######################################################################"
From c1009c2abb927cef91224ab901926eac7477cb24 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 17 Jun 2017 20:38:15 +0530
Subject: [PATCH 034/771] Moved Pecan Web Dockerfile into docker dir
---
Dockerfile_pecan_web => docker/Dockerfile | 0
docker/install_packages.sh | 2 +-
2 files changed, 1 insertion(+), 1 deletion(-)
rename Dockerfile_pecan_web => docker/Dockerfile (100%)
diff --git a/Dockerfile_pecan_web b/docker/Dockerfile
similarity index 100%
rename from Dockerfile_pecan_web
rename to docker/Dockerfile
diff --git a/docker/install_packages.sh b/docker/install_packages.sh
index 6379e8a9ecd..6ece1b32631 100644
--- a/docker/install_packages.sh
+++ b/docker/install_packages.sh
@@ -79,7 +79,7 @@ case "$OS_VERSION" in
apt-get -y install cmake
# for PostgreSQL
# apt-get -y install libdbd-pgsql postgresql-9.5 postgresql-client-9.5 libpq-dev postgresql-9.5-postgis-2.2 postgresql-9.5-postgis-scripts
- apt-get -y install postgresql-client-9.4
+ apt-get -y install postgresql-client-9.5
# for web gui
# apt-get -y install apache2 libapache2-mod-php7.0 php7.0 libapache2-mod-passenger php7.0-xml php-ssh2 php7.0-pgsql
# Ubuntu 14.04 php5-pgsql libapache2-mod-php5 php5 and no php-xml
From cf5297cea0de2bc003ee8b6c2594172b542b4e85 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 17 Jun 2017 21:23:46 +0530
Subject: [PATCH 035/771] minor fixes in various files in docker/
---
docker/Dockerfile | 2 +-
docker/install_pecan_preprocessor.sh | 2 +-
docker/update_machine.sh | 7 +++++--
3 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/docker/Dockerfile b/docker/Dockerfile
index a098331f034..d386dd9a5b9 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -2,7 +2,7 @@ FROM amanskywalker/pecan-dev:latest
MAINTAINER Aman Kumar (ak47su30ac@gmail.com)
# copy the installation script inside the container
-ADD docker/ /build
+ADD . /build
# Set script mod +x for preprocessors
RUN chmod 750 /build/*.sh
diff --git a/docker/install_pecan_preprocessor.sh b/docker/install_pecan_preprocessor.sh
index d629d880fb3..0f715370898 100644
--- a/docker/install_pecan_preprocessor.sh
+++ b/docker/install_pecan_preprocessor.sh
@@ -9,7 +9,7 @@ set -e
# overiding environment variables
-export HOME='/home/carya/'
+export HOME='/home/carya'
# configuration
# BROWNDOG_URL="http://dap.ncsa.illinois.edu:8184/convert/";
diff --git a/docker/update_machine.sh b/docker/update_machine.sh
index 4f7709df5ef..ebbde9bba6b 100644
--- a/docker/update_machine.sh
+++ b/docker/update_machine.sh
@@ -6,8 +6,11 @@
echo "######################################################################"
echo "UPDATING MACHINE"
echo "######################################################################"
-mkdir /home/carya/
-chmod 755 /home/carya/
+if [ ! -e /home/carya/ ]; then
+ mkdir /home/carya/
+ chmod 755 /home/carya/
+fi
+
case "$OS_VERSION" in
RH_*)
yum update -y
From 5820a8993fe7d66761db734acf2895932cc1e9df Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 17 Jun 2017 22:42:55 +0530
Subject: [PATCH 036/771] Added scripts for the apache to keep running
in background
---
docker/Dockerfile | 11 +++++++++++
docker/apache2/runserver.sh | 3 +++
docker/apache2/startup.sh | 11 +++++++++++
docker/install_pecan_web.sh | 2 +-
4 files changed, 26 insertions(+), 1 deletion(-)
create mode 100644 docker/apache2/runserver.sh
create mode 100644 docker/apache2/startup.sh
diff --git a/docker/Dockerfile b/docker/Dockerfile
index d386dd9a5b9..043b6bf1c27 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,3 +1,4 @@
+# Dockerfile for the pecan web
FROM amanskywalker/pecan-dev:latest
MAINTAINER Aman Kumar (ak47su30ac@gmail.com)
@@ -16,6 +17,16 @@ RUN /build/update_machine.sh
# install pecan web
RUN /build/install_pecan_web.sh
+# simple scripts to do the startup task
+RUN mkdir -p /etc/my_init.d
+COPY /build/apache2/startup.sh /etc/my_init.d/startup.sh
+RUN chmod +x /etc/my_init.d/startup.sh
+
+# adding demons of apache2
+RUN mkdir -p /etc/service/rserver ; sync
+COPY /build/apache2/runserver.sh /etc/service/rserver/run
+RUN chmod +x /etc/service/rserver/run \
+
# Clean up APT when done.
RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /build/*
diff --git a/docker/apache2/runserver.sh b/docker/apache2/runserver.sh
new file mode 100644
index 00000000000..3c111a74640
--- /dev/null
+++ b/docker/apache2/runserver.sh
@@ -0,0 +1,3 @@
+#!/bin/sh
+
+exec chpst -u root /usr/sbin/apache2 -DFOREGROUND off 2>&1
diff --git a/docker/apache2/startup.sh b/docker/apache2/startup.sh
new file mode 100644
index 00000000000..ca232920ba2
--- /dev/null
+++ b/docker/apache2/startup.sh
@@ -0,0 +1,11 @@
+#!/bin/bash
+
+set -e
+
+if [ -f /etc/configured ]; then
+ echo 'already configured'
+else
+ #code that need to run only one time ....
+ update-locale
+ date > /etc/configured
+fi
diff --git a/docker/install_pecan_web.sh b/docker/install_pecan_web.sh
index fb45bb3be5d..6f749e8bb53 100644
--- a/docker/install_pecan_web.sh
+++ b/docker/install_pecan_web.sh
@@ -47,4 +47,4 @@ fi
a2enconf pecan.conf
-services apache2 restart
+/etc/init.d/apache2 restart
From fc3472f92be156a8c9a1ed1ac6f4dc0a42948481 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 17 Jun 2017 23:00:30 +0530
Subject: [PATCH 037/771] Minor fixes
---
docker/Dockerfile | 6 +++---
docker/apache2/runserver.sh | 2 +-
2 files changed, 4 insertions(+), 4 deletions(-)
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 043b6bf1c27..63f3cbc130f 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -19,13 +19,13 @@ RUN /build/install_pecan_web.sh
# simple scripts to do the startup task
RUN mkdir -p /etc/my_init.d
-COPY /build/apache2/startup.sh /etc/my_init.d/startup.sh
+COPY apache2/startup.sh /etc/my_init.d/startup.sh
RUN chmod +x /etc/my_init.d/startup.sh
# adding demons of apache2
RUN mkdir -p /etc/service/rserver ; sync
-COPY /build/apache2/runserver.sh /etc/service/rserver/run
-RUN chmod +x /etc/service/rserver/run \
+COPY apache2/runserver.sh /etc/service/rserver/run
+RUN chmod +x /etc/service/rserver/run
# Clean up APT when done.
RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /build/*
diff --git a/docker/apache2/runserver.sh b/docker/apache2/runserver.sh
index 3c111a74640..008a2465425 100644
--- a/docker/apache2/runserver.sh
+++ b/docker/apache2/runserver.sh
@@ -1,3 +1,3 @@
#!/bin/sh
-exec chpst -u root /usr/sbin/apache2 -DFOREGROUND off 2>&1
+exec /etc/init.d/apache2 start
From 25dbb08070627332439e4171ce608cb4245276a6 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sat, 17 Jun 2017 19:13:56 -0500
Subject: [PATCH 038/771] Updating query.dplyr.R. Multiple selection server.R
and ui.R
---
db/R/query.dplyr.R | 5 +-
shiny/workflowPlot/server.R | 194 ++++++++++++++++++++----------------
shiny/workflowPlot/ui.R | 10 +-
3 files changed, 115 insertions(+), 94 deletions(-)
diff --git a/db/R/query.dplyr.R b/db/R/query.dplyr.R
index 18c52d7cd16..5c36256a99b 100644
--- a/db/R/query.dplyr.R
+++ b/db/R/query.dplyr.R
@@ -134,9 +134,10 @@ runs <- function(bety, workflow_id) {
#' @inheritParams dbHostInfo
#' @param session Session object passed through Shiny
#' @export
-get_workflow_ids <- function(bety, session) {
+get_workflow_ids <- function(bety, session,all.ids=FALSE) {
query <- isolate(parseQueryString(session$clientData$url_search))
- if ("workflow_id" %in% names(query)) {
+ # If we dont want all workflow ids but only workflow id from the user url query
+ if (!all.ids & "workflow_id" %in% names(query)) {
ids <- unlist(query[names(query) == "workflow_id"], use.names = FALSE)
} else {
# Get all workflow IDs
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index 52d6df8f8ef..1dc303523c5 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -12,107 +12,159 @@ server <- shinyServer(function(input, output, session) {
# options(shiny.trace=TRUE)
bety <- betyConnect()
# bety <- betyConnect('/home/carya/pecan/web/config.php')
- ranges <- reactiveValues(x = NULL, y = NULL)
+ # Ranges not required.
+ # ranges <- reactiveValues(x = NULL, y = NULL)
print("RESTART")
# set the workflow id(s)
# Retrieving all workflow ids.
# Creating a new function here so that we wont have to modify the original one.
# Ideally the get_workflow_ids function in db/R/query.dplyr.R should take a flag to check
# if we want to load all workflow ids.
- get_all_workflow_ids <- function(bety) {
- ids <- workflows(bety, ensemble = TRUE) %>% distinct(workflow_id) %>% collect %>%
- .[["workflow_id"]] %>% sort(decreasing = TRUE)
- return(ids)
- }
+ # get_all_workflow_ids <- function(bety) {
+ # ids <- workflows(bety, ensemble = TRUE) %>% distinct(workflow_id) %>% collect %>%
+ # .[["workflow_id"]] %>% sort(decreasing = TRUE)
+ # return(ids)
+ # }
# get_workflow_ids
- ids <- get_all_workflow_ids(bety)
+ # ids <- get_all_workflow_ids(bety)
# ids <- get_all_workflow_ids(bety, session)
- updateSelectizeInput(session, "workflow_id", choices=ids)
- # Removing observe here as we want to load workflow ids first
- # observe({
- # updateSelectizeInput(session, "workflow_id", choices=ids)
- # })
- workflow_id <- reactive({
- req(input$workflow_id)
- workflow_id <- input$workflow_id
+ # Get all workflow ids
+ # Using this function here for now.
+ get_workflow_ids_all <- function(bety, session,all.ids=FALSE) {
+ query <- isolate(parseQueryString(session$clientData$url_search))
+ # If we dont want all workflow ids but only workflow id from the user url query
+ if (!all.ids & "workflow_id" %in% names(query)) {
+ ids <- unlist(query[names(query) == "workflow_id"], use.names = FALSE)
+ } else {
+ # Get all workflow IDs
+ ids <- workflows(bety, ensemble = TRUE) %>% distinct(workflow_id) %>% collect %>%
+ .[["workflow_id"]] %>% sort(decreasing = TRUE)
+ }
+ return(ids)
+ } # get_workflow_ids
+
+ # Update all workflow ids
+ observe({
+ # get_workflow_id function from query.dplyr.R
+ all_ids <- get_workflow_ids_all(bety, session,all.ids=TRUE)
+ updateSelectizeInput(session, "all_workflow_id", choices=all_ids)
})
- # update the run_ids if user changes workflow
- # run_ids <- reactive(get_run_ids(bety, workflow_id()))
- run_ids <- reactive({
- w_ids <- input$workflow_id
+ # Retrieves all run ids for seleted workflow ids
+ # Returns ('workflow ',w_id,', run ',r_id)
+ all_run_ids <- reactive({
+ req(input$all_workflow_id)
+ w_ids <- input$all_workflow_id
run_id_list <- c()
for(w_id in w_ids){
r_ids <- get_run_ids(bety, w_id)
for(r_id in r_ids){
- list_item <- paste0('workflow ',w_id,', run ',r_id)
+ # . as a separator between multiple run ids
+ list_item <- paste0('workflow ',w_id,', run ',r_id, ';')
run_id_list <- c(run_id_list,list_item)
}
}
return(run_id_list)
})
- parse_workflowID_runID_from_input <- function(run_id_string){
- id_list <- c()
- split_string <- strsplit(run_id_string,',')[[1]]
- # run_id_string: 'workflow' workflow_ID, 'run' run_id
- wID <- as.numeric(strsplit(split_string[1],' ')[[1]][2])
- runID <- as.numeric(strsplit(split_string[2],' ')[[1]][2])
- id_list <- c(id_list,wID)
- id_list <- c(id_list,runID)
- # c(workflow_id,run_id)
- return(id_list)
- }
+ # Update all run_ids ('workflow ',w_id,', run ',r_id)
+ observe({
+ updateSelectizeInput(session, "all_run_id", choices=all_run_ids())
+ })
+ # Update on load: workflow id for selected run ids (models)
+ observe({
+ if(input$load){
+ req(input$all_workflow_id)
+ # Selected `multiple' ids
+ selected_id <- strsplit(input$all_workflow_id,' ')
+ # To allow caching
+ display_id <- selected_id
+ updateSelectizeInput(session, "workflow_id", choices=display_id)
+ } else{
+ session_workflow_id <- get_workflow_ids_all(bety, session)
+ updateSelectizeInput(session, "workflow_id", choices=session_workflow_id)
+ }
+ })
+ # Update run id for selected workflow id (model)
+ run_ids <- reactive({
+ req(input$workflow_id)
+ get_run_ids(bety, input$workflow_id)
+ })
observe({
updateSelectizeInput(session, "run_id", choices=run_ids())
})
- # update variables if user changes run
- get_var_names_for_ID <- function(bety,wID,runID){
- var_names <- get_var_names(bety, wID, runID)
- return(var_names)
+ parse_ids_from_input_runID <- function(run_id_string){
+ id_list <- c()
+ split_diff_ids <- strsplit(run_id_string,';')[[1]]
+ # run_id_string: 'workflow' workflow_ID, 'run' run_id
+ for(diff_ids in split_diff_ids){
+ split_string <- strsplit(diff_ids,',')[[1]]
+ wID <- as.numeric(strsplit(trimws(split_string[1],which = c("both")),' ')[[1]][2])
+ runID <- as.numeric(strsplit(trimws(split_string[2],which = c("both")),' ')[[1]][2])
+ ids <- list(wID,runID)
+ }
+ id_list <- c(id_list,ids)
+ return(id_list)
}
+ # Update variables if user changes run
+ # get_var_names_for_ID <- function(bety,wID,runID){
+ # var_names <- get_var_names(bety, wID, runID)
+ # return(var_names)
+ # }
var_names <- reactive({
# run_ids <- get_run_ids(bety, workflow_id())
# var_names <- get_var_names(bety, workflow_id(), run_ids[1])
# Removing the variables "Year" and "FracJulianDay" from the Variable Name input in the app
+ req(input$workflow_id,input$run_id)
+ workflow_id <- input$workflow_id
+ run_id <- input$run_id
+ var_names <- get_var_names(bety, workflow_id, run_id)
- # run_ids <- input$run_id[1]
# # for(rID in run_ids){
# id_list <- parse_workflowID_runID_from_input(run_ids)
# # var_names <- get_var_names_for_ID(bety,id_list[1],id_list[2])
# # # }
- # removeVarNames <- c('Year','FracJulianDay')
- # var_names <-var_names[!var_names %in% removeVarNames]
+ removeVarNames <- c('Year','FracJulianDay')
+ var_names <-var_names[!var_names %in% removeVarNames]
+ return(var_names)
# return(id_list)
})
observe({
updateSelectizeInput(session, "variable_name", choices=var_names())
})
- observe({
- ignore <- input$variable_name
- ranges$x <- NULL
- ranges$y <- NULL
- })
- observeEvent(input$plot_dblclick, {
- brush <- input$plot_brush
- if (!is.null(brush)) {
- ranges$x <- as.POSIXct(c(brush$xmin, brush$xmax), origin = "1970-01-01", tz = "UTC")
- ranges$y <- c(brush$ymin, brush$ymax)
- } else {
- ranges$x <- NULL
- ranges$y <- NULL
- }
- })
+ # observe({
+ # ignore <- input$variable_name
+ # ranges$x <- NULL
+ # ranges$y <- NULL
+ # })
+ # observeEvent(input$plot_dblclick, {
+ # brush <- input$plot_brush
+ # if (!is.null(brush)) {
+ # ranges$x <- as.POSIXct(c(brush$xmin, brush$xmax), origin = "1970-01-01", tz = "UTC")
+ # ranges$y <- c(brush$ymin, brush$ymax)
+ # } else {
+ # ranges$x <- NULL
+ # ranges$y <- NULL
+ # }
+ # })
# If want to render text
output$info <- renderText({
- paste0(input$variable_name)
+ # indicators <- strsplit(input$indicators, ",")[[1]]
+
+ # if(input$load){
+ # all_workflow_id <- strsplit(input$all_workflow_id,',')
+ # }
+ # d <- typeof(all_workflow_id)
+ paste0(input$all_run_id)
+ # paste0(input$variable_name)
# paste0(run_ids(),length(run_ids()),ids)
# ,session$clientData$url_search)
# paste0("x=", input$plot_dblclick$x, "\ny=", input$plot_dblclick$y)
})
- workFlowData <-eventReactive(input$go,{
+ workFlowData <-eventReactive(input$load,{
# workflow_id = 99000000077
# run_id = 99000000002
# var_name = var_names
globalDF <- data.frame()
+ ids
for(workflow_id in ids){
run_ids <- get_run_ids(bety,workflow_id)
for(run_id in run_ids){
@@ -168,38 +220,6 @@ server <- shinyServer(function(input, output, session) {
return(globalDF)
})
output$outputPlot <- renderPlotly({
- # workflow_id <- isolate(input$workflow_id)
- # run_id <- isolate(input$run_id)
- # var_name <- input$variable_name
- # if (workflow_id != "" && run_id != "" && var_name != "") {
- # workflow <- collect(workflow(bety, workflow_id))
- # if(nrow(workflow) > 0) {
- # outputfolder <- file.path(workflow$folder, 'out', run_id)
- # files <- list.files(outputfolder, "*.nc$", full.names=TRUE)
- # dates <- NA
- # vals <- NA
- # title <- var_name
- # ylab <- ""
- # for(file in files) {
- # nc <- nc_open(file)
- # var <- ncdf4::ncatt_get(nc, var_name)
- # #sw <- if ('Swdown' %in% names(nc$var)) ncdf4::ncvar_get(nc, 'Swdown') else TRUE
- # sw <- TRUE
- # title <- var$long_name
- # ylab <- var$units
- # x <- ncdays2date(ncdf4::ncvar_get(nc, 'time'), ncdf4::ncatt_get(nc, 'time'))
- # y <- ncdf4::ncvar_get(nc, var_name)
- # b <- !is.na(x) & !is.na(y) & sw != 0
- # dates <- if(is.na(dates)) x[b] else c(dates, x[b])
- # vals <- if(is.na(vals)) y[b] else c(vals, y[b])
- # ncdf4::nc_close(nc)
- # }
- # xlab <- if (is.null(ranges$x)) "Time" else paste(ranges$x, collapse=" - ")
- # # plot result
- # print(ranges$x)
- # dates <- as.Date(dates)
- # df <- data.frame(dates, vals)
- # df <- workFlowData(input$workflow_id,input$run_id,input$variable_names)
masterDF <- workFlowData()
output$info1 <- renderText({
paste0(nrow(masterDF))
diff --git a/shiny/workflowPlot/ui.R b/shiny/workflowPlot/ui.R
index cc2880bb7b2..b7b938b43bb 100644
--- a/shiny/workflowPlot/ui.R
+++ b/shiny/workflowPlot/ui.R
@@ -10,12 +10,12 @@ ui <- shinyUI(fluidPage(
sidebarPanel(
# helpText(),
p("Please select the workflow ID to continue. You can select multiple IDs"),
- selectizeInput("workflow_id", "Mutliple Workflow IDs", c(),multiple=TRUE),
+ selectizeInput("all_workflow_id", "Mutliple Workflow IDs", c(),multiple=TRUE),
p("Please select the run ID. You can select multiple IDs"),
- selectizeInput("run_id", "Mutliple Run IDs", c(),multiple=TRUE),
- actionButton("go", "Load Data"),
- selectInput("workflow_id_selected", "Workflow ID", c()),
- selectInput("run_id_selected", "Run ID", c()),
+ selectizeInput("all_run_id", "Mutliple Run IDs", c(),multiple=TRUE),
+ actionButton("load", "Load Data"),
+ selectInput("workflow_id", "Workflow ID", c()),
+ selectInput("run_id", "Run ID", c()),
selectInput("variable_name", "Variable Name", "")
# selectInput("workflow_id", "Workflow ID", c(99000000077)),
From 5bfa5afe5a10d4b7d52d680590f4c84be770189c Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sun, 18 Jun 2017 23:17:36 +0530
Subject: [PATCH 039/771] Added pecan-web image in docker-compose
Added a new web config file to set the pecan web configuration at build time
---
docker-compose.yml | 51 +++++++++++++---
docker/web/config.docker.php | 114 +++++++++++++++++++++++++++++++++++
2 files changed, 157 insertions(+), 8 deletions(-)
create mode 100644 docker/web/config.docker.php
diff --git a/docker-compose.yml b/docker-compose.yml
index c4d4c2eb6c9..5f656b5774b 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -6,22 +6,31 @@ networks:
services:
postgres:
- image: 'mdillon/postgis:9.6'
+ image: 'mdillon/postgis:9.4'
networks:
- - net1
+ net1:
+ aliases:
+ - pg
ports:
- '5432:5432'
+ environment:
+ - PGDATA=/var/lib/postgresql/data/pgdata
+ volumes:
+ - /home/skywalker/pgdata:/var/lib/postgresql/data/pgdata
bety:
depends_on:
- postgres
image: 'pecan/bety:latest'
networks:
- - net1
+ net1:
+ aliases:
+ - bety
ports:
- '3000:3000'
- links:
- - postgres:pg
+ environment:
+ - PG_PORT_5432_TCP_ADDR=pg
+ - PG_PORT_5432_TCP_PORT=5432
pecan-core:
depends_on:
@@ -31,8 +40,34 @@ services:
context: .
dockerfile: Dockerfile
networks:
- - net1
- ports:
- - '8787:8787'
+ net1:
+ aliases:
+ - pecan-core
+ environment:
+ - PG_HOST=pg
+ - PG_PORT=5432
+ - PG_USER=bety
+ - PG_PASSWORD=bety
+ - PG_DATABASE_NAME=bety
volumes:
- /home/skywalker/pecandata:/pecandata
+
+pecan-web:
+ depends_on:
+ - postgres
+ - bety
+ build:
+ context: ./docker
+ dockerfile: Dockerfile
+ networks:
+ net1:
+ aliases:
+ - pecan-web
+ environment:
+ - PG_HOST=pg
+ - PG_PORT=5432
+ - PG_USER=bety
+ - PG_PASSWORD=bety
+ - PG_DATABASE_NAME=bety
+ volumes:
+ - /home/skywalker/pecandata:/pecandata
diff --git a/docker/web/config.docker.php b/docker/web/config.docker.php
new file mode 100644
index 00000000000..37f016ce898
--- /dev/null
+++ b/docker/web/config.docker.php
@@ -0,0 +1,114 @@
+ array(),
+ "geo.bu.edu" =>
+ array("qsub" => "qsub -V -N @NAME@ -o @STDOUT@ -e @STDERR@ -S /bin/bash",
+ "jobid" => "Your job ([0-9]+) .*",
+ "qstat" => "qstat -j @JOBID@ || echo DONE",
+ "prerun" => "module load udunits R/R-3.0.0_gnu-4.4.6",
+ "postrun" => "sleep 60",
+ "models" => array("ED2" =>
+ array("prerun" => "module load hdf5"))));
+
+# Folder where PEcAn is installed
+$R_library_path="/home/carya/R/library";
+
+# Location where PEcAn is installed, not really needed anymore
+$pecan_home="/home/carya/pecan/";
+
+# Folder where the runs are stored
+$output_folder="/home/carya/output/";
+
+# Folder where the generated files are stored
+$dbfiles_folder=$output_folder . "/dbfiles";
+
+# location of BETY DB set to empty to not create links, can be both
+# relative or absolute paths or full URL's. Should point to the base
+# of BETYDB
+$betydb="/bety";
+
+# ----------------------------------------------------------------------
+# SIMPLE EDITING OF BETY DATABSE
+# ----------------------------------------------------------------------
+# Number of items to show on a page
+$pagesize = 30;
+
+# Location where logs should be written
+$logfile = "/home/carya/output/betydb.log";
+
+# uncomment the following variable to enable the simple interface
+#$simpleBETY = TRUE;
+
+
+?>
From 89ced0fdc53627eab6cfb97503b6eb1cd381d018 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sun, 18 Jun 2017 23:20:57 +0530
Subject: [PATCH 040/771] Added web configuration for the image to set up
at build time
---
docker/Dockerfile | 2 ++
1 file changed, 2 insertions(+)
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 63f3cbc130f..5226796b0a8 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -27,6 +27,8 @@ RUN mkdir -p /etc/service/rserver ; sync
COPY apache2/runserver.sh /etc/service/rserver/run
RUN chmod +x /etc/service/rserver/run
+# add the pecan-web configuration
+COPY web/config.docker.php /home/carya/pecan/web/config.php
# Clean up APT when done.
RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /build/*
From f20fbae25f949299edf0e855829c7f5abae3f675 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sun, 18 Jun 2017 23:24:43 +0530
Subject: [PATCH 041/771] Indentation fix in docker-compose.yml
---
docker-compose.yml | 38 +++++++++++++++++++-------------------
1 file changed, 19 insertions(+), 19 deletions(-)
diff --git a/docker-compose.yml b/docker-compose.yml
index 5f656b5774b..fdb65eba3b6 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -52,22 +52,22 @@ services:
volumes:
- /home/skywalker/pecandata:/pecandata
-pecan-web:
- depends_on:
- - postgres
- - bety
- build:
- context: ./docker
- dockerfile: Dockerfile
- networks:
- net1:
- aliases:
- - pecan-web
- environment:
- - PG_HOST=pg
- - PG_PORT=5432
- - PG_USER=bety
- - PG_PASSWORD=bety
- - PG_DATABASE_NAME=bety
- volumes:
- - /home/skywalker/pecandata:/pecandata
+ pecan-web:
+ depends_on:
+ - postgres
+ - bety
+ build:
+ context: ./docker
+ dockerfile: Dockerfile
+ networks:
+ net1:
+ aliases:
+ - pecan-web
+ environment:
+ - PG_HOST=pg
+ - PG_PORT=5432
+ - PG_USER=bety
+ - PG_PASSWORD=bety
+ - PG_DATABASE_NAME=bety
+ volumes:
+ - /home/skywalker/pecandata:/pecandata
From d514254735de9c9eb72157054aa3a9d48c8a8d0e Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Mon, 19 Jun 2017 14:03:27 +0530
Subject: [PATCH 042/771] Added port mapping for pecan-web in
docker-compose.yml
Added script to setup web config
---
Dockerfile | 2 +-
docker-compose.yml | 2 ++
docker/Dockerfile | 7 ++++---
3 files changed, 7 insertions(+), 4 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 7106d144857..28c7ece0548 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -1,5 +1,5 @@
FROM ubuntu:16.04
-MAINTAINER Aman Kumar (ak47su30ac@gmail.com)
+MAINTAINER Aman Kumar (ak47su30@gmail.com)
# updated ppa's
RUN echo "deb http://cran.rstudio.com/bin/linux/ubuntu xenial/" > /etc/apt/sources.list.d/R.list &&\
diff --git a/docker-compose.yml b/docker-compose.yml
index fdb65eba3b6..5e0ed4bd695 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -63,6 +63,8 @@ services:
net1:
aliases:
- pecan-web
+ ports:
+ - '8080':'80'
environment:
- PG_HOST=pg
- PG_PORT=5432
diff --git a/docker/Dockerfile b/docker/Dockerfile
index 5226796b0a8..f94caf4d4e2 100644
--- a/docker/Dockerfile
+++ b/docker/Dockerfile
@@ -1,6 +1,6 @@
# Dockerfile for the pecan web
-FROM amanskywalker/pecan-dev:latest
-MAINTAINER Aman Kumar (ak47su30ac@gmail.com)
+FROM amanskywalker/pecan-dev:0.1
+MAINTAINER Aman Kumar (ak47su30@gmail.com)
# copy the installation script inside the container
ADD . /build
@@ -28,7 +28,8 @@ COPY apache2/runserver.sh /etc/service/rserver/run
RUN chmod +x /etc/service/rserver/run
# add the pecan-web configuration
-COPY web/config.docker.php /home/carya/pecan/web/config.php
+COPY web/config.docker.php /home/carya/pecan/web/config.php
+
# Clean up APT when done.
RUN apt-get clean && rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/* /build/*
From 8023aba533ae4fa275cb28d4d326528e44ac1250 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Mon, 19 Jun 2017 19:56:59 +0530
Subject: [PATCH 043/771] Added image for pecan-core as a fallback if the
build fails in docker-compose.yml
---
docker-compose.yml | 1 +
1 file changed, 1 insertion(+)
diff --git a/docker-compose.yml b/docker-compose.yml
index 5e0ed4bd695..f94c272490d 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -39,6 +39,7 @@ services:
build:
context: .
dockerfile: Dockerfile
+ image: amanskywalker/pecan-dev:0.1
networks:
net1:
aliases:
From 0579d2b3a92ed9a2169f06ea4a92bfb32f392dc6 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Tue, 20 Jun 2017 19:22:05 +0530
Subject: [PATCH 044/771] Some minor fixes in docker-compose.yml
docker/install_sipnet.sh
---
docker-compose.yml | 2 +-
docker/install_sipnet.sh | 2 +-
2 files changed, 2 insertions(+), 2 deletions(-)
diff --git a/docker-compose.yml b/docker-compose.yml
index f94c272490d..3f087eed7eb 100644
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -65,7 +65,7 @@ services:
aliases:
- pecan-web
ports:
- - '8080':'80'
+ - '8080:80'
environment:
- PG_HOST=pg
- PG_PORT=5432
diff --git a/docker/install_sipnet.sh b/docker/install_sipnet.sh
index 2417c41aad0..9bab523b408 100644
--- a/docker/install_sipnet.sh
+++ b/docker/install_sipnet.sh
@@ -27,5 +27,5 @@ fi
cd ${HOME}/sipnet_r136/
make clean
make
-sudo cp sipnet /usr/local/bin/sipnet.r136
+cp sipnet /usr/local/bin/sipnet.r136
make clean
From 836c3ed26606c724fbce18b7e8275a3995e22c4b Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Tue, 20 Jun 2017 15:54:27 -0500
Subject: [PATCH 045/771] Changing load data button to load model outputs
---
shiny/workflowPlot/ui.R | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/shiny/workflowPlot/ui.R b/shiny/workflowPlot/ui.R
index b7b938b43bb..a8db281f2bb 100644
--- a/shiny/workflowPlot/ui.R
+++ b/shiny/workflowPlot/ui.R
@@ -13,7 +13,7 @@ ui <- shinyUI(fluidPage(
selectizeInput("all_workflow_id", "Mutliple Workflow IDs", c(),multiple=TRUE),
p("Please select the run ID. You can select multiple IDs"),
selectizeInput("all_run_id", "Mutliple Run IDs", c(),multiple=TRUE),
- actionButton("load", "Load Data"),
+ actionButton("load", "Load Model outputs"),
selectInput("workflow_id", "Workflow ID", c()),
selectInput("run_id", "Run ID", c()),
selectInput("variable_name", "Variable Name", "")
From cb9e1cf054b15ccaf3195cb05806f16cb6163894 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Tue, 20 Jun 2017 20:14:54 -0500
Subject: [PATCH 046/771] Changes for backend
---
shiny/workflowPlot/server.R | 109 ++++++++++++++++++++++++++++--------
1 file changed, 86 insertions(+), 23 deletions(-)
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index 1dc303523c5..b725a84eadc 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -59,7 +59,7 @@ server <- shinyServer(function(input, output, session) {
r_ids <- get_run_ids(bety, w_id)
for(r_id in r_ids){
# . as a separator between multiple run ids
- list_item <- paste0('workflow ',w_id,', run ',r_id, ';')
+ list_item <- paste0('workflow ',w_id,', run ',r_id)
run_id_list <- c(run_id_list,list_item)
}
}
@@ -72,38 +72,97 @@ server <- shinyServer(function(input, output, session) {
# Update on load: workflow id for selected run ids (models)
observe({
if(input$load){
- req(input$all_workflow_id)
+ req(input$all_run_id)
# Selected `multiple' ids
- selected_id <- strsplit(input$all_workflow_id,' ')
- # To allow caching
- display_id <- selected_id
+ selected_id <- parse_ids_from_input_runID(input$all_run_id)$wID
+ # To allow caching later
+ display_id <- c(input$workflow_id,selected_id)
updateSelectizeInput(session, "workflow_id", choices=display_id)
} else{
session_workflow_id <- get_workflow_ids_all(bety, session)
updateSelectizeInput(session, "workflow_id", choices=session_workflow_id)
}
+
+ # if(input$load){
+ # req(input$all_workflow_id)
+ # # Selected `multiple' ids
+ # selected_id <- strsplit(input$all_workflow_id,' ')
+ # # To allow caching later
+ # display_id <- selected_id
+ # updateSelectizeInput(session, "workflow_id", choices=display_id)
+ # } else{
+ # session_workflow_id <- get_workflow_ids_all(bety, session)
+ # updateSelectizeInput(session, "workflow_id", choices=session_workflow_id)
+ # }
+
})
# Update run id for selected workflow id (model)
- run_ids <- reactive({
+
+ observe({
req(input$workflow_id)
- get_run_ids(bety, input$workflow_id)
+ r_ID <- get_run_ids(bety, input$workflow_id)
+ if(input$load){
+ req(input$all_run_id)
+ # Selected `multiple' ids
+ ids_DF <- parse_ids_from_input_runID(input$all_run_id)
+ ids_DF %>% filter(wID %in% input$workflow_id)
+ # To allow caching later
+ r_ID <- intersect(r_ID,ids_DF$runID)
+ }
+ updateSelectizeInput(session, "run_id", choices=r_ID)
})
- observe({
- updateSelectizeInput(session, "run_id", choices=run_ids())
- })
- parse_ids_from_input_runID <- function(run_id_string){
- id_list <- c()
- split_diff_ids <- strsplit(run_id_string,';')[[1]]
- # run_id_string: 'workflow' workflow_ID, 'run' run_id
- for(diff_ids in split_diff_ids){
- split_string <- strsplit(diff_ids,',')[[1]]
- wID <- as.numeric(strsplit(trimws(split_string[1],which = c("both")),' ')[[1]][2])
- runID <- as.numeric(strsplit(trimws(split_string[2],which = c("both")),' ')[[1]][2])
- ids <- list(wID,runID)
- }
- id_list <- c(id_list,ids)
- return(id_list)
+
+
+
+
+ # run_ids <- reactive({
+ # req(input$workflow_id)
+ # r_ID <- get_run_ids(bety, input$workflow_id)
+ # if(input$load){
+ # req(input$all_run_id)
+ # # Selected `multiple' ids
+ # selected_id <- parse_ids_from_input_runID(input$all_run_id)$wID
+ # # To allow caching later
+ # display_id <- c(input$workflow_id,selected_id)
+ # updateSelectizeInput(session, "workflow_id", choices=display_id)
+ # } else{
+ # session_workflow_id <- get_workflow_ids_all(bety, session)
+ # updateSelectizeInput(session, "workflow_id", choices=session_workflow_id)
+ # }
+ #
+ # })
+ # observe({
+ # updateSelectizeInput(session, "run_id", choices=run_ids())
+ # })
+ return_DF_from_run_ID <- function(diff_ids){
+ # Called by the function parse_ids_from_input_runID
+ # Returns a DF for a particular run_id
+ # print(diff_ids)
+ split_string <- strsplit(diff_ids,',')[[1]]
+ # Workflow id is the first element. Trim leading and ending white spaces. Split by space now
+ wID <- as.numeric(strsplit(trimws(split_string[1],which = c("both")),' ')[[1]][2])
+ # Run id is the second element
+ runID <- as.numeric(strsplit(trimws(split_string[2],which = c("both")),' ')[[1]][2])
+ return(data.frame(wID,runID))
}
+ parse_ids_from_input_runID <- function(run_id_list){
+ # global_id_DF <- data.frame()
+ # split_diff_ids <- strsplit(run_id_string,';')[[1]]
+ # for(diff_ids in split_diff_ids){
+ # # run_id_string: 'workflow' workflow_ID, 'run' run_id
+ # # Split by comma to get workflow and run ids
+ #
+ #
+ globalDF <- data.frame()
+ for(w_run_id in run_id_list){
+ globalDF <- rbind(globalDF,return_DF_from_run_ID(w_run_id))
+ }
+ # split_ids <- lapply(split_diff_ids , function(x) list_workflow_run_id(x))
+ # local_id_DF <- data.frame(wID,runID)
+ # global_id_DF <- rbind(global_id_DF,local_id_DF)
+ return(globalDF)
+ }
+ # }
# Update variables if user changes run
# get_var_names_for_ID <- function(bety,wID,runID){
# var_names <- get_var_names(bety, wID, runID)
@@ -153,7 +212,10 @@ server <- shinyServer(function(input, output, session) {
# all_workflow_id <- strsplit(input$all_workflow_id,',')
# }
# d <- typeof(all_workflow_id)
- paste0(input$all_run_id)
+ # paste0(input$all_run_id)
+
+ paste0(parse_ids_from_input_runID(input$all_run_id)$wID)
+ # paste0(input$all_run_id[length(input$all_run_id)])
# paste0(input$variable_name)
# paste0(run_ids(),length(run_ids()),ids)
# ,session$clientData$url_search)
@@ -224,6 +286,7 @@ server <- shinyServer(function(input, output, session) {
output$info1 <- renderText({
paste0(nrow(masterDF))
})
+ # Error messages
validate(
need(input$workflow_id, 'Found workflow id'),
need(input$run_id, 'Run id detected'),
From c51eee9482d924047bf801619f5c53b84173bd09 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Wed, 21 Jun 2017 15:05:35 +0530
Subject: [PATCH 047/771] Minor fix in docker/web/config.docker.php
---
docker/web/config.docker.php | 27 +++++++++++++++++++++------
1 file changed, 21 insertions(+), 6 deletions(-)
diff --git a/docker/web/config.docker.php b/docker/web/config.docker.php
index 37f016ce898..46e3900378d 100644
--- a/docker/web/config.docker.php
+++ b/docker/web/config.docker.php
@@ -2,11 +2,26 @@
# Information to connect to the BETY database
$db_bety_type="pgsql";
-$db_bety_hostname=getenv('PG_HOST');
-$db_bety_port=getenv('PG_PORT');
-$db_bety_username=getenv('PG_USER');
-$db_bety_password=getenv('PG_PASSWORD');
-$db_bety_database=getenv('PG_DATABASE_NAME');
+$db_bety_hostname="pg";
+$db_bety_port="5432";
+$db_bety_username="postgres";
+$db_bety_password="bety";
+$db_bety_database="bety";
+
+// under development code to get the data from the environment variables
+// $db_bety_hostname=getenv('PG_HOST');
+// $db_bety_port=getenv('PG_PORT');
+// $db_bety_username=getenv('PG_USER');
+// $db_bety_password=getenv('PG_PASSWORD');
+// $db_bety_database=getenv('PG_DATABASE_NAME');
+
+# use only for debugging
+#var_dump($db_bety_type);
+#var_dump($db_bety_hostname);
+#var_dump($db_bety_port);
+#var_dump($db_bety_username);
+#var_dump($db_bety_password);
+#var_dump($db_bety_database);
# Information to connect to the FIA database
# leave this blank if you do not have the FIA database installed.
@@ -28,7 +43,7 @@
$SSHtunnel=dirname(__FILE__) . DIRECTORY_SEPARATOR . "sshtunnel.sh";
# google map key
-$googleMapKey="";
+$googleMapKey=""; # TODO: supply via environment variable (e.g. getenv('GOOGLE_MAP_KEY')); never commit API keys
# Require username/password, can set min level to 0 so nobody can run/delete.
# 4 = viewer
From 78e26feaae72cb6d6b3c01928a875d9740143c70 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Wed, 21 Jun 2017 04:41:25 -0500
Subject: [PATCH 048/771] Working demo. Caching not done yet
---
shiny/workflowPlot/server.R | 141 +++++++++++++++++-------------------
1 file changed, 68 insertions(+), 73 deletions(-)
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index b725a84eadc..8188df3822a 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -82,19 +82,6 @@ server <- shinyServer(function(input, output, session) {
session_workflow_id <- get_workflow_ids_all(bety, session)
updateSelectizeInput(session, "workflow_id", choices=session_workflow_id)
}
-
- # if(input$load){
- # req(input$all_workflow_id)
- # # Selected `multiple' ids
- # selected_id <- strsplit(input$all_workflow_id,' ')
- # # To allow caching later
- # display_id <- selected_id
- # updateSelectizeInput(session, "workflow_id", choices=display_id)
- # } else{
- # session_workflow_id <- get_workflow_ids_all(bety, session)
- # updateSelectizeInput(session, "workflow_id", choices=session_workflow_id)
- # }
-
})
# Update run id for selected workflow id (model)
@@ -111,10 +98,6 @@ server <- shinyServer(function(input, output, session) {
}
updateSelectizeInput(session, "run_id", choices=r_ID)
})
-
-
-
-
# run_ids <- reactive({
# req(input$workflow_id)
# r_ID <- get_run_ids(bety, input$workflow_id)
@@ -129,7 +112,6 @@ server <- shinyServer(function(input, output, session) {
# session_workflow_id <- get_workflow_ids_all(bety, session)
# updateSelectizeInput(session, "workflow_id", choices=session_workflow_id)
# }
- #
# })
# observe({
# updateSelectizeInput(session, "run_id", choices=run_ids())
@@ -168,6 +150,7 @@ server <- shinyServer(function(input, output, session) {
# var_names <- get_var_names(bety, wID, runID)
# return(var_names)
# }
+
var_names <- reactive({
# run_ids <- get_run_ids(bety, workflow_id())
# var_names <- get_var_names(bety, workflow_id(), run_ids[1])
@@ -176,11 +159,6 @@ server <- shinyServer(function(input, output, session) {
workflow_id <- input$workflow_id
run_id <- input$run_id
var_names <- get_var_names(bety, workflow_id, run_id)
-
- # # for(rID in run_ids){
- # id_list <- parse_workflowID_runID_from_input(run_ids)
- # # var_names <- get_var_names_for_ID(bety,id_list[1],id_list[2])
- # # # }
removeVarNames <- c('Year','FracJulianDay')
var_names <-var_names[!var_names %in% removeVarNames]
return(var_names)
@@ -215,64 +193,56 @@ server <- shinyServer(function(input, output, session) {
# paste0(input$all_run_id)
paste0(parse_ids_from_input_runID(input$all_run_id)$wID)
+ # paste0(input$load)
# paste0(input$all_run_id[length(input$all_run_id)])
# paste0(input$variable_name)
# paste0(run_ids(),length(run_ids()),ids)
# ,session$clientData$url_search)
# paste0("x=", input$plot_dblclick$x, "\ny=", input$plot_dblclick$y)
})
- workFlowData <-eventReactive(input$load,{
- # workflow_id = 99000000077
- # run_id = 99000000002
- # var_name = var_names
+
+ load_data_single_run <- function(workflow_id,run_id){
globalDF <- data.frame()
- ids
- for(workflow_id in ids){
- run_ids <- get_run_ids(bety,workflow_id)
- for(run_id in run_ids){
- var_names <- get_var_names(bety, workflow_id, run_id)
- removeVarNames <- c('Year','FracJulianDay')
- var_names <-var_names[!var_names %in% removeVarNames]
- # if (workflow_id != "" && run_id != "" && var_name != "") {
- workflow <- collect(workflow(bety, workflow_id))
- if(nrow(workflow) > 0) {
- outputfolder <- file.path(workflow$folder, 'out', run_id)
- files <- list.files(outputfolder, "*.nc$", full.names=TRUE)
- for(file in files) {
- nc <- nc_open(file)
- for(var_name in var_names){
- dates <- NA
- vals <- NA
- title <- var_name
- ylab <- ""
- var <- ncdf4::ncatt_get(nc, var_name)
- #sw <- if ('Swdown' %in% names(nc$var)) ncdf4::ncvar_get(nc, 'Swdown') else TRUE
- sw <- TRUE
- if(!is.null(var$long_name)){
- title <- var$long_name
- }
- if(!is.null(var$units)){
- ylab <- var$units
- }
- x <- ncdays2date(ncdf4::ncvar_get(nc, 'time'), ncdf4::ncatt_get(nc, 'time'))
- y <- ncdf4::ncvar_get(nc, var_name)
- b <- !is.na(x) & !is.na(y) & sw != 0
- dates <- if(is.na(dates)) x[b] else c(dates, x[b])
- dates <- as.Date(dates)
- vals <- if(is.na(vals)) y[b] else c(vals, y[b])
- xlab <- "Time"
- # Not required to change xlab by ranges. Using ggplotly.
- # xlab <- if (is.null(ranges$x)) "Time" else paste(ranges$x, collapse=" - ")
- valuesDF <- data.frame(dates,vals)
- metaDF <- data.frame(workflow_id,run_id,title,xlab,ylab,var_name)
- # Populating metaDF as same length of values DF
- # metaDF1<-metaDF[rep(seq_len(nrow(valuesDF))),]
- currentDF <- cbind(valuesDF,metaDF)
- globalDF <- rbind(globalDF,currentDF)
- }
- ncdf4::nc_close(nc)
+ workflow <- collect(workflow(bety, workflow_id))
+ var_names <- get_var_names(bety, workflow_id, run_id)
+ removeVarNames <- c('Year','FracJulianDay')
+ var_names <-var_names[!var_names %in% removeVarNames]
+ if(nrow(workflow) > 0) {
+ outputfolder <- file.path(workflow$folder, 'out', run_id)
+ files <- list.files(outputfolder, "*.nc$", full.names=TRUE)
+ for(file in files) {
+ nc <- nc_open(file)
+ for(var_name in var_names){
+ dates <- NA
+ vals <- NA
+ title <- var_name
+ ylab <- ""
+ var <- ncdf4::ncatt_get(nc, var_name)
+ #sw <- if ('Swdown' %in% names(nc$var)) ncdf4::ncvar_get(nc, 'Swdown') else TRUE
+ sw <- TRUE
+ if(!is.null(var$long_name)){
+ title <- var$long_name
}
+ if(!is.null(var$units)){
+ ylab <- var$units
+ }
+ x <- ncdays2date(ncdf4::ncvar_get(nc, 'time'), ncdf4::ncatt_get(nc, 'time'))
+ y <- ncdf4::ncvar_get(nc, var_name)
+ b <- !is.na(x) & !is.na(y) & sw != 0
+ dates <- if(is.na(dates)) x[b] else c(dates, x[b])
+ dates <- as.Date(dates)
+ vals <- if(is.na(vals)) y[b] else c(vals, y[b])
+ xlab <- "Time"
+ # Not required to change xlab by ranges. Using ggplotly.
+ # xlab <- if (is.null(ranges$x)) "Time" else paste(ranges$x, collapse=" - ")
+ valuesDF <- data.frame(dates,vals)
+ metaDF <- data.frame(workflow_id,run_id,title,xlab,ylab,var_name)
+ # Populating metaDF as same length of values DF
+ # metaDF1<-metaDF[rep(seq_len(nrow(valuesDF))),]
+ currentDF <- cbind(valuesDF,metaDF)
+ globalDF <- rbind(globalDF,currentDF)
}
+ ncdf4::nc_close(nc)
}
}
globalDF$title <- as.character(globalDF$title)
@@ -280,9 +250,34 @@ server <- shinyServer(function(input, output, session) {
globalDF$ylab <- as.character(globalDF$ylab)
globalDF$var_name <- as.character(globalDF$var_name)
return(globalDF)
+ }
+
+
+
+ loadNewData <-eventReactive(input$load,{
+ # workflow_id = 99000000077
+ # run_id = 99000000002
+ # var_name = var_names
+ req(input$all_run_id)
+ globalDF <- data.frame()
+ ids_DF <- parse_ids_from_input_runID(input$all_run_id)
+ for(i in nrow(ids_DF)){
+ globalDF <- rbind(globalDF, load_data_single_run(ids_DF$wID[i],ids_DF$runID[i]))
+ }
+ return(globalDF)
+ # for(workflow_id in ids){
+ # run_ids <- get_run_ids(bety,workflow_id)
+ # for(run_id in run_ids){
+ # var_names <- get_var_names(bety, workflow_id, run_id)
+ # removeVarNames <- c('Year','FracJulianDay')
+ # var_names <-var_names[!var_names %in% removeVarNames]
+ # # if (workflow_id != "" && run_id != "" && var_name != "") {
+ # }
+ # }
})
output$outputPlot <- renderPlotly({
- masterDF <- workFlowData()
+ masterDF <- load_data_single_run(input$workflow_id,input$run_id)
+ masterDF <- rbind(masterDF,loadNewData())
output$info1 <- renderText({
paste0(nrow(masterDF))
})
From 7130b9ee05f895c0ad61f93d6cf6448fd3a35504 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Wed, 21 Jun 2017 19:52:17 +0530
Subject: [PATCH 049/771] Minor fix
Removed sudo from docker/install_sipnet.sh
---
docker/install_sipnet.sh | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/docker/install_sipnet.sh b/docker/install_sipnet.sh
index 9bab523b408..cd251803cd7 100644
--- a/docker/install_sipnet.sh
+++ b/docker/install_sipnet.sh
@@ -14,7 +14,7 @@ fi
cd ${HOME}/sipnet_unk/
make clean
make
-sudo cp sipnet /usr/local/bin/sipnet.runk
+cp sipnet /usr/local/bin/sipnet.runk
make clean
if [ ! -e ${HOME}/sipnet_r136 ]; then
From 64acd9dded29f43c6503251e9eaae717f03ba51e Mon Sep 17 00:00:00 2001
From: Ann Raiho
Date: Wed, 21 Jun 2017 21:59:08 -0400
Subject: [PATCH 050/771] Changing units and a testing sda.enkf as a function
---
models/linkages/R/read_restart.LINKAGES.R | 2 +-
models/linkages/R/write_restart.LINKAGES.R | 2 +-
models/sipnet/R/read_restart.SIPNET.R | 2 +-
models/sipnet/R/write_restart.SIPNET.R | 2 +-
.../assim.sequential/R/load_data_paleon_sda.R | 14 +++++------
modules/assim.sequential/R/sda.enkf.R | 13 ++++++----
modules/assim.sequential/inst/paleon_sda.R | 24 ++++++++++++++-----
7 files changed, 37 insertions(+), 22 deletions(-)
diff --git a/models/linkages/R/read_restart.LINKAGES.R b/models/linkages/R/read_restart.LINKAGES.R
index 81e83c1a1db..54dce1b1761 100644
--- a/models/linkages/R/read_restart.LINKAGES.R
+++ b/models/linkages/R/read_restart.LINKAGES.R
@@ -42,7 +42,7 @@ read_restart.LINKAGES <- function(outdir, runid, stop.time, settings, var.names
forecast <- list()
if ("AGB.pft" %in% var.names) {
- forecast[[1]] <- udunits2::ud.convert(ens$AGB.pft, "kg/m^2", "Mg/ha") #already has C #* unit.conv
+ forecast[[1]] <- ens$AGB.pft #already has C #* unit.conv
names(forecast[[1]]) <- paste0('AGB.pft.',pft.names)
}
diff --git a/models/linkages/R/write_restart.LINKAGES.R b/models/linkages/R/write_restart.LINKAGES.R
index c1fc9227cdf..0b116ca1d0e 100644
--- a/models/linkages/R/write_restart.LINKAGES.R
+++ b/models/linkages/R/write_restart.LINKAGES.R
@@ -41,7 +41,7 @@ write_restart.LINKAGES <- function(outdir, runid, start.time, stop.time, setting
names.keep <- names(new.state)
- new.state <- udunits2::ud.convert(as.matrix(new.state), "Mg/ha", "kg/m^2")
+ new.state <- as.matrix(new.state)
names(new.state) <- names.keep
diff --git a/models/sipnet/R/read_restart.SIPNET.R b/models/sipnet/R/read_restart.SIPNET.R
index 1725bb70918..413ee154455 100644
--- a/models/sipnet/R/read_restart.SIPNET.R
+++ b/models/sipnet/R/read_restart.SIPNET.R
@@ -38,7 +38,7 @@ read_restart.SIPNET <- function(outdir, runid, stop.time, settings, var.names, p
#### PEcAn Standard Outputs
if ("NPP" %in% var.names) {
- forecast[[1]] <- udunits2::ud.convert(mean(ens$NPP), "kg/m^2/s", "Mg/ha/yr") #* unit.conv
+ forecast[[1]] <- mean(ens$NPP) #* unit.conv
names(forecast[[1]]) <- c("NPP")
}
diff --git a/models/sipnet/R/write_restart.SIPNET.R b/models/sipnet/R/write_restart.SIPNET.R
index bedd1c34642..acf4c919b16 100644
--- a/models/sipnet/R/write_restart.SIPNET.R
+++ b/models/sipnet/R/write_restart.SIPNET.R
@@ -50,7 +50,7 @@ write_restart.SIPNET <- function(outdir, runid, start.time, stop.time, settings,
analysis.save <- list()
if ("NPP" %in% variables) {
- analysis.save[[1]] <- new.state$NPP #*unit.conv -> Mg/ha/yr
+ analysis.save[[1]] <- udunits2::ud.convert(new.state$NPP, "kg/m^2/s", "Mg/ha/yr") #*unit.conv -> Mg/ha/yr
names(analysis.save[[1]]) <- c("NPP")
}
diff --git a/modules/assim.sequential/R/load_data_paleon_sda.R b/modules/assim.sequential/R/load_data_paleon_sda.R
index 73bf153fb18..7557d7c0961 100644
--- a/modules/assim.sequential/R/load_data_paleon_sda.R
+++ b/modules/assim.sequential/R/load_data_paleon_sda.R
@@ -52,7 +52,7 @@ load_data_paleon_sda <- function(settings){
obs.cov <- obs.cov.tmp <- list()
obs.times <- seq(as.Date(start_date), as.Date(end_date), by = settings$state.data.assimilation$forecast.time.step)
- obs.times <- year(obs.times)
+ obs.times <- lubridate::year(obs.times)
biomass2carbon <- 0.48
@@ -97,9 +97,9 @@ load_data_paleon_sda <- function(settings){
logger.info('Now, mapping data species to model PFTs')
dataset$pft.cat <- x[dataset$species_id]
- dataset <- dataset[dataset$pft.cat!='NA_AbvGrndWood',]
+ dataset <- dataset[dataset$pft.cat!='AGB.pft.NA',]
- variable <- sub('AGB.pft','AbvGrndWood',variable)
+ variable <- c('AbvGrndWood')
arguments <- list(.(year, MCMC_iteration, site_id, pft.cat), .(variable))
arguments2 <- list(.(year, pft.cat), .(variable))
arguments3 <- list(.(MCMC_iteration), .(pft.cat, variable), .(year))
@@ -118,12 +118,12 @@ load_data_paleon_sda <- function(settings){
cov.test <- apply(iter_mat,3,function(x){cov(x)})
for(t in seq_along(obs.times)){
- obs.mean.tmp[[t]] <- mean_mat[mean_mat[,time.type]==obs.times[t], variable] #THIS WONT WORK IF TIMESTEP ISNT ANNUAL
+ obs.mean.tmp[[t]] <- mean_mat[mean_mat[,time.type]==obs.times[t], -c(1)] #THIS WONT WORK IF TIMESTEP ISNT ANNUAL
if(any(var.names == 'AGB.pft')){
- obs.mean.tmp[[t]] <- rep(NA, length(x))
- names(obs.mean.tmp[[t]]) <- sort(x)
- for(r in seq_along(x)){
+ obs.mean.tmp[[t]] <- rep(NA, length(unique(dataset$pft.cat)))
+ names(obs.mean.tmp[[t]]) <- sort(unique(dataset$pft.cat))
+ for(r in seq_along(unique(dataset$pft.cat))){
k <- mean_mat[mean_mat$year==obs.times[t] & mean_mat$pft.cat==names(obs.mean.tmp[[t]][r]), variable]
if(any(k)){
obs.mean.tmp[[t]][r] <- k
diff --git a/modules/assim.sequential/R/sda.enkf.R b/modules/assim.sequential/R/sda.enkf.R
index d3b4a1c6226..b2ada4c422c 100644
--- a/modules/assim.sequential/R/sda.enkf.R
+++ b/modules/assim.sequential/R/sda.enkf.R
@@ -206,7 +206,8 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
## start model runs
start.model.runs(settings, settings$database$bety$write)
- #save.image(file.path(outdir, "sda.initial.runs.Rdata"))
+ save(list = ls(envir = environment(), all.names = TRUE),
+ file = file.path(outdir, "sda.initial.runs.Rdata"), envir = environment())
###-------------------------------------------------------------------###
### tests before data assimilation ###
@@ -336,7 +337,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
###-------------------------------------------------------------------###
### loop over time ###
###-------------------------------------------------------------------###
- for (t in seq_len(nt)) {#
+ for (t in 1:2) {#
###-------------------------------------------------------------------###
### read restart ###
@@ -377,8 +378,10 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
choose <- na.omit(charmatch(colnames(X),names(obs.mean[[t]])))
Y <- unlist(obs.mean[[t]][choose])
+ Y[is.na(Y)] <- 0
R <- as.matrix(obs.cov[[t]][choose,choose])
+ R[is.na(R)]<-0
if (length(obs.mean[[t]]) > 1) {
for (s in seq_along(obs.mean[[t]])) {
@@ -459,7 +462,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
# H[i, i] <- 1/sum(mu.f) #? this seems to get us on the right track. mu.f[i]/sum(mu.f) doesn't work.
# }
## process error
- if (exists("Q")) {
+ if (!is.null(Q)) {
Pf <- Pf + Q
}
## Kalman Gain
@@ -604,7 +607,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
constants.tobit = list(N = ncol(X), YN = length(y.ind)) #, nc = 1
dimensions.tobit = list(X = ncol(X), X.mod = ncol(X), Q = c(ncol(X),ncol(X))) # b = dim(inits.pred$b),
- data.tobit = list(muf = as.vector(mu.f), pf = Pf, aq = aqq[t,,], bq = bqq[t],
+ data.tobit = list(muf = as.vector(mu.f), pf = solve(Pf), aq = aqq[t,,], bq = bqq[t],
y.ind = y.ind,
y.censored = y.censored,
r = solve(R))
@@ -644,7 +647,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
Cmodel$aq <- aqq[t,,]
Cmodel$bq <- bqq[t]
Cmodel$muf <- mu.f
- Cmodel$pf <- Pf
+ Cmodel$pf <- solve(Pf)
Cmodel$r <- solve(R)
for(i in 1:length(y.ind)) {
diff --git a/modules/assim.sequential/inst/paleon_sda.R b/modules/assim.sequential/inst/paleon_sda.R
index 3374d3e40fd..304b7a26996 100644
--- a/modules/assim.sequential/inst/paleon_sda.R
+++ b/modules/assim.sequential/inst/paleon_sda.R
@@ -1,13 +1,23 @@
library(PEcAn.all)
library(PEcAn.SIPNET)
+library(PEcAn.LINKAGES)
library(PEcAn.assim.sequential)
+library(nimble)
+library(lubridate)
+
+ciEnvelope <- function(x,ylo,yhi,...){
+ polygon(cbind(c(x, rev(x), x[1]), c(ylo, rev(yhi),
+ ylo[1])), border = NA,...)
+}
#LINKAGES #AGB.pft #Harvard Forest
-#setwd('/fs/data2/output//PEcAn_1000003314/')
+setwd('/fs/data2/output//PEcAn_1000003314/')
#SIPNET
-setwd('/fs/data2/output//PEcAn_1000003356')
+#setwd('/fs/data2/output//PEcAn_1000003356')
+#TO DO: Normalize state vector because NPP is too small.
+#See talk with with Mike on 6/21/17
#---------------- Load PEcAn settings file. --------------------------------#
# Open and read in settings file for PEcAn run.
@@ -15,10 +25,12 @@ settings <- read.settings("pecan.SDA.xml")
obs.list <- PEcAn.assim.sequential::load_data_paleon_sda(settings = settings)
-status.start("IC")
-ne <- as.numeric(settings$state.data.assimilation$n.ensemble)
-IC <- sample.IC.SIPNET(ne, state = c('AGB','NPP'))
-status.end()
+IC <- NULL
+
+# status.start("IC")
+# ne <- as.numeric(settings$state.data.assimilation$n.ensemble)
+# IC <- sample.IC.SIPNET(ne, state = c('AGB','NPP'))
+# status.end()
sda.enkf(settings, obs.mean = obs.list$obs.mean, obs.cov = obs.list$obs.cov, IC = IC)
From de1e091fad268e64211091530d2f8e32399134d2 Mon Sep 17 00:00:00 2001
From: Ann Raiho
Date: Thu, 22 Jun 2017 13:18:32 -0400
Subject: [PATCH 051/771] making some changes to be able to redo years in sda
without any hassle. Now, .nc files will be remade instead of skipped if they
already exist (might want to look back into if that's an okay thing to do).
And the restart files will look for old output if the new output isn't there.
(might want to think about always looking for the output related to time)
---
models/linkages/R/model2netcdf.LINKAGES.R | 8 ++++----
models/linkages/R/write.config.LINKAGES.R | 10 +++++-----
models/linkages/R/write_restart.LINKAGES.R | 6 ++++--
models/sipnet/R/model2netcdf.SIPNET.R | 6 +++---
modules/assim.sequential/R/load_data_paleon_sda.R | 3 ++-
5 files changed, 18 insertions(+), 15 deletions(-)
diff --git a/models/linkages/R/model2netcdf.LINKAGES.R b/models/linkages/R/model2netcdf.LINKAGES.R
index e944f3b9c4a..76a6200b68f 100644
--- a/models/linkages/R/model2netcdf.LINKAGES.R
+++ b/models/linkages/R/model2netcdf.LINKAGES.R
@@ -26,7 +26,7 @@ model2netcdf.LINKAGES <- function(outdir, sitelat, sitelon, start_date = NULL, e
# , PFTs) { logger.severe('NOT IMPLEMENTED')
library(PEcAn.utils)
-
+
### Read in model output in linkages format
load(file.path(outdir, "linkages.out.Rdata"))
# linkages.output.dims <- dim(linkages.output)
@@ -39,9 +39,9 @@ model2netcdf.LINKAGES <- function(outdir, sitelat, sitelon, start_date = NULL, e
### Loop over years in linkages output to create separate netCDF outputs
for (y in seq_along(years)) {
- if (file.exists(file.path(outdir, paste(years[y], "nc", sep = ".")))) {
- next
- }
+ # if (file.exists(file.path(outdir, paste(years[y], "nc", sep = "."))) & overwrite ==FALSE) {
+ # next
+ # }
print(paste("---- Processing year: ", years[y])) # turn on for debugging
## Subset data for processing sub.linkages.output <- subset(linkages.output, year == y)
diff --git a/models/linkages/R/write.config.LINKAGES.R b/models/linkages/R/write.config.LINKAGES.R
index 3e66072f730..af0b7afa138 100644
--- a/models/linkages/R/write.config.LINKAGES.R
+++ b/models/linkages/R/write.config.LINKAGES.R
@@ -124,8 +124,8 @@ write.config.LINKAGES <- function(defaults = NULL, trait.values, settings, run.i
vals <- trait.values[[group]]
# replace defaults with traits
- new.params.locs <- which(names(spp.params) %in% names(vals))
- new.vals.locs <- which(names(vals) %in% names(spp.params))
+ #new.params.locs <- which(names(spp.params) %in% names(vals))
+ #new.vals.locs <- which(names(vals) %in% names(spp.params))
#spp.params[which(spp.params$Spp_Name == group), new.params.locs] <- vals[new.vals.locs]
# conversion of some traits to match what LINKAGES needs Going to have to look up this paper
@@ -150,11 +150,11 @@ write.config.LINKAGES <- function(defaults = NULL, trait.values, settings, run.i
if ("DMIN" %in% names(vals)) {
spp.params[spp.params$Spp_Name == group, ]$DMIN <- vals$DMIN
}
- if ("AGEMAX" %in% names(vals)) {
- spp.params[spp.params$Spp_Name == group, ]$AGEMAX <- vals$AGEMAX
+ if ("AGEMX" %in% names(vals)) {
+ spp.params[spp.params$Spp_Name == group, ]$AGEMX <- vals$AGEMX
}
if ("G" %in% names(vals)) {
- spp.params[spp.params$Spp_Name == group, ]$G <- vals$Gmax
+ spp.params[spp.params$Spp_Name == group, ]$G <- vals$G
}
if ("SPRTND" %in% names(vals)) {
spp.params[spp.params$Spp_Name == group, ]$SPRTND <- vals$SPRTND
diff --git a/models/linkages/R/write_restart.LINKAGES.R b/models/linkages/R/write_restart.LINKAGES.R
index 0b116ca1d0e..fcde7ef0bb4 100644
--- a/models/linkages/R/write_restart.LINKAGES.R
+++ b/models/linkages/R/write_restart.LINKAGES.R
@@ -129,8 +129,10 @@ write_restart.LINKAGES <- function(outdir, runid, start.time, stop.time, setting
# skip ensemble member if no file availible
outfile <- file.path(outdir, runid, "linkages.out.Rdata")
if (!file.exists(outfile)) {
- print(paste0("missing outfile ens #", runid))
- next
+ outfile <- file.path(outdir, runid, paste0(start.time, "linkages.out.Rdata"))
+ if (!file.exists(outfile)) {
+ logger.severe(paste0("missing outfile ens #", runid))
+ }
}
print(paste0("runid = ", runid))
diff --git a/models/sipnet/R/model2netcdf.SIPNET.R b/models/sipnet/R/model2netcdf.SIPNET.R
index 6b22c750443..cd1ba769240 100644
--- a/models/sipnet/R/model2netcdf.SIPNET.R
+++ b/models/sipnet/R/model2netcdf.SIPNET.R
@@ -36,9 +36,9 @@ model2netcdf.SIPNET <- function(outdir, sitelat, sitelon, start_date, end_date,
### Loop over years in SIPNET output to create separate netCDF outputs
for (y in years) {
- if (file.exists(file.path(outdir, paste(y, "nc", sep = ".")))) {
- next
- }
+ # if (file.exists(file.path(outdir, paste(y, "nc", sep = "."))) & overwrite == FALSE) {
+ # next
+ # }
print(paste("---- Processing year: ", y)) # turn on for debugging
## Subset data for processing
diff --git a/modules/assim.sequential/R/load_data_paleon_sda.R b/modules/assim.sequential/R/load_data_paleon_sda.R
index 7557d7c0961..b54841a05cf 100644
--- a/modules/assim.sequential/R/load_data_paleon_sda.R
+++ b/modules/assim.sequential/R/load_data_paleon_sda.R
@@ -71,7 +71,6 @@ load_data_paleon_sda <- function(settings){
logger.info(paste('Using PEcAn.benchmark::load_data.R on format_id',format_id[[i]],'-- may take a few minutes'))
obvs[[i]] <- PEcAn.benchmark::load_data(data.path, format, start_year = lubridate::year(start_date), end_year = lubridate::year(end_date), site)
- dataset <- obvs[[i]]
variable <- intersect(var.names,colnames(obvs[[i]]))
### Tree Ring Data Product
@@ -86,6 +85,8 @@ load_data_paleon_sda <- function(settings){
logger.severe('ERROR: This data format has not been added to this function (ツ)_/¯ ')
}
+ dataset <- obvs[[i]]
+
### Map species to model specific PFTs
if(any(var.names == 'AGB.pft')){
spp_id <- match_species_id(unique(dataset$species_id),format_name = 'usda',bety)
From a33f6310b7371626b190c7834304011f23c7d1c9 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Thu, 22 Jun 2017 15:39:42 -0500
Subject: [PATCH 052/771] Allowing multiple load. Modified server.R
---
shiny/workflowPlot/server.R | 77 +++++++++++++++----------------------
1 file changed, 30 insertions(+), 47 deletions(-)
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index 8188df3822a..97362bfb00d 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -39,6 +39,7 @@ server <- shinyServer(function(input, output, session) {
# Get all workflow IDs
ids <- workflows(bety, ensemble = TRUE) %>% distinct(workflow_id) %>% collect %>%
.[["workflow_id"]] %>% sort(decreasing = TRUE)
+ # pull(.,workflow_id) %>% sort(decreasing = TRUE)
}
return(ids)
} # get_workflow_ids
@@ -91,13 +92,13 @@ server <- shinyServer(function(input, output, session) {
if(input$load){
req(input$all_run_id)
# Selected `multiple' ids
- ids_DF <- parse_ids_from_input_runID(input$all_run_id)
- ids_DF %>% filter(wID %in% input$workflow_id)
+ ids_DF <- parse_ids_from_input_runID(input$all_run_id) %>% filter(wID %in% input$workflow_id)
# To allow caching later
+ # Change variable name
r_ID <- intersect(r_ID,ids_DF$runID)
}
updateSelectizeInput(session, "run_id", choices=r_ID)
- })
+ })
# run_ids <- reactive({
# req(input$workflow_id)
# r_ID <- get_run_ids(bety, input$workflow_id)
@@ -167,21 +168,6 @@ server <- shinyServer(function(input, output, session) {
observe({
updateSelectizeInput(session, "variable_name", choices=var_names())
})
- # observe({
- # ignore <- input$variable_name
- # ranges$x <- NULL
- # ranges$y <- NULL
- # })
- # observeEvent(input$plot_dblclick, {
- # brush <- input$plot_brush
- # if (!is.null(brush)) {
- # ranges$x <- as.POSIXct(c(brush$xmin, brush$xmax), origin = "1970-01-01", tz = "UTC")
- # ranges$y <- c(brush$ymin, brush$ymax)
- # } else {
- # ranges$x <- NULL
- # ranges$y <- NULL
- # }
- # })
# If want to render text
output$info <- renderText({
# indicators <- strsplit(input$indicators, ",")[[1]]
@@ -192,7 +178,7 @@ server <- shinyServer(function(input, output, session) {
# d <- typeof(all_workflow_id)
# paste0(input$all_run_id)
- paste0(parse_ids_from_input_runID(input$all_run_id)$wID)
+ paste0(parse_ids_from_input_runID(input$all_run_id)$runID)
# paste0(input$load)
# paste0(input$all_run_id[length(input$all_run_id)])
# paste0(input$variable_name)
@@ -251,9 +237,7 @@ server <- shinyServer(function(input, output, session) {
globalDF$var_name <- as.character(globalDF$var_name)
return(globalDF)
}
-
-
-
+
loadNewData <-eventReactive(input$load,{
# workflow_id = 99000000077
# run_id = 99000000002
@@ -261,40 +245,39 @@ server <- shinyServer(function(input, output, session) {
req(input$all_run_id)
globalDF <- data.frame()
ids_DF <- parse_ids_from_input_runID(input$all_run_id)
- for(i in nrow(ids_DF)){
- globalDF <- rbind(globalDF, load_data_single_run(ids_DF$wID[i],ids_DF$runID[i]))
+ for(row_num in 1:nrow(ids_DF)){
+ globalDF <- rbind(globalDF, load_data_single_run(ids_DF$wID[row_num],ids_DF$runID[row_num]))
}
return(globalDF)
- # for(workflow_id in ids){
- # run_ids <- get_run_ids(bety,workflow_id)
- # for(run_id in run_ids){
- # var_names <- get_var_names(bety, workflow_id, run_id)
- # removeVarNames <- c('Year','FracJulianDay')
- # var_names <-var_names[!var_names %in% removeVarNames]
- # # if (workflow_id != "" && run_id != "" && var_name != "") {
- # }
- # }
})
output$outputPlot <- renderPlotly({
- masterDF <- load_data_single_run(input$workflow_id,input$run_id)
- masterDF <- rbind(masterDF,loadNewData())
+ # masterDF <- load_data_single_run(input$workflow_id,input$run_id)
+ masterDF <- loadNewData()
output$info1 <- renderText({
paste0(nrow(masterDF))
+ paste0(length(unique(masterDF$run_id)))
})
# Error messages
validate(
- need(input$workflow_id, 'Found workflow id'),
- need(input$run_id, 'Run id detected'),
+ # need(input$workflow_id, 'Found workflow id'),
+ # need(input$run_id, 'Run id detected'),
need(input$variable_name, 'Please wait! Loading data')
)
masterDF$var_name <- as.character(masterDF$var_name)
+ masterDF$run_id <- as.factor(as.character(masterDF$run_id))
+
# masterDF$var_name = as.factor(masterDF$var_name)
# df1<-subset(masterDF,var_name==var_name)
- df <- masterDF %>%
- dplyr::filter(workflow_id == input$workflow_id &
- run_id == input$run_id &
- var_name == input$variable_name) %>%
- dplyr::select(dates,vals)
+
+ # Drop filtering
+ df <- masterDF %>%
+ dplyr::filter(
+ # workflow_id == input$workflow_id &
+ # run_id == input$run_id &
+ var_name == input$variable_name)
+ # %>%
+ # dplyr::select(dates,vals,workflow_id,run_id)
+
title <- unique(df$title)[1]
xlab <- unique(df$xlab)[1]
ylab <- unique(df$ylab)[1]
@@ -311,16 +294,16 @@ server <- shinyServer(function(input, output, session) {
# workflow_id %in% workflow_id)
# & run_id == run_id & var_name == var_name)
# df<-masterDF %>% dplyr::filter(workflow_id == input$workflow_id)
- plt <- ggplot(df, aes(x=dates, y=vals)) +
+ plt <- ggplot(df, aes(x=dates, y=vals, color=run_id)) +
# geom_point(aes(color="Model output")) +
- geom_point() +
+ geom_point()
# geom_smooth(aes(fill = "Spline fit")) +
# coord_cartesian(xlim = ranges$x, ylim = ranges$y) +
# scale_y_continuous(labels=fancy_scientific) +
- labs(title=title, x=xlab, y=ylab) +
+ # labs(title=title, x=xlab, y=ylab) +
# labs(title=unique(df$title)[1], x=unique(df$xlab)[1], y=unique(df$ylab)[1]) +
- scale_color_manual(name = "", values = "black") +
- scale_fill_manual(name = "", values = "grey50")
+ # scale_color_manual(name = "", values = "black") +
+ # scale_fill_manual(name = "", values = "grey50")
# theme(axis.text.x = element_text(angle = -90))
plt<-ggplotly(plt)
# plot(plt)
From 95987d9303f215461fa164d159845b0549e2884e Mon Sep 17 00:00:00 2001
From: Ann Raiho
Date: Thu, 22 Jun 2017 21:13:44 -0400
Subject: [PATCH 053/771] fixing sda plots
---
modules/assim.sequential/R/sda.enkf.R | 59 ++++++++++++----------
modules/assim.sequential/inst/paleon_sda.R | 3 ++
2 files changed, 35 insertions(+), 27 deletions(-)
diff --git a/modules/assim.sequential/R/sda.enkf.R b/modules/assim.sequential/R/sda.enkf.R
index b2ada4c422c..eea264df2cb 100644
--- a/modules/assim.sequential/R/sda.enkf.R
+++ b/modules/assim.sequential/R/sda.enkf.R
@@ -337,7 +337,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
###-------------------------------------------------------------------###
### loop over time ###
###-------------------------------------------------------------------###
- for (t in 1:2) {#
+ for (t in 11:20) {#
###-------------------------------------------------------------------###
### read restart ###
@@ -607,7 +607,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
constants.tobit = list(N = ncol(X), YN = length(y.ind)) #, nc = 1
dimensions.tobit = list(X = ncol(X), X.mod = ncol(X), Q = c(ncol(X),ncol(X))) # b = dim(inits.pred$b),
- data.tobit = list(muf = as.vector(mu.f), pf = solve(Pf), aq = aqq[t,,], bq = bqq[t],
+ data.tobit = list(muf = as.vector(mu.f), pf = Pf, aq = aqq[t,,], bq = bqq[t],
y.ind = y.ind,
y.censored = y.censored,
r = solve(R))
@@ -647,7 +647,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
Cmodel$aq <- aqq[t,,]
Cmodel$bq <- bqq[t]
Cmodel$muf <- mu.f
- Cmodel$pf <- solve(Pf)
+ Cmodel$pf <- Pf
Cmodel$r <- solve(R)
for(i in 1:length(y.ind)) {
@@ -781,7 +781,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
})))
par(mfrow = c(2, 1))
- for (i in 1:2) {#
+ for (i in 1:14) {#
t1 <- 1
Xbar <- plyr::laply(FORECAST[t1:t], function(x) { mean(x[, i], na.rm = TRUE) })
Xci <- plyr::laply(FORECAST[t1:t], function(x) { quantile(x[, i], c(0.025, 0.975)) })
@@ -794,7 +794,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
plot(as.Date(obs.times[t1:t]),
Xbar,
- ylim = range(c(XaCI, Xci), na.rm = TRUE),
+ ylim = c(0,8),#range(c(XaCI, Xci), na.rm = TRUE),
type = "n",
xlab = "Year",
ylab = ylab.names[grep(colnames(X)[i], var.names)],
@@ -887,12 +887,11 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
print("climate diagnostics under development")
}
- ### Diagnostic graphs
- pdf(file.path(settings$outdir, "EnKF.pdf"))
###-------------------------------------------------------------------###
### time series ###
###-------------------------------------------------------------------###
+ pdf(file.path(settings$outdir, "sda.enkf.time-series.pdf"))
names.y <- unique(unlist(lapply(obs.mean[t1:t], function(x) { names(x) })))
Ybar <- t(sapply(obs.mean[t1:t], function(x) {
@@ -910,11 +909,9 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
sqrt(diag(x))
}))) #need to make this from quantiles for lyford plot data
# YCI = YCI[,pmatch(colnames(X), names(obs.mean[[nt]][[1]]))]
- Xsum <- plyr::laply(FORECAST, function(x) { mean(rowSums(x[,1:9], na.rm = TRUE)) })
+ Xsum <- plyr::laply(FORECAST, function(x) { mean(rowSums(x[,1:length(names.y)], na.rm = TRUE)) })[t1:t]
- pdf('fcomp.kalman.filter.pdf')
for (i in seq_len(ncol(X))) {
- #t1 <- 1
Xbar <- plyr::laply(FORECAST[t1:t], function(x) { mean(x[, i], na.rm = TRUE) })
Xci <- plyr::laply(FORECAST[t1:t], function(x) { quantile(x[, i], c(0.025, 0.975)) })
Xci[is.na(Xci)]<-0
@@ -948,13 +945,15 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
# analysis
ciEnvelope(as.Date(obs.times[t1:t]), XaCI[, 1], XaCI[, 2], col = alphapink)
lines(as.Date(obs.times[t1:t]), Xa, col = "black", lty = 2, lwd = 2)
- }
+ legend('topright',c('Forecast','Data','Analysis'),col=c(alphablue,alphagreen,alphapink),lty=1,lwd=5)
+
+ }
+ dev.off()
###-------------------------------------------------------------------###
### bias diagnostics ###
- ###-------------------------------------------------------------------###
- # legend('topleft',c('Data','Forecast','Analysis'),col=c(4,2,3),lty=1,cex=1) Forecast minus data =
- # error
+ ###-------------------------------------------------------------------###
+ pdf(file.path(settings$outdir, "bias.diagnostic.pdf"))
for (i in seq_along(obs.mean[[1]])) {
Xbar <- plyr::laply(FORECAST[t1:t], function(x) { mean(x[, i], na.rm = TRUE) })
Xci <- plyr::laply(FORECAST[t1:t], function(x) { quantile(x[, i], c(0.025, 0.975)) })
@@ -962,17 +961,18 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
Xa <- plyr::laply(ANALYSIS[t1:t], function(x) { mean(x[, i], na.rm = TRUE) })
XaCI <- plyr::laply(ANALYSIS[t1:t], function(x) { quantile(x[, i], c(0.025, 0.975)) })
- reg <- lm(Xbar[t1:t]/Xsum - unlist(Ybar[t1:t, i]) ~ c(t1:t))
+ if(length(which(is.na(Ybar[,i])))>=length(t1:t)) next()
+ reg <- lm(Xbar[t1:t]/Xsum - unlist(Ybar[, i]) ~ c(t1:t))
plot(t1:t,
- Xbar[t1:t]/Xsum - unlist(Ybar[t1:t, i]),
+ Xbar/Xsum - unlist(Ybar[, i]),
pch = 16, cex = 1,
- ylim = c(min(Xci[t1:t, 1]/Xsum - unlist(Ybar[t1:t, i])), max(Xci[t1:t, 2]/Xsum - unlist(Ybar[t1:t, i]))),
+ ylim = c(min(Xci[, 1]/Xsum - unlist(Ybar[, i])), max(Xci[,2]/Xsum - unlist(Ybar[, i]))),
xlab = "Time",
ylab = "Error",
main = paste(colnames(X)[i], " Error = Forecast - Data"))
ciEnvelope(rev(t1:t),
- rev(Xci[t1:t, 1]/Xsum - unlist(Ybar[t1:t, i])),
- rev(Xci[t1:t, 2]/Xsum - unlist(Ybar[t1:t, i])),
+ rev(Xci[, 1]/Xsum - unlist(Ybar[, i])),
+ rev(Xci[, 2]/Xsum - unlist(Ybar[, i])),
col = alphapink)
abline(h = 0, lty = 2, lwd = 2)
abline(reg)
@@ -981,17 +981,17 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
# d<-density(c(Xbar[t1:t] - unlist(Ybar[t1:t,i]))) lines(d$y+1,d$x)
# forecast minus analysis = update
- reg1 <- lm(Xbar[t1:t]/Xsum - Xa[t1:t]/Xsum ~ c(t1:t))
+ reg1 <- lm(Xbar/Xsum - Xa/Xsum ~ c(t1:t))
plot(t1:t,
- Xbar[t1:t]/Xsum - Xa[t1:t]/Xsum,
+ Xbar/Xsum - Xa/Xsum,
pch = 16, cex = 1,
- ylim = c(min(Xbar[t1:t]/Xsum - XaCI[t1:t, 2]/Xsum), max(Xbar[t1:t]/Xsum - XaCI[t1:t, 1]/Xsum)),
+ ylim = c(min(Xbar/Xsum - XaCI[, 2]/Xsum), max(Xbar/Xsum - XaCI[, 1]/Xsum)),
xlab = "Time", ylab = "Update",
main = paste(colnames(X)[i],
"Update = Forecast - Analysis"))
ciEnvelope(rev(t1:t),
- rev(Xbar[t1:t]/Xsum - XaCI[t1:t, 1]/Xsum),
- rev(Xbar[t1:t]/Xsum - XaCI[t1:t, 2]/Xsum),
+ rev(Xbar/Xsum - XaCI[, 1]/Xsum),
+ rev(Xbar/Xsum - XaCI[, 2]/Xsum),
col = alphagreen)
abline(h = 0, lty = 2, lwd = 2)
abline(reg1)
@@ -1000,20 +1000,27 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
digits = 3)))
# d<-density(c(Xbar[t1:t] - Xa[t1:t])) lines(d$y+1,d$x)
}
+ dev.off()
+
###-------------------------------------------------------------------###
### process variance plots ###
###-------------------------------------------------------------------###
if (processvar) {
+
library(corrplot)
+ pdf('process.var.plots.pdf')
+
cor.mat <- cov2cor(aqq[t, , ] / bqq[t])
colnames(cor.mat) <- colnames(X)
rownames(cor.mat) <- colnames(X)
par(mfrow = c(1, 1), mai = c(1, 1, 4, 1))
- corrplot(cor.mat, type = "upper", tl.srt = 45, addCoef.col = "black")
+ corrplot(cor.mat, type = "upper", tl.srt = 45,order='AOE')
plot(as.Date(obs.times[t1:t]), bqq[t1:t],
pch = 16, cex = 1,
ylab = "Degrees of Freedom", xlab = "Time")
+
+ dev.off()
}
###-------------------------------------------------------------------###
@@ -1042,6 +1049,4 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
# cex=1, xlab="Total Yearly Precip",
# ylab="Update",main=colnames(Ybar)[i])
- dev.off()
-
} # sda.enkf
diff --git a/modules/assim.sequential/inst/paleon_sda.R b/modules/assim.sequential/inst/paleon_sda.R
index 304b7a26996..925cadf87a7 100644
--- a/modules/assim.sequential/inst/paleon_sda.R
+++ b/modules/assim.sequential/inst/paleon_sda.R
@@ -32,6 +32,9 @@ IC <- NULL
# IC <- sample.IC.SIPNET(ne, state = c('AGB','NPP'))
# status.end()
+#TO DO: Having problem with running proc.var == TRUE because nimble isn't keeping the toggle sampler in the function environment.
+#TO DO: Initial conditions for linkages are messed up. Need to calibrate.
+
sda.enkf(settings, obs.mean = obs.list$obs.mean, obs.cov = obs.list$obs.cov, IC = IC)
From 8700cbdb7362d09003db579d79a275814f6afa53 Mon Sep 17 00:00:00 2001
From: Ann Raiho
Date: Fri, 23 Jun 2017 00:02:21 -0400
Subject: [PATCH 054/771] Added scaling to ensemble kalman filter to fix
problem with state variables on different scales.
---
modules/assim.sequential/R/sda.enkf.R | 33 ++++++++++++++++++++-------
1 file changed, 25 insertions(+), 8 deletions(-)
diff --git a/modules/assim.sequential/R/sda.enkf.R b/modules/assim.sequential/R/sda.enkf.R
index eea264df2cb..cb84dbedcc6 100644
--- a/modules/assim.sequential/R/sda.enkf.R
+++ b/modules/assim.sequential/R/sda.enkf.R
@@ -337,7 +337,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
###-------------------------------------------------------------------###
### loop over time ###
###-------------------------------------------------------------------###
- for (t in 11:20) {#
+ for(t in seq_len(nt)) {#
###-------------------------------------------------------------------###
### read restart ###
@@ -465,11 +465,28 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
if (!is.null(Q)) {
Pf <- Pf + Q
}
+
+ mu.f.scale <- scale(mu.f, center = mean(mu.f), scale = 1)
+ Pf.scale <- cov(scale(X, center = mu.f, scale = rep(1,length(mu.f))))
+ Pf.scale[is.na(Pf.scale)]<-0
+ R.scale <- matrix(scale(as.vector(R), center = mean(mu.f), scale = 1),2,2)
+ Y.scale <- scale(Y, center = mean(mu.f[1:2]), scale = 1)
+
+ ## Kalman Gain
+ K <- Pf.scale %*% t(H) %*% solve((R.scale + H %*% Pf.scale %*% t(H)))
+ ## Analysis
+ mu.a.scale <- mu.f.scale + K %*% (Y.scale - H %*% mu.f.scale)
+ Pa.scale <- (diag(ncol(X)) - K %*% H) %*% Pf.scale
+
+ Pa <- Pa.scale * attr(mu.f.scale, 'scaled:scale') + attr(mu.f.scale, 'scaled:center')
+ mu.a <- mu.a.scale * attr(mu.f.scale, 'scaled:scale') + attr(mu.f.scale, 'scaled:center')
+
+
## Kalman Gain
- K <- Pf %*% t(H) %*% solve((R + H %*% Pf %*% t(H)))
+ #K <- Pf %*% t(H) %*% solve((R + H %*% Pf %*% t(H)))
## Analysis
- mu.a <- mu.f + K %*% (Y - H %*% mu.f)
- Pa <- (diag(ncol(X)) - K %*% H) %*% Pf
+ #mu.a <- mu.f + K %*% (Y - H %*% mu.f)
+ #Pa <- (diag(ncol(X)) - K %*% H) %*% Pf
enkf.params[[t]] <- list(mu.f = mu.f, Pf = Pf, mu.a = mu.a, Pa = Pa)
} else {
@@ -781,7 +798,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
})))
par(mfrow = c(2, 1))
- for (i in 1:14) {#
+ for (i in 1:ncol(FORECAST[[t]])) {#
t1 <- 1
Xbar <- plyr::laply(FORECAST[t1:t], function(x) { mean(x[, i], na.rm = TRUE) })
Xci <- plyr::laply(FORECAST[t1:t], function(x) { quantile(x[, i], c(0.025, 0.975)) })
@@ -794,7 +811,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
plot(as.Date(obs.times[t1:t]),
Xbar,
- ylim = c(0,8),#range(c(XaCI, Xci), na.rm = TRUE),
+ ylim = range(c(XaCI, Xci), na.rm = TRUE),
type = "n",
xlab = "Year",
ylab = ylab.names[grep(colnames(X)[i], var.names)],
@@ -820,6 +837,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
# analysis
ciEnvelope(as.Date(obs.times[t1:t]), XaCI[, 1], XaCI[, 2], col = alphapink)
lines(as.Date(obs.times[t1:t]), Xa, col = "black", lty = 2, lwd = 2)
+ legend('topright',c('Forecast','Data','Analysis'),col=c(alphablue,alphagreen,alphapink),lty=1,lwd=5)
}
}
@@ -907,8 +925,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
rep(NA, length(names.y))
}
sqrt(diag(x))
- }))) #need to make this from quantiles for lyford plot data
- # YCI = YCI[,pmatch(colnames(X), names(obs.mean[[nt]][[1]]))]
+ })))
Xsum <- plyr::laply(FORECAST, function(x) { mean(rowSums(x[,1:length(names.y)], na.rm = TRUE)) })[t1:t]
for (i in seq_len(ncol(X))) {
From 648e22b540879025c1e3eb9843f73c0a6b4a45f4 Mon Sep 17 00:00:00 2001
From: Ann Raiho
Date: Fri, 23 Jun 2017 00:15:38 -0400
Subject: [PATCH 055/771] added ensemble adjustment
---
modules/assim.sequential/R/sda.enkf.R | 41 ++++++++++++++++++++--
modules/assim.sequential/inst/paleon_sda.R | 21 +++++------
2 files changed, 50 insertions(+), 12 deletions(-)
diff --git a/modules/assim.sequential/R/sda.enkf.R b/modules/assim.sequential/R/sda.enkf.R
index cb84dbedcc6..0b34db78727 100644
--- a/modules/assim.sequential/R/sda.enkf.R
+++ b/modules/assim.sequential/R/sda.enkf.R
@@ -759,8 +759,45 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
enkf.params[[t]] <- list(mu.f = mu.f, Pf = Pf, mu.a = mu.a, Pa = Pa)
}
- ## update state matrix
- analysis <- as.data.frame(rmvnorm(as.numeric(nens), mu.a, Pa, method = "svd"))
+ ###-------------------------------------------------------------------###
+ ### update state matrix ###
+ ###-------------------------------------------------------------------###
+ S_f <- svd(Pf)
+ L_f <- S_f$d
+ V_f <- S_f$v
+
+ ## normalize
+ Z <- X*0
+ for(i in seq_len(nens)){
+ Z[i,] <- 1/sqrt(L_f) * t(V_f)%*%(X[i,]-mu.f)
+ }
+ Z[is.na(Z)]<-0
+
+ ## analysis
+ #mu_a <- c(10,-3)
+ #D <- sqrt(diag(c(3,1)))
+ #R <- matrix(c(1,-0.75,-0.75,1),2,2)
+ #P_a <- D%*%R%*%D
+ S_a <- svd(Pa)
+ L_a <- S_a$d
+ V_a <- S_a$v
+
+ ## analysis ensemble
+ X_a <- X*0
+ for(i in seq_len(nens)){
+ X_a[i,] <- V_a %*%diag(sqrt(L_a))%*%Z[i,]+mu.a
+ }
+
+ # par(mfrow=c(1,1))
+ # plot(X_a)
+ # ## check if ensemble mean is correct
+ # cbind(mu.a,colMeans(X_a))
+ # ## check if ensemble var is correct
+ # cbind(as.vector(Pa),as.vector(cov(X_a)))
+ #
+ # analysis <- as.data.frame(rmvnorm(as.numeric(nens), mu.a, Pa, method = "svd"))
+
+ analysis <- as.data.frame(X_a)
colnames(analysis) <- colnames(X)
##### Mapping analysis vectors to be in bounds of state variables
diff --git a/modules/assim.sequential/inst/paleon_sda.R b/modules/assim.sequential/inst/paleon_sda.R
index 925cadf87a7..51098cf2283 100644
--- a/modules/assim.sequential/inst/paleon_sda.R
+++ b/modules/assim.sequential/inst/paleon_sda.R
@@ -12,11 +12,14 @@ ciEnvelope <- function(x,ylo,yhi,...){
}
#LINKAGES #AGB.pft #Harvard Forest
-setwd('/fs/data2/output//PEcAn_1000003314/')
+#setwd('/fs/data2/output//PEcAn_1000003314/')
+#TO DO: Having problem with running proc.var == TRUE because nimble isn't keeping the toggle sampler in the function environment.
+#TO DO: Initial conditions for linkages are messed up. Need to calibrate.
+
#SIPNET
-#setwd('/fs/data2/output//PEcAn_1000003356')
-#TO DO: Normalize state vector because NPP is too small.
+setwd('/fs/data2/output//PEcAn_1000003356')
+#TO DO: Skip ensemble members that fail or are missing in read.restart
#See talk with with Mike on 6/21/17
#---------------- Load PEcAn settings file. --------------------------------#
@@ -27,15 +30,13 @@ obs.list <- PEcAn.assim.sequential::load_data_paleon_sda(settings = settings)
IC <- NULL
-# status.start("IC")
-# ne <- as.numeric(settings$state.data.assimilation$n.ensemble)
-# IC <- sample.IC.SIPNET(ne, state = c('AGB','NPP'))
-# status.end()
+status.start("IC")
+ne <- as.numeric(settings$state.data.assimilation$n.ensemble)
+IC <- sample.IC.SIPNET(ne, state = c('AGB','NPP'))
+status.end()
-#TO DO: Having problem with running proc.var == TRUE because nimble isn't keeping the toggle sampler in the function environment.
-#TO DO: Intial conditions for linkages are messed up. Need to calibrate.
-sda.enkf(settings, obs.mean = obs.list$obs.mean, obs.cov = obs.list$obs.cov, IC = IC)
+PEcAn.assim.sequential::sda.enkf(settings, obs.mean = obs.list$obs.mean, obs.cov = obs.list$obs.cov, IC = IC)
From a2ebfb844dae09bac527c32dcaaa6273ee8043fe Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Fri, 23 Jun 2017 19:22:50 -0500
Subject: [PATCH 056/771] Multiple run ids. Removed debugging text. Need to
clean code
---
shiny/workflowPlot/server.R | 161 ++++++++++++++++++++----------------
shiny/workflowPlot/ui.R | 14 ++--
2 files changed, 95 insertions(+), 80 deletions(-)
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index 97362bfb00d..ca813ea7fe7 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -71,34 +71,34 @@ server <- shinyServer(function(input, output, session) {
updateSelectizeInput(session, "all_run_id", choices=all_run_ids())
})
# Update on load: workflow id for selected run ids (models)
- observe({
- if(input$load){
- req(input$all_run_id)
- # Selected `multiple' ids
- selected_id <- parse_ids_from_input_runID(input$all_run_id)$wID
- # To allow caching later
- display_id <- c(input$workflow_id,selected_id)
- updateSelectizeInput(session, "workflow_id", choices=display_id)
- } else{
- session_workflow_id <- get_workflow_ids_all(bety, session)
- updateSelectizeInput(session, "workflow_id", choices=session_workflow_id)
- }
- })
+ # observe({
+ # if(input$load){
+ # req(input$all_run_id)
+ # # Selected `multiple' ids
+ # selected_id <- parse_ids_from_input_runID(input$all_run_id)$wID
+ # # To allow caching later
+ # display_id <- c(input$workflow_id,selected_id)
+ # updateSelectizeInput(session, "workflow_id", choices=display_id)
+ # } else{
+ # session_workflow_id <- get_workflow_ids_all(bety, session)
+ # updateSelectizeInput(session, "workflow_id", choices=session_workflow_id)
+ # }
+ # })
# Update run id for selected workflow id (model)
- observe({
- req(input$workflow_id)
- r_ID <- get_run_ids(bety, input$workflow_id)
- if(input$load){
- req(input$all_run_id)
- # Selected `multiple' ids
- ids_DF <- parse_ids_from_input_runID(input$all_run_id) %>% filter(wID %in% input$workflow_id)
- # To allow caching later
- # Change variable name
- r_ID <- intersect(r_ID,ids_DF$runID)
- }
- updateSelectizeInput(session, "run_id", choices=r_ID)
- })
+ # observe({
+ # req(input$workflow_id)
+ # r_ID <- get_run_ids(bety, input$workflow_id)
+ # if(input$load){
+ # req(input$all_run_id)
+ # # Selected `multiple' ids
+ # ids_DF <- parse_ids_from_input_runID(input$all_run_id) %>% filter(wID %in% input$workflow_id)
+ # # To allow caching later
+ # # Change variable name
+ # r_ID <- intersect(r_ID,ids_DF$runID)
+ # }
+ # updateSelectizeInput(session, "run_id", choices=r_ID)
+ # })
# run_ids <- reactive({
# req(input$workflow_id)
# r_ID <- get_run_ids(bety, input$workflow_id)
@@ -152,47 +152,62 @@ server <- shinyServer(function(input, output, session) {
# return(var_names)
# }
- var_names <- reactive({
- # run_ids <- get_run_ids(bety, workflow_id())
- # var_names <- get_var_names(bety, workflow_id(), run_ids[1])
- # Removing the variables "Year" and "FracJulianDay" from the Variable Name input in the app
- req(input$workflow_id,input$run_id)
- workflow_id <- input$workflow_id
- run_id <- input$run_id
+ var_names_all <- function(workflow_id, run_id){
var_names <- get_var_names(bety, workflow_id, run_id)
removeVarNames <- c('Year','FracJulianDay')
var_names <-var_names[!var_names %in% removeVarNames]
return(var_names)
- # return(id_list)
- })
+ }
+
+ # var_names1 <- reactive({
+ # # run_ids <- get_run_ids(bety, workflow_id())
+ # # var_names <- get_var_names(bety, workflow_id(), run_ids[1])
+ # # Removing the variables "Year" and "FracJulianDay" from the Variable Name input in the app
+ # req(input$workflow_id,input$run_id)
+ # workflow_id <- input$workflow_id
+ # run_id <- input$run_id
+ # var_names <- get_var_names(bety, workflow_id, run_id)
+ # removeVarNames <- c('Year','FracJulianDay')
+ # var_names <-var_names[!var_names %in% removeVarNames]
+ # return(var_names)
+ # # return(id_list)
+ # })
observe({
- updateSelectizeInput(session, "variable_name", choices=var_names())
+ req(input$all_run_id)
+ ids_DF <- parse_ids_from_input_runID(input$all_run_id)
+ var_name_list <- c()
+ for(row_num in 1:nrow(ids_DF)){
+ var_name_list <- c(var_name_list,var_names_all(ids_DF$wID[row_num],ids_DF$runID[row_num]))
+ # var_name_list <- var_names_all(ids_DF$wID[row_num],ids_DF$runID[row_num])
+ }
+ updateSelectizeInput(session, "variable_name", choices=var_name_list)
})
# If want to render text
- output$info <- renderText({
- # indicators <- strsplit(input$indicators, ",")[[1]]
-
- # if(input$load){
- # all_workflow_id <- strsplit(input$all_workflow_id,',')
- # }
- # d <- typeof(all_workflow_id)
- # paste0(input$all_run_id)
-
- paste0(parse_ids_from_input_runID(input$all_run_id)$runID)
- # paste0(input$load)
- # paste0(input$all_run_id[length(input$all_run_id)])
- # paste0(input$variable_name)
- # paste0(run_ids(),length(run_ids()),ids)
- # ,session$clientData$url_search)
- # paste0("x=", input$plot_dblclick$x, "\ny=", input$plot_dblclick$y)
- })
+ # output$info <- renderText({
+ # # indicators <- strsplit(input$indicators, ",")[[1]]
+ #
+ # # if(input$load){
+ # # all_workflow_id <- strsplit(input$all_workflow_id,',')
+ # # }
+ # # d <- typeof(all_workflow_id)
+ # # paste0(input$all_run_id)
+ #
+ # paste0(parse_ids_from_input_runID(input$all_run_id)$runID)
+ # # paste0(input$load)
+ # # paste0(input$all_run_id[length(input$all_run_id)])
+ # # paste0(input$variable_name)
+ # # paste0(run_ids(),length(run_ids()),ids)
+ # # ,session$clientData$url_search)
+ # # paste0("x=", input$plot_dblclick$x, "\ny=", input$plot_dblclick$y)
+ # })
load_data_single_run <- function(workflow_id,run_id){
globalDF <- data.frame()
workflow <- collect(workflow(bety, workflow_id))
- var_names <- get_var_names(bety, workflow_id, run_id)
- removeVarNames <- c('Year','FracJulianDay')
- var_names <-var_names[!var_names %in% removeVarNames]
+ # var_names <- get_var_names(bety, workflow_id, run_id)
+ # removeVarNames <- c('Year','FracJulianDay')
+ # var_names <-var_names[!var_names %in% removeVarNames]
+ var_names <- var_names_all(workflow_id,run_id)
if(nrow(workflow) > 0) {
outputfolder <- file.path(workflow$folder, 'out', run_id)
files <- list.files(outputfolder, "*.nc$", full.names=TRUE)
@@ -253,10 +268,10 @@ server <- shinyServer(function(input, output, session) {
output$outputPlot <- renderPlotly({
# masterDF <- load_data_single_run(input$workflow_id,input$run_id)
masterDF <- loadNewData()
- output$info1 <- renderText({
- paste0(nrow(masterDF))
- paste0(length(unique(masterDF$run_id)))
- })
+ # output$info1 <- renderText({
+ # paste0(nrow(masterDF))
+ # paste0(length(unique(masterDF$run_id)))
+ # })
# Error messages
validate(
# need(input$workflow_id, 'Found workflow id'),
@@ -278,17 +293,17 @@ server <- shinyServer(function(input, output, session) {
# %>%
# dplyr::select(dates,vals,workflow_id,run_id)
- title <- unique(df$title)[1]
- xlab <- unique(df$xlab)[1]
- ylab <- unique(df$ylab)[1]
- output$info2 <- renderText({
- paste0(nrow(df))
- # paste0(typeof(title))
- })
- output$info3 <- renderText({
- paste0('xlab')
- # paste0(typeof(title))
- })
+ title <- unique(df$title)
+ xlab <- unique(df$xlab)
+ ylab <- unique(df$ylab)
+ # output$info2 <- renderText({
+ # paste0(nrow(df))
+ # # paste0(typeof(title))
+ # })
+ # output$info3 <- renderText({
+ # paste0('xlab')
+ # # paste0(typeof(title))
+ # })
# df1<-masterDF %>% filter(masterDF$var_name %in% var_name)
# workflow_id %in% workflow_id)
@@ -296,11 +311,11 @@ server <- shinyServer(function(input, output, session) {
# df<-masterDF %>% dplyr::filter(workflow_id == input$workflow_id)
plt <- ggplot(df, aes(x=dates, y=vals, color=run_id)) +
# geom_point(aes(color="Model output")) +
- geom_point()
+ geom_point() +
# geom_smooth(aes(fill = "Spline fit")) +
# coord_cartesian(xlim = ranges$x, ylim = ranges$y) +
# scale_y_continuous(labels=fancy_scientific) +
- # labs(title=title, x=xlab, y=ylab) +
+ labs(title=title, x=xlab, y=ylab)
# labs(title=unique(df$title)[1], x=unique(df$xlab)[1], y=unique(df$ylab)[1]) +
# scale_color_manual(name = "", values = "black") +
# scale_fill_manual(name = "", values = "grey50")
diff --git a/shiny/workflowPlot/ui.R b/shiny/workflowPlot/ui.R
index a8db281f2bb..a8f6db5d8cd 100644
--- a/shiny/workflowPlot/ui.R
+++ b/shiny/workflowPlot/ui.R
@@ -14,8 +14,8 @@ ui <- shinyUI(fluidPage(
p("Please select the run ID. You can select multiple IDs"),
selectizeInput("all_run_id", "Mutliple Run IDs", c(),multiple=TRUE),
actionButton("load", "Load Model outputs"),
- selectInput("workflow_id", "Workflow ID", c()),
- selectInput("run_id", "Run ID", c()),
+ # selectInput("workflow_id", "Workflow ID", c()),
+ # selectInput("run_id", "Run ID", c()),
selectInput("variable_name", "Variable Name", "")
# selectInput("workflow_id", "Workflow ID", c(99000000077)),
@@ -28,12 +28,12 @@ ui <- shinyUI(fluidPage(
# brush = brushOpts(id = "plot_brush",
# resetOnNew = TRUE),
# dblclick = "plot_dblclick"
- ),
+ )
# Checking variable names
- verbatimTextOutput("info"),
- verbatimTextOutput("info1"),
- verbatimTextOutput("info2"),
- verbatimTextOutput("info3")
+ # verbatimTextOutput("info"),
+ # verbatimTextOutput("info1"),
+ # verbatimTextOutput("info2"),
+ # verbatimTextOutput("info3")
)
)
))
From c657700c8375baecada6866bd3172081020d818b Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sat, 24 Jun 2017 10:02:52 -0500
Subject: [PATCH 057/771] Refactored, commented and clean code.
---
db/R/query.dplyr.R | 1 +
shiny/workflowPlot/server.R | 262 +++++++++---------------------------
shiny/workflowPlot/ui.R | 24 +---
3 files changed, 68 insertions(+), 219 deletions(-)
diff --git a/db/R/query.dplyr.R b/db/R/query.dplyr.R
index 5c36256a99b..17be1215dec 100644
--- a/db/R/query.dplyr.R
+++ b/db/R/query.dplyr.R
@@ -143,6 +143,7 @@ get_workflow_ids <- function(bety, session,all.ids=FALSE) {
# Get all workflow IDs
ids <- workflows(bety, ensemble = TRUE) %>% distinct(workflow_id) %>% collect %>%
.[["workflow_id"]] %>% sort(decreasing = TRUE)
+ # pull(.,workflow_id) %>% sort(decreasing = TRUE)
}
return(ids)
} # get_workflow_ids
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index ca813ea7fe7..fae04a4272b 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -9,57 +9,29 @@ library(scales)
library(dplyr)
# Define server logic
server <- shinyServer(function(input, output, session) {
- # options(shiny.trace=TRUE)
bety <- betyConnect()
- # bety <- betyConnect('/home/carya/pecan/web/config.php')
- # Ranges not required.
- # ranges <- reactiveValues(x = NULL, y = NULL)
- print("RESTART")
- # set the workflow id(s)
- # Retrieving all workflow ids.
- # Creating a new function here so that we wont have to modify the original one.
- # Ideally the get_workflow_ids function in db/R/query.dplyr.R should take a flag to check
- # if we want to load all workflow ids.
- # get_all_workflow_ids <- function(bety) {
- # ids <- workflows(bety, ensemble = TRUE) %>% distinct(workflow_id) %>% collect %>%
- # .[["workflow_id"]] %>% sort(decreasing = TRUE)
- # return(ids)
- # }
- # get_workflow_ids
- # ids <- get_all_workflow_ids(bety)
- # ids <- get_all_workflow_ids(bety, session)
- # Get all workflow ids
- # Using this function here for now.
- get_workflow_ids_all <- function(bety, session,all.ids=FALSE) {
- query <- isolate(parseQueryString(session$clientData$url_search))
- # If we dont want all workflow ids but only workflow id from the user url query
- if (!all.ids & "workflow_id" %in% names(query)) {
- ids <- unlist(query[names(query) == "workflow_id"], use.names = FALSE)
- } else {
- # Get all workflow IDs
- ids <- workflows(bety, ensemble = TRUE) %>% distinct(workflow_id) %>% collect %>%
- .[["workflow_id"]] %>% sort(decreasing = TRUE)
- # pull(.,workflow_id) %>% sort(decreasing = TRUE)
- }
- return(ids)
- } # get_workflow_ids
-
# Update all workflow ids
observe({
+ # Ideally the get_workflow_ids function (line 137) in db/R/query.dplyr.R should take a flag to check
+ # if we want to load all workflow ids.
# get_workflow_id function from query.dplyr.R
- all_ids <- get_workflow_ids_all(bety, session,all.ids=TRUE)
+ all_ids <- get_workflow_ids(bety, session,all.ids=TRUE)
updateSelectizeInput(session, "all_workflow_id", choices=all_ids)
})
- # Retrieves all run ids for seleted workflow ids
- # Returns ('workflow ',w_id,', run ',r_id)
+ # Update all run ids
all_run_ids <- reactive({
+ # Retrieves all run ids for selected workflow ids
+ # Returns ('workflow ',w_id,', run ',r_id)
req(input$all_workflow_id)
w_ids <- input$all_workflow_id
+ # Will return a list
run_id_list <- c()
for(w_id in w_ids){
+ # For all the workflow ids
r_ids <- get_run_ids(bety, w_id)
for(r_id in r_ids){
- # . as a separator between multiple run ids
+ # Each workflow id can have more than one run ids
+ # ',' as a separator between workflow id and run id
list_item <- paste0('workflow ',w_id,', run ',r_id)
run_id_list <- c(run_id_list,list_item)
}
@@ -70,57 +42,10 @@ server <- shinyServer(function(input, output, session) {
observe({
updateSelectizeInput(session, "all_run_id", choices=all_run_ids())
})
- # Update on load: workflow id for selected run ids (models)
- # observe({
- # if(input$load){
- # req(input$all_run_id)
- # # Selected `multiple' ids
- # selected_id <- parse_ids_from_input_runID(input$all_run_id)$wID
- # # To allow caching later
- # display_id <- c(input$workflow_id,selected_id)
- # updateSelectizeInput(session, "workflow_id", choices=display_id)
- # } else{
- # session_workflow_id <- get_workflow_ids_all(bety, session)
- # updateSelectizeInput(session, "workflow_id", choices=session_workflow_id)
- # }
- # })
- # Update run id for selected workflow id (model)
-
- # observe({
- # req(input$workflow_id)
- # r_ID <- get_run_ids(bety, input$workflow_id)
- # if(input$load){
- # req(input$all_run_id)
- # # Selected `multiple' ids
- # ids_DF <- parse_ids_from_input_runID(input$all_run_id) %>% filter(wID %in% input$workflow_id)
- # # To allow caching later
- # # Change variable name
- # r_ID <- intersect(r_ID,ids_DF$runID)
- # }
- # updateSelectizeInput(session, "run_id", choices=r_ID)
- # })
- # run_ids <- reactive({
- # req(input$workflow_id)
- # r_ID <- get_run_ids(bety, input$workflow_id)
- # if(input$load){
- # req(input$all_run_id)
- # # Selected `multiple' ids
- # selected_id <- parse_ids_from_input_runID(input$all_run_id)$wID
- # # To allow caching later
- # display_id <- c(input$workflow_id,selected_id)
- # updateSelectizeInput(session, "workflow_id", choices=display_id)
- # } else{
- # session_workflow_id <- get_workflow_ids_all(bety, session)
- # updateSelectizeInput(session, "workflow_id", choices=session_workflow_id)
- # }
- # })
- # observe({
- # updateSelectizeInput(session, "run_id", choices=run_ids())
- # })
return_DF_from_run_ID <- function(diff_ids){
# Called by the function parse_ids_from_input_runID
+ # which is a wrapper of this function
# Returns a DF for a particular run_id
- # print(diff_ids)
split_string <- strsplit(diff_ids,',')[[1]]
# Workflow id is the first element. Trim leading and ending white spaces. Split by space now
wID <- as.numeric(strsplit(trimws(split_string[1],which = c("both")),' ')[[1]][2])
@@ -128,86 +53,50 @@ server <- shinyServer(function(input, output, session) {
runID <- as.numeric(strsplit(trimws(split_string[2],which = c("both")),' ')[[1]][2])
return(data.frame(wID,runID))
}
+ # Wrapper over return_DF_from_run_ID
+ # @param list of multiple run ids
+ # run_id_string: ('workflow' workflow_ID, 'run' run_id)
+ # @return Data Frame of workflow and run ids
parse_ids_from_input_runID <- function(run_id_list){
- # global_id_DF <- data.frame()
- # split_diff_ids <- strsplit(run_id_string,';')[[1]]
- # for(diff_ids in split_diff_ids){
- # # run_id_string: 'workflow' workflow_ID, 'run' run_id
- # # Split by comma to get workflow and run ids
- #
- #
- globalDF <- data.frame()
- for(w_run_id in run_id_list){
+ globalDF <- data.frame()
+ for(w_run_id in run_id_list){
globalDF <- rbind(globalDF,return_DF_from_run_ID(w_run_id))
- }
- # split_ids <- lapply(split_diff_ids , function(x) list_workflow_run_id(x))
- # local_id_DF <- data.frame(wID,runID)
- # global_id_DF <- rbind(global_id_DF,local_id_DF)
- return(globalDF)
- }
- # }
- # Update variables if user changes run
- # get_var_names_for_ID <- function(bety,wID,runID){
- # var_names <- get_var_names(bety, wID, runID)
- # return(var_names)
- # }
-
+ }
+ return(globalDF)
+ }
+ # Fetches variable names from DB
+ # @param workflow_id and run_id
+ # @return List of variable names
var_names_all <- function(workflow_id, run_id){
+ # Get variables for a particular workflow and run id
var_names <- get_var_names(bety, workflow_id, run_id)
+ # Remove variables which should not be shown to the user
removeVarNames <- c('Year','FracJulianDay')
- var_names <-var_names[!var_names %in% removeVarNames]
+ var_names <- var_names[!var_names %in% removeVarNames]
return(var_names)
}
-
- # var_names1 <- reactive({
- # # run_ids <- get_run_ids(bety, workflow_id())
- # # var_names <- get_var_names(bety, workflow_id(), run_ids[1])
- # # Removing the variables "Year" and "FracJulianDay" from the Variable Name input in the app
- # req(input$workflow_id,input$run_id)
- # workflow_id <- input$workflow_id
- # run_id <- input$run_id
- # var_names <- get_var_names(bety, workflow_id, run_id)
- # removeVarNames <- c('Year','FracJulianDay')
- # var_names <-var_names[!var_names %in% removeVarNames]
- # return(var_names)
- # # return(id_list)
- # })
+ # Update variable names
observe({
req(input$all_run_id)
+ # All information about a model is contained in 'all_run_id' string
ids_DF <- parse_ids_from_input_runID(input$all_run_id)
var_name_list <- c()
for(row_num in 1:nrow(ids_DF)){
var_name_list <- c(var_name_list,var_names_all(ids_DF$wID[row_num],ids_DF$runID[row_num]))
- # var_name_list <- var_names_all(ids_DF$wID[row_num],ids_DF$runID[row_num])
}
updateSelectizeInput(session, "variable_name", choices=var_name_list)
})
- # If want to render text
- # output$info <- renderText({
- # # indicators <- strsplit(input$indicators, ",")[[1]]
- #
- # # if(input$load){
- # # all_workflow_id <- strsplit(input$all_workflow_id,',')
- # # }
- # # d <- typeof(all_workflow_id)
- # # paste0(input$all_run_id)
- #
- # paste0(parse_ids_from_input_runID(input$all_run_id)$runID)
- # # paste0(input$load)
- # # paste0(input$all_run_id[length(input$all_run_id)])
- # # paste0(input$variable_name)
- # # paste0(run_ids(),length(run_ids()),ids)
- # # ,session$clientData$url_search)
- # # paste0("x=", input$plot_dblclick$x, "\ny=", input$plot_dblclick$y)
- # })
-
+ # Load data for a single run of the model
+ # @param workflow_id and run_id
+ # @return Dataframe for one run
+ # For a particular combination of workflow and run id, loads
+ # all variables from all files.
load_data_single_run <- function(workflow_id,run_id){
globalDF <- data.frame()
workflow <- collect(workflow(bety, workflow_id))
- # var_names <- get_var_names(bety, workflow_id, run_id)
- # removeVarNames <- c('Year','FracJulianDay')
- # var_names <-var_names[!var_names %in% removeVarNames]
+ # Use the function 'var_names_all' to get all variables
var_names <- var_names_all(workflow_id,run_id)
+ # Using earlier code, refactored
if(nrow(workflow) > 0) {
outputfolder <- file.path(workflow$folder, 'out', run_id)
files <- list.files(outputfolder, "*.nc$", full.names=TRUE)
@@ -220,10 +109,13 @@ server <- shinyServer(function(input, output, session) {
ylab <- ""
var <- ncdf4::ncatt_get(nc, var_name)
#sw <- if ('Swdown' %in% names(nc$var)) ncdf4::ncvar_get(nc, 'Swdown') else TRUE
+ # Snow water
sw <- TRUE
+ # Check required bcoz many files dont contain title
if(!is.null(var$long_name)){
title <- var$long_name
}
+ # Check required bcoz many files dont contain units
if(!is.null(var$units)){
ylab <- var$units
}
@@ -234,99 +126,73 @@ server <- shinyServer(function(input, output, session) {
dates <- as.Date(dates)
vals <- if(is.na(vals)) y[b] else c(vals, y[b])
xlab <- "Time"
- # Not required to change xlab by ranges. Using ggplotly.
- # xlab <- if (is.null(ranges$x)) "Time" else paste(ranges$x, collapse=" - ")
+ # Values of the data which we will plot
valuesDF <- data.frame(dates,vals)
+ # Meta information about the data
metaDF <- data.frame(workflow_id,run_id,title,xlab,ylab,var_name)
- # Populating metaDF as same length of values DF
- # metaDF1<-metaDF[rep(seq_len(nrow(valuesDF))),]
currentDF <- cbind(valuesDF,metaDF)
globalDF <- rbind(globalDF,currentDF)
}
ncdf4::nc_close(nc)
}
}
+ # Required to convert from factors to characters
+ # Otherwise error by ggplotly
globalDF$title <- as.character(globalDF$title)
globalDF$xlab <- as.character(globalDF$xlab)
globalDF$ylab <- as.character(globalDF$ylab)
globalDF$var_name <- as.character(globalDF$var_name)
return(globalDF)
}
-
+ # Loads data for all workflow and run ids after the load button is pressed.
+ # All information about a model is contained in 'all_run_id' string
+ # Wrapper over 'load_data_single_run'
loadNewData <-eventReactive(input$load,{
- # workflow_id = 99000000077
- # run_id = 99000000002
- # var_name = var_names
req(input$all_run_id)
- globalDF <- data.frame()
+ # Get IDs DF from 'all_run_id' string
ids_DF <- parse_ids_from_input_runID(input$all_run_id)
+ globalDF <- data.frame()
for(row_num in 1:nrow(ids_DF)){
globalDF <- rbind(globalDF, load_data_single_run(ids_DF$wID[row_num],ids_DF$runID[row_num]))
}
return(globalDF)
})
+ # Renders the ggplotly
output$outputPlot <- renderPlotly({
- # masterDF <- load_data_single_run(input$workflow_id,input$run_id)
- masterDF <- loadNewData()
- # output$info1 <- renderText({
- # paste0(nrow(masterDF))
- # paste0(length(unique(masterDF$run_id)))
- # })
# Error messages
validate(
- # need(input$workflow_id, 'Found workflow id'),
- # need(input$run_id, 'Run id detected'),
- need(input$variable_name, 'Please wait! Loading data')
+ need(input$all_workflow_id, 'Select workflow id'),
+ need(input$all_run_id, 'Select Run id'),
+ need(input$variable_name, 'Click the button to load data')
)
+ # Load data
+ masterDF <- loadNewData()
+ # Convert from factor to character. For subsetting
masterDF$var_name <- as.character(masterDF$var_name)
+ # Convert to factor. Required for ggplot
masterDF$run_id <- as.factor(as.character(masterDF$run_id))
-
- # masterDF$var_name = as.factor(masterDF$var_name)
- # df1<-subset(masterDF,var_name==var_name)
-
- # Drop filtering
+ # Filter by variable name
df <- masterDF %>%
- dplyr::filter(
- # workflow_id == input$workflow_id &
- # run_id == input$run_id &
- var_name == input$variable_name)
- # %>%
- # dplyr::select(dates,vals,workflow_id,run_id)
-
+ dplyr::filter(var_name == input$variable_name)
+ # Meta information about the plot
title <- unique(df$title)
xlab <- unique(df$xlab)
ylab <- unique(df$ylab)
- # output$info2 <- renderText({
- # paste0(nrow(df))
- # # paste0(typeof(title))
- # })
- # output$info3 <- renderText({
- # paste0('xlab')
- # # paste0(typeof(title))
- # })
-
- # df1<-masterDF %>% filter(masterDF$var_name %in% var_name)
- # workflow_id %in% workflow_id)
- # & run_id == run_id & var_name == var_name)
- # df<-masterDF %>% dplyr::filter(workflow_id == input$workflow_id)
+ # ggplot function for now scatter plots.
+ # TODO Shubham allow line plots as well
plt <- ggplot(df, aes(x=dates, y=vals, color=run_id)) +
- # geom_point(aes(color="Model output")) +
geom_point() +
- # geom_smooth(aes(fill = "Spline fit")) +
- # coord_cartesian(xlim = ranges$x, ylim = ranges$y) +
+ # Earlier smoothing and y labels
+ # geom_smooth(aes(fill = "Spline fit")) +
# scale_y_continuous(labels=fancy_scientific) +
labs(title=title, x=xlab, y=ylab)
- # labs(title=unique(df$title)[1], x=unique(df$xlab)[1], y=unique(df$ylab)[1]) +
+ # Earlier color and fill values
# scale_color_manual(name = "", values = "black") +
# scale_fill_manual(name = "", values = "grey50")
- # theme(axis.text.x = element_text(angle = -90))
plt<-ggplotly(plt)
- # plot(plt)
+ # Not able to add icon over ggplotly
# add_icon()
- # }
- # }
})
-
# Shiny server closes here
})
diff --git a/shiny/workflowPlot/ui.R b/shiny/workflowPlot/ui.R
index a8f6db5d8cd..a2c7673474a 100644
--- a/shiny/workflowPlot/ui.R
+++ b/shiny/workflowPlot/ui.R
@@ -1,39 +1,21 @@
library(shiny)
source('helper.R')
-
# Define UI
ui <- shinyUI(fluidPage(
# Application title
titlePanel("Workflow Plots"),
-
sidebarLayout(
sidebarPanel(
# helpText(),
- p("Please select the workflow ID to continue. You can select multiple IDs"),
+ p("Please select the workflow IDs to continue. You can select multiple IDs"),
selectizeInput("all_workflow_id", "Mutliple Workflow IDs", c(),multiple=TRUE),
- p("Please select the run ID. You can select multiple IDs"),
+ p("Please select the run IDs. You can select multiple IDs"),
selectizeInput("all_run_id", "Mutliple Run IDs", c(),multiple=TRUE),
actionButton("load", "Load Model outputs"),
- # selectInput("workflow_id", "Workflow ID", c()),
- # selectInput("run_id", "Run ID", c()),
selectInput("variable_name", "Variable Name", "")
-
- # selectInput("workflow_id", "Workflow ID", c(99000000077)),
- # selectInput("run_id", "Run ID", c(99000000002)),
- # selectInput("variable_name", "Variable Name", c("AutoResp","GPP"))
),
mainPanel(
- plotlyOutput("outputPlot"
- ## brushOpts and dblclick not supported by plotly
- # brush = brushOpts(id = "plot_brush",
- # resetOnNew = TRUE),
- # dblclick = "plot_dblclick"
- )
- # Checking variable names
- # verbatimTextOutput("info"),
- # verbatimTextOutput("info1"),
- # verbatimTextOutput("info2"),
- # verbatimTextOutput("info3")
+ plotlyOutput("outputPlot")
)
)
))
From 9f7185c23488e192f281f57f41157f05b9a224c6 Mon Sep 17 00:00:00 2001
From: Ann Raiho
Date: Sat, 24 Jun 2017 18:54:04 -0400
Subject: [PATCH 058/771] small changes
---
models/sipnet/R/sample.IC.SIPNET.R | 4 +-
modules/assim.sequential/R/sda.enkf.R | 55 ++++++++++++++++++---------
2 files changed, 39 insertions(+), 20 deletions(-)
diff --git a/models/sipnet/R/sample.IC.SIPNET.R b/models/sipnet/R/sample.IC.SIPNET.R
index 4c130a2187a..869d655d198 100644
--- a/models/sipnet/R/sample.IC.SIPNET.R
+++ b/models/sipnet/R/sample.IC.SIPNET.R
@@ -23,12 +23,12 @@ sample.IC.SIPNET <- function(ne, state, year = 1) {
## Mg C / ha / yr NPP
NPP <- ifelse(rep("NPP" %in% names(state), ne),
- state$NPP[1, sample.int(ncol(state$NPP), ne), year], # *.48, ## unit MgC/ha/yr
+ udunits2::ud.convert(state$NPP[sample.int(length(state$NPP), ne)],'kg/m^2/s','Mg/ha/yr'), # *.48, ## unit MgC/ha/yr
runif(ne, 0, 10)) ## prior
# g C * m-2 ground area in wood (above-ground + roots)
plantWood <- ifelse(rep("AGB" %in% names(state), ne),
- state$AGB[1, sample.int(ncol(state$AGB), ne), year] * (1/1000) * (1e+06/1), ## unit KgC/ha -> g C /m^2
+ udunits2::ud.convert(state$AGB[sample.int(length(state$AGB), ne)],'kg/m^2','g/m^2'), ## unit KgC/ha -> g C /m^2
runif(ne, 0, 14000)) ## prior
# initial leaf area, m2 leaves * m-2 ground area (multiply by leafCSpWt to
diff --git a/modules/assim.sequential/R/sda.enkf.R b/modules/assim.sequential/R/sda.enkf.R
index 0b34db78727..7ed0f77d369 100644
--- a/modules/assim.sequential/R/sda.enkf.R
+++ b/modules/assim.sequential/R/sda.enkf.R
@@ -466,11 +466,19 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
Pf <- Pf + Q
}
- mu.f.scale <- scale(mu.f, center = mean(mu.f), scale = 1)
- Pf.scale <- cov(scale(X, center = mu.f, scale = rep(1,length(mu.f))))
+ mu.f.scale <- mu.f / mu.f
+ mu.f.scale[is.na(mu.f.scale)]<-0
+ map.mu.f <- H%*%mu.f
+ Y.scale <- Y/map.mu.f ##need H in here to match mu.f's to Y's
+ Pf.scale <- t(t(Pf/mu.f)/mu.f)
Pf.scale[is.na(Pf.scale)]<-0
- R.scale <- matrix(scale(as.vector(R), center = mean(mu.f), scale = 1),2,2)
- Y.scale <- scale(Y, center = mean(mu.f[1:2]), scale = 1)
+ R.scale <- t(t(R/as.vector(map.mu.f))/as.vector(map.mu.f))
+
+ # mu.f.scale <- scale(mu.f,center = FALSE, scale = mean(mu.f))
+ # Pf.scale <- mu.f*Pf%*%t(t(mu.f))
+ # Pf.scale[is.na(Pf.scale)]<-0
+ # R.scale <- matrix(scale(as.vector(R), center = mean(mu.f), scale = 1),2,2)
+ # Y.scale <- scale(Y, center = mean(mu.f[1:2]), scale = 1)
## Kalman Gain
K <- Pf.scale %*% t(H) %*% solve((R.scale + H %*% Pf.scale %*% t(H)))
@@ -478,9 +486,8 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
mu.a.scale <- mu.f.scale + K %*% (Y.scale - H %*% mu.f.scale)
Pa.scale <- (diag(ncol(X)) - K %*% H) %*% Pf.scale
- Pa <- Pa.scale * attr(mu.f.scale, 'scaled:scale') + attr(mu.f.scale, 'scaled:center')
- mu.a <- mu.a.scale * attr(mu.f.scale, 'scaled:scale') + attr(mu.f.scale, 'scaled:center')
-
+ Pa <- t(t(Pa.scale*mu.f)*mu.f)
+ mu.a <- mu.a.scale * mu.f
## Kalman Gain
#K <- Pf %*% t(H) %*% solve((R + H %*% Pf %*% t(H)))
@@ -795,9 +802,9 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
# ## check if ensemble var is correct
# cbind(as.vector(Pa),as.vector(cov(X_a)))
#
- # analysis <- as.data.frame(rmvnorm(as.numeric(nens), mu.a, Pa, method = "svd"))
+ analysis <- as.data.frame(rmvnorm(as.numeric(nens), mu.a, Pa, method = "svd"))
- analysis <- as.data.frame(X_a)
+ #analysis <- as.data.frame(X_a)
colnames(analysis) <- colnames(X)
##### Mapping analysis vectors to be in bounds of state variables
@@ -826,7 +833,8 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
tmp[mch] <- x[mch]
tmp
}))
- Ybar <- Ybar[, na.omit(pmatch(colnames(X), colnames(Ybar)))]
+ Y.order <- na.omit(pmatch(colnames(X), colnames(Ybar)))
+ Ybar <- Ybar[,Y.order]
YCI <- t(as.matrix(sapply(obs.cov[t1:t], function(x) {
if (is.null(x)) {
rep(NA, length(names.y))
@@ -834,6 +842,8 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
sqrt(diag(x))
})))
+ YCI <- YCI[,Y.order]
+
par(mfrow = c(2, 1))
for (i in 1:ncol(FORECAST[[t]])) {#
t1 <- 1
@@ -846,16 +856,17 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
ylab.names <- unlist(sapply(settings$state.data.assimilation$state.variable,
function(x) { x })[2, ], use.names = FALSE)
- plot(as.Date(obs.times[t1:t]),
- Xbar,
- ylim = range(c(XaCI, Xci), na.rm = TRUE),
- type = "n",
- xlab = "Year",
- ylab = ylab.names[grep(colnames(X)[i], var.names)],
- main = colnames(X)[i])
+
# observation / data
if (i <= ncol(Ybar)) {
+ plot(as.Date(obs.times[t1:t]),
+ Xbar,
+ ylim = range(c(XaCI, Xci, Ybar[,i]), na.rm = TRUE),
+ type = "n",
+ xlab = "Year",
+ ylab = ylab.names[grep(colnames(X)[i], var.names)],
+ main = colnames(X)[i])
ciEnvelope(as.Date(obs.times[t1:t]),
as.numeric(Ybar[, i]) - as.numeric(YCI[, i]) * 1.96,
as.numeric(Ybar[, i]) + as.numeric(YCI[, i]) * 1.96,
@@ -865,6 +876,14 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
type = "l",
col = "darkgreen",
lwd = 2)
+ }else{
+ plot(as.Date(obs.times[t1:t]),
+ Xbar,
+ ylim = range(c(XaCI, Xci), na.rm = TRUE),
+ type = "n",
+ xlab = "Year",
+ ylab = ylab.names[grep(colnames(X)[i], var.names)],
+ main = colnames(X)[i])
}
# forecast
@@ -874,7 +893,7 @@ sda.enkf <- function(settings, obs.mean, obs.cov, IC = NULL, Q = NULL) {
# analysis
ciEnvelope(as.Date(obs.times[t1:t]), XaCI[, 1], XaCI[, 2], col = alphapink)
lines(as.Date(obs.times[t1:t]), Xa, col = "black", lty = 2, lwd = 2)
- legend('topright',c('Forecast','Data','Analysis'),col=c(alphablue,alphagreen,alphapink),lty=1,lwd=5)
+ #legend('topright',c('Forecast','Data','Analysis'),col=c(alphablue,alphagreen,alphapink),lty=1,lwd=5)
}
}
From e14cbafbce6a122259b256331937d188c5848195 Mon Sep 17 00:00:00 2001
From: "Shawn P. Serbin"
Date: Wed, 28 Jun 2017 09:05:55 -0400
Subject: [PATCH 059/771] Updates to job templates. Cloning still doesn't work
as it should
---
models/fates/inst/template.job | 41 ++---
models/fates/inst/template.job.create_clone | 190 ++++++++++++++++++++
2 files changed, 211 insertions(+), 20 deletions(-)
create mode 100644 models/fates/inst/template.job.create_clone
diff --git a/models/fates/inst/template.job b/models/fates/inst/template.job
index 7ed2e9b05fd..8e72f806f70 100644
--- a/models/fates/inst/template.job
+++ b/models/fates/inst/template.job
@@ -64,8 +64,8 @@ export GFORTRAN_UNBUFFERED_PRECONNECTED=yes
# Modifying : env_run.xml
./xmlchange -file env_run.xml -id REST_N -val 1
./xmlchange -file env_run.xml -id REST_OPTION -val nyears
-./xmlchange -file env_run.xml -id DATM_CLMNCEP_YR_START -val 1999
-./xmlchange -file env_run.xml -id DATM_CLMNCEP_YR_END -val 1999
+./xmlchange -file env_run.xml -id DATM_CLMNCEP_YR_START -val 1974
+./xmlchange -file env_run.xml -id DATM_CLMNCEP_YR_END -val 2004
./xmlchange -file env_run.xml -id DIN_LOC_ROOT -val @INDIR@
./xmlchange -file env_run.xml -id DIN_LOC_ROOT_CLMFORC -val '@INDIR@'
./xmlchange -file env_run.xml -id DOUT_S -val TRUE
@@ -80,9 +80,9 @@ export GFORTRAN_UNBUFFERED_PRECONNECTED=yes
## ENV_BUILD update configurations
./xmlchange -file env_build.xml -id CIME_OUTPUT_ROOT -val @CASEDIR@
- #./xmlchange -file env_build.xml -id EXEROOT -val @BLD@
- ./xmlchange -file env_build.xml -id EXEROOT -val @CASEDIR@/bld
- #./xmlchange -file env_build.xml -id BUILD_COMPLETE -val TRUE
+ #./xmlchange -file env_build.xml -id EXEROOT -val @BLD@ # this is the way it should be set, long term
+ ./xmlchange -file env_build.xml -id EXEROOT -val @CASEDIR@/bld # temporary fix
+ #./xmlchange -file env_build.xml -id BUILD_COMPLETE -val TRUE # TEMPORARY! This eventually needs to be uncommented so we don't build the model each time !
## DATES -> ENV_RUN
./xmlchange -file env_run.xml -id RUNDIR -val @CASEDIR@/run
@@ -134,8 +134,7 @@ EOF
echo "*** Run case.build ***"
sleep 10
- ./case.build
- #./case.build --sharedlib-only
+ ./case.build # ! Long run, we should not be building the model. But current BUILD_COMPLETE doesn't seem to be working !
## RUN
echo "*** Run ***"
@@ -146,20 +145,22 @@ EOF
echo `pwd`
echo `ls -altr`
- ## RUNDIR FILE LINKS
- if [ -e @RUNDIR@/datm_atm_in]
- then
- rm datm_atm_in
- ln -s @RUNDIR@/datm_atm_in .
- fi
- if [ -e @RUNDIR@/datm.streams.txt.PEcAn_met]
- then
- rm datm.stream.txt.CLM_QIAN*
- ln -s @RUNDIR@/datm.streams.txt.PEcAn_met .
- fi
+ ## RUNDIR FILE LINKS -- CURRENTLY GETTING SEG FAULTS WHEN TRYING TO RUN WITH PECAN MET
+ #if [ -e @RUNDIR@/datm_atm_in ]
+ # then
+ # rm datm_atm_in datm_in
+ # ln -s @RUNDIR@/datm_atm_in .
+ # ln -s @RUNDIR@/datm_atm_in datm_in
+ #fi
+ #if [ -e @RUNDIR@/datm.streams.txt.PEcAn_met ]
+ # then
+ #rm datm.stream.txt.CLM_QIAN*
+ # rm datm.streams.txt.CLM_QIAN* # bug fix, s was missing!
+ # ln -s @RUNDIR@/datm.streams.txt.PEcAn_met .
+ #fi
- #"@BINARY@" # EDITED BY SPS
- "@CASEDIR@/bld/cesm.exe" # edited for testing
+ #"@BINARY@" # ! Long term, we should be running it this way !
+ "@CASEDIR@/bld/cesm.exe" # edited for testing, ! TEMPORARY. NEED TO SWITCH BACK ONCE BUILD_COMPLETE works !
STATUS=$?
diff --git a/models/fates/inst/template.job.create_clone b/models/fates/inst/template.job.create_clone
new file mode 100644
index 00000000000..c2166a77f04
--- /dev/null
+++ b/models/fates/inst/template.job.create_clone
@@ -0,0 +1,190 @@
+#!/bin/bash -l
+
+# redirect output
+exec 3>&1
+exec &> "@OUTDIR@/logfile.txt"
+
+# host specific setup
+@HOST_SETUP@
+
+# create output folder
+mkdir -p "@OUTDIR@"
+
+# flag needed for ubuntu
+export GFORTRAN_UNBUFFERED_PRECONNECTED=yes
+
+# see if application needs running
+#if [ ! -e "@OUTDIR@/pecan.done" ]; then
+
+ ## Figure out where CIME SCRIPTS are installed
+ cd @REFCASE@
+ IFS=' ' read -ra SCRIPTROOT <<< `./xmlquery SCRIPTSROOT -value`
+ echo "CIME script root: "
+ echo ${SCRIPTROOT}
+
+ ## Seem to be stuck having to build a new case. Will try and avoid this in the future
+ cd ${SCRIPTROOT}
+ echo "*** Run create_newcase ***"
+ echo " ----- Case details:"
+ echo @CASEDIR@
+ #echo "Res: @RES@ "
+ #echo "Compset: @COMPSET@ "
+ #echo "Machine: @MACHINE@ "
+ #echo "Compiler: @COMPILER@ "
+ #echo "Project_name: @PROJECT@ "
+ echo "--------------------------"
+ #./create_newcase -case @CASEDIR@ -res 1x1_brazil -compset ICLM45ED -mach @MACHINE@ -compiler @COMPILER@ -project @PROJECT@
+ ./create_clone --verbose --case @CASEDIR@ --clone @REFCASE@ --keepexe
+
+ cd "@RUNDIR@"
+
+ ## RECURSIVELY COPY/SYMLINK REFERENCE INPUTS DIRECTORY (DIN_LOC_ROOT)
+ ## create folders and symbolic links. Links will later be deleted when non-default files are specified
+ mkdir input
+ echo "PEcAn.FATES::recurse.create('input','@DEFAULT@')" | R --vanilla
+
+ cd "@CASEDIR@"
+
+ ## THINGS THAT ARE IN REFCASE
+ # Modifying : env_mach_pes.xml
+ echo "*** Modify XMLs ***"
+./xmlchange -file env_mach_pes.xml -id NTASKS_ATM -val 1
+./xmlchange -file env_mach_pes.xml -id NTASKS_LND -val 1
+./xmlchange -file env_mach_pes.xml -id NTASKS_ICE -val 1
+./xmlchange -file env_mach_pes.xml -id NTASKS_OCN -val 1
+./xmlchange -file env_mach_pes.xml -id NTASKS_CPL -val 1
+./xmlchange -file env_mach_pes.xml -id NTASKS_GLC -val 1
+./xmlchange -file env_mach_pes.xml -id NTASKS_ROF -val 1
+./xmlchange -file env_mach_pes.xml -id NTASKS_WAV -val 1
+./xmlchange -file env_mach_pes.xml -id MAX_TASKS_PER_NODE -val 1
+./xmlchange -file env_mach_pes.xml -id TOTALPES -val 1
+ # Modifying : env_build.xml
+./xmlchange -file env_build.xml -id GMAKE -val make
+./xmlchange -file env_build.xml -id DEBUG -val FALSE
+ # Modifying : env_run.xml
+./xmlchange -file env_run.xml -id REST_N -val 1
+./xmlchange -file env_run.xml -id REST_OPTION -val nyears
+./xmlchange -file env_run.xml -id DATM_CLMNCEP_YR_START -val 1974
+./xmlchange -file env_run.xml -id DATM_CLMNCEP_YR_END -val 2004
+./xmlchange -file env_run.xml -id DIN_LOC_ROOT -val @INDIR@
+./xmlchange -file env_run.xml -id DIN_LOC_ROOT_CLMFORC -val '@INDIR@'
+./xmlchange -file env_run.xml -id DOUT_S -val TRUE
+./xmlchange -file env_run.xml -id DOUT_S_ROOT -val '@CASEDIR@/run'
+./xmlchange -file env_run.xml -id PIO_DEBUG_LEVEL -val 0
+./xmlchange -file env_run.xml -id ATM_DOMAIN_FILE -val 'domain.lnd.@SITE_NAME@.nc'
+./xmlchange -file env_run.xml -id ATM_DOMAIN_PATH -val '@INDIR@/share/domains/domain.clm/'
+./xmlchange -file env_run.xml -id LND_DOMAIN_FILE -val 'domain.lnd.@SITE_NAME@.nc'
+./xmlchange -file env_run.xml -id LND_DOMAIN_PATH -val '@INDIR@/share/domains/domain.clm/'
+./xmlchange -file env_run.xml -id CLM_USRDAT_NAME -val '@SITE_NAME@'
+## END REFCASE
+
+ ## ENV_BUILD update configurations
+ ./xmlchange -file env_build.xml -id CIME_OUTPUT_ROOT -val @CASEDIR@
+ ./xmlchange -file env_build.xml -id EXEROOT -val @BLD@ # this is the way it should be set, long term
+ #./xmlchange -file env_build.xml -id EXEROOT -val @CASEDIR@/bld # temporary fix
+ ./xmlchange -file env_build.xml -id BUILD_COMPLETE -val TRUE # TEMPORARY! This eventually needs to be uncommented so we don't build the model each time !
+
+ ## DATES -> ENV_RUN
+ ./xmlchange -file env_run.xml -id RUNDIR -val @CASEDIR@/run
+ ./xmlchange -file env_run.xml -id RUN_STARTDATE -val @START_DATE@
+ ./xmlchange -file env_run.xml -id STOP_OPTION -val ndays
+ ./xmlchange -file env_run.xml -id STOP_N -val @STOP_N@
+
+ ## SITE INFO --> DOMAIN FILE
+ rm @INDIR@/share/domains/domain.clm/*
+ ln -s @RUNDIR@/domain.lnd.@SITE_NAME@.nc @INDIR@/share/domains/domain.clm/
+
+ ## SURFDATA
+ rm @INDIR@/lnd/clm2/surfdata_map/surfdata*
+ ln -s @RUNDIR@/surfdata_@SITE_NAME@_simyr2000.nc @INDIR@/lnd/clm2/surfdata_map/
+ SURFMAP=@INDIR@/lnd/clm2/surfdata_map/surfdata_@SITE_NAME@_simyr2000.nc
+cat >> user_nl_clm << EOF
+fsurdat = '@INDIR@/lnd/clm2/surfdata_map/surfdata_@SITE_NAME@_simyr2000.nc'
+finidat = ' '
+EOF
+
+ ## PARAMETERS
+ rm @INDIR@/lnd/clm2/paramdata/*
+ #ln -s @RUNDIR@/clm_params_ed.@RUN_ID@.nc @INDIR@/lnd/clm2/paramdata/
+ ln -s @RUNDIR@/clm_params.@RUN_ID@.nc @INDIR@/lnd/clm2/paramdata/
+ ln -s @RUNDIR@/fates_params.@RUN_ID@.nc @INDIR@/lnd/clm2/paramdata/
+#cat >> user_nl_clm << EOF
+#paramfile = '@INDIR@/lnd/clm2/paramdata/clm_params_ed.@RUN_ID@.nc'
+#EOF
+cat >> user_nl_clm << EOF
+fates_paramfile = '@INDIR@/lnd/clm2/paramdata/fates_params.@RUN_ID@.nc'
+paramfile = '@INDIR@/lnd/clm2/paramdata/clm_params.@RUN_ID@.nc'
+EOF
+
+ ## APPLY CONFIG CHANGES
+ echo "*** Run case.setup ***"
+ ./case.setup
+
+ ## ADDITIONAL MODS THAT ARE JUST ASSOCIATED WITH REFCASE - removed 'NEP' 'NPP_column'
+cat >> user_nl_clm << EOF
+hist_empty_htapes = .true.
+hist_fincl1='EFLX_LH_TOT','TSOI_10CM','QVEGT','GPP','AR','ED_bleaf','ED_biomass','NPP','MAINT_RESP','GROWTH_RESP'
+hist_mfilt = 8760
+hist_nhtfrq = -1
+EOF
+
+# Modify user_nl_datm
+#cat >> user_nl_datm << EOF
+#EOF
+
+ echo "*** Run case.build ***"
+ sleep 10
+ ./case.build # ! Long run, we should not be building the model. But current BUILD_COMPLETE doesn't seem to be working !
+
+ ## RUN
+ echo "*** Run ***"
+ now=`date`
+ echo "Simulation start: $now"
+ cd run
+ mkdir timing
+ echo `pwd`
+ echo `ls -altr`
+
+ ## RUNDIR FILE LINKS -- CURRENTLY GETTING SEG FAULTS WHEN TRYING TO RUN WITH PECAN MET
+ #if [ -e @RUNDIR@/datm_atm_in ]
+ # then
+ # rm datm_atm_in datm_in
+ # ln -s @RUNDIR@/datm_atm_in .
+ # ln -s @RUNDIR@/datm_atm_in datm_in
+ #fi
+ #if [ -e @RUNDIR@/datm.streams.txt.PEcAn_met ]
+ # then
+ #rm datm.stream.txt.CLM_QIAN*
+ # rm datm.streams.txt.CLM_QIAN* # bug fix, s was missing!
+ # ln -s @RUNDIR@/datm.streams.txt.PEcAn_met .
+ #fi
+
+ "@BINARY@" # ! Long term, we should be running it this way !
+ #"@CASEDIR@/bld/cesm.exe" # edited for testing, ! TEMPORARY. NEED TO SWITCH BACK ONCE BUILD_COMPLETE works !
+ STATUS=$?
+
+
+ # check the status
+ if [ $STATUS -ne 0 ]; then
+ echo -e "ERROR IN MODEL RUN\nLogfile is located at '@OUTDIR@/logfile.txt'" >&3
+ exit $STATUS
+ fi
+
+# host specific post-run
+@HOST_TEARDOWN@
+
+ # convert output
+ cp *clm2.h0.*.nc @OUTDIR@
+ echo "library(PEcAn.FATES); model2netcdf.FATES('@OUTDIR@')" | R --vanilla
+
+
+ # copy readme with specs to output
+ cp "@RUNDIR@/README.txt" "@OUTDIR@/README.txt"
+
+ # write tag so future execution knows run finished
+ echo $(date) >> "@OUTDIR@/pecan.done"
+
+ sleep 60
+
+# all done
+echo -e "MODEL FINISHED\nLogfile is located at '@OUTDIR@/logfile.txt'" >&3
From a2e3e749ab4868f4b1954adeec90639704b90679 Mon Sep 17 00:00:00 2001
From: "Shawn P. Serbin"
Date: Thu, 29 Jun 2017 15:06:04 -0400
Subject: [PATCH 060/771] Update to template.job
---
models/fates/inst/template.job | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/models/fates/inst/template.job b/models/fates/inst/template.job
index 8e72f806f70..037fa96eed5 100644
--- a/models/fates/inst/template.job
+++ b/models/fates/inst/template.job
@@ -145,6 +145,7 @@ EOF
echo `pwd`
echo `ls -altr`
+ ## ----------- Disabled for now
## RUNDIR FILE LINKS -- CURRENTLY GETTING SEG FAULTS WHEN TRYING TO RUN WITH PECAN MET
#if [ -e @RUNDIR@/datm_atm_in ]
# then
@@ -158,7 +159,8 @@ EOF
# rm datm.streams.txt.CLM_QIAN* # bug fix, s was missing!
# ln -s @RUNDIR@/datm.streams.txt.PEcAn_met .
#fi
-
+ ## ------------- MET needs to be re-enabled in a later PR
+
#"@BINARY@" # ! Long term, we should be running it this way !
"@CASEDIR@/bld/cesm.exe" # edited for testing, ! TEMPORARY. NEED TO SWITCH BACK ONCE BUILD_COMPLETE works !
STATUS=$?
From e3c663d90e80a80a7c681fe8fcc750b4ad31b117 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sat, 1 Jul 2017 17:06:29 -0500
Subject: [PATCH 061/771] Moving functions from helper.R to query.dplyr.R
---
db/R/query.dplyr.R | 83 +++++++++++++++++++++++++++++++++++++
shiny/workflowPlot/helper.R | 2 -
shiny/workflowPlot/server.R | 41 +++++++++---------
visualization/DESCRIPTION | 6 ++-
4 files changed, 108 insertions(+), 24 deletions(-)
diff --git a/db/R/query.dplyr.R b/db/R/query.dplyr.R
index 17be1215dec..310df3460cd 100644
--- a/db/R/query.dplyr.R
+++ b/db/R/query.dplyr.R
@@ -210,3 +210,86 @@ get_var_names <- function(bety, workflow_id, run_id, remove_pool = TRUE) {
}
return(var_names)
} # get_var_names
+
+#' Get vector of variable names for a particular workflow and run ID
+#' @inheritParams dbHostInfo
+#' @inheritParams workflow
+#' @param run_id Run ID
+#' @param workflow_id Workflow ID
+#' @export
+var_names_all <- function(bety, workflow_id, run_id) {
+ # @return List of variable names
+ # Get variables for a particular workflow and run id
+ var_names <- get_var_names(bety, workflow_id, run_id)
+ # Remove variables which should not be shown to the user
+ removeVarNames <- c('Year','FracJulianDay')
+ var_names <- var_names[!var_names %in% removeVarNames]
+ return(var_names)
+} # var_names_all
+
+#' Load data for a single run of the model
+#' @inheritParams dbHostInfo
+#' @inheritParams workflow
+#' @param run_id Run ID
+#' @param workflow_id Workflow ID
+#' @export
+load_data_single_run <- function(bety, workflow_id,run_id) {
+ # For a particular combination of workflow and run id, loads
+ # all variables from all files.
+ # @return Dataframe for one run
+ # Adapted from earlier code in pecan/shiny/workflowPlot/server.R
+ globalDF <- data.frame()
+ workflow <- collect(workflow(bety, workflow_id))
+ # Use the function 'var_names_all' to get all variables
+ removeVarNames <- c('Year','FracJulianDay')
+ var_names <- var_names_all(bety,workflow_id,run_id)
+ # Using earlier code, refactored
+ if(nrow(workflow) > 0) {
+ outputfolder <- file.path(workflow$folder, 'out', run_id)
+ files <- list.files(outputfolder, "*.nc$", full.names=TRUE)
+ for(file in files) {
+ nc <- nc_open(file)
+ for(var_name in var_names){
+ dates <- NA
+ vals <- NA
+ title <- var_name
+ ylab <- ""
+ var <- ncdf4::ncatt_get(nc, var_name)
+ #sw <- if ('Swdown' %in% names(nc$var)) ncdf4::ncvar_get(nc, 'Swdown') else TRUE
+ # Snow water
+ sw <- TRUE
+ # Check required bcoz many files don't contain title
+ if(!is.null(var$long_name)){
+ title <- var$long_name
+ }
+ # Check required bcoz many files don't contain units
+ if(!is.null(var$units)){
+ ylab <- var$units
+ }
+ x <- ncdays2date(ncdf4::ncvar_get(nc, 'time'), ncdf4::ncatt_get(nc, 'time'))
+ y <- ncdf4::ncvar_get(nc, var_name)
+ b <- !is.na(x) & !is.na(y) & sw != 0
+ dates <- if(is.na(dates)) x[b] else c(dates, x[b])
+ dates <- as.Date(dates)
+ vals <- if(is.na(vals)) y[b] else c(vals, y[b])
+ xlab <- "Time"
+ # Values of the data which we will plot
+ valuesDF <- data.frame(dates,vals)
+ # Meta information about the data.
+ metaDF <- data.frame(workflow_id,run_id,title,xlab,ylab,var_name)
+ # Meta and Values DF created differently because they would of different
+ # number of rows. cbind would repeat metaDF(1X6) to the size of valuesDF
+ currentDF <- cbind(valuesDF,metaDF)
+ globalDF <- rbind(globalDF,currentDF)
+ }
+ ncdf4::nc_close(nc)
+ }
+ }
+ # Required to convert from factors to characters
+ # Otherwise error by ggplotly
+ globalDF$title <- as.character(globalDF$title)
+ globalDF$xlab <- as.character(globalDF$xlab)
+ globalDF$ylab <- as.character(globalDF$ylab)
+ globalDF$var_name <- as.character(globalDF$var_name)
+ return(globalDF)
+} #load_data_single_run
diff --git a/shiny/workflowPlot/helper.R b/shiny/workflowPlot/helper.R
index 9390b6d30b7..978e37e0a8a 100644
--- a/shiny/workflowPlot/helper.R
+++ b/shiny/workflowPlot/helper.R
@@ -10,5 +10,3 @@ isInstalled <- function(mypkg){
is.element(mypkg, installed.packages()[,1])
}
checkAndDownload(c('plotly','scales','dplyr'))
-# devtools::install_github('hadley/ggplot2')
-# testVal = 5
\ No newline at end of file
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index fae04a4272b..6672a607510 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -3,6 +3,7 @@ library(PEcAn.DB)
library(shiny)
library(ncdf4)
library(ggplot2)
+# Helper allows to load functions and variables that could be shared both by server.R and ui.R
source('helper.R')
library(plotly)
library(scales)
@@ -12,7 +13,7 @@ server <- shinyServer(function(input, output, session) {
bety <- betyConnect()
# Update all workflow ids
observe({
- # Ideally the get_workflow_ids function (line 137) in db/R/query.dplyr.R should take a flag to check
+ # Ideally get_workflow_ids function (line 137) in db/R/query.dplyr.R should take a flag to check
# if we want to load all workflow ids.
# get_workflow_id function from query.dplyr.R
all_ids <- get_workflow_ids(bety, session,all.ids=TRUE)
@@ -43,7 +44,7 @@ server <- shinyServer(function(input, output, session) {
updateSelectizeInput(session, "all_run_id", choices=all_run_ids())
})
return_DF_from_run_ID <- function(diff_ids){
- # Called by the function parse_ids_from_input_runID
+ # Called by function parse_ids_from_input_runID
# which is a wrapper of this function
# Returns a DF for a particular run_id
split_string <- strsplit(diff_ids,',')[[1]]
@@ -67,14 +68,14 @@ server <- shinyServer(function(input, output, session) {
# Fetches variable names from DB
# @param workflow_id and run_id
# @return List of variable names
- var_names_all <- function(workflow_id, run_id){
- # Get variables for a particular workflow and run id
- var_names <- get_var_names(bety, workflow_id, run_id)
- # Remove variables which should not be shown to the user
- removeVarNames <- c('Year','FracJulianDay')
- var_names <- var_names[!var_names %in% removeVarNames]
- return(var_names)
- }
+ # var_names_all <- function(bety,workflow_id, run_id){
+ # # Get variables for a particular workflow and run id
+ # var_names <- get_var_names(bety, workflow_id, run_id)
+ # # Remove variables which should not be shown to the user
+ # removeVarNames <- c('Year','FracJulianDay')
+ # var_names <- var_names[!var_names %in% removeVarNames]
+ # return(var_names)
+ # }
# Update variable names
observe({
req(input$all_run_id)
@@ -82,20 +83,20 @@ server <- shinyServer(function(input, output, session) {
ids_DF <- parse_ids_from_input_runID(input$all_run_id)
var_name_list <- c()
for(row_num in 1:nrow(ids_DF)){
- var_name_list <- c(var_name_list,var_names_all(ids_DF$wID[row_num],ids_DF$runID[row_num]))
+ var_name_list <- c(var_name_list,var_names_all(bety,ids_DF$wID[row_num],ids_DF$runID[row_num]))
}
updateSelectizeInput(session, "variable_name", choices=var_name_list)
})
- # Load data for a single run of the model
- # @param workflow_id and run_id
- # @return Dataframe for one run
- # For a particular combination of workflow and run id, loads
- # all variables from all files.
- load_data_single_run <- function(workflow_id,run_id){
+ # # Load data for a single run of the model
+ # # @param workflow_id and run_id
+ # # @return Dataframe for one run
+ # # For a particular combination of workflow and run id, loads
+ # # all variables from all files.
+ load_data_single_run <- function(bety,workflow_id,run_id){
globalDF <- data.frame()
workflow <- collect(workflow(bety, workflow_id))
# Use the function 'var_names_all' to get all variables
- var_names <- var_names_all(workflow_id,run_id)
+ var_names <- var_names_all(bety,workflow_id,run_id)
# Using earlier code, refactored
if(nrow(workflow) > 0) {
outputfolder <- file.path(workflow$folder, 'out', run_id)
@@ -153,7 +154,7 @@ server <- shinyServer(function(input, output, session) {
ids_DF <- parse_ids_from_input_runID(input$all_run_id)
globalDF <- data.frame()
for(row_num in 1:nrow(ids_DF)){
- globalDF <- rbind(globalDF, load_data_single_run(ids_DF$wID[row_num],ids_DF$runID[row_num]))
+ globalDF <- rbind(globalDF, load_data_single_run(bety,ids_DF$wID[row_num],ids_DF$runID[row_num]))
}
return(globalDF)
})
@@ -193,7 +194,7 @@ server <- shinyServer(function(input, output, session) {
# Not able to add icon over ggplotly
# add_icon()
})
- # Shiny server closes here
+# Shiny server closes here
})
# runApp(port=6480, launch.browser=FALSE)
diff --git a/visualization/DESCRIPTION b/visualization/DESCRIPTION
index 8cb0f6fc2f2..1fec24918f3 100644
--- a/visualization/DESCRIPTION
+++ b/visualization/DESCRIPTION
@@ -28,13 +28,15 @@ Depends:
PEcAn.DB,
RPostgreSQL,
dplyr,
- dbplyr
+ dbplyr,
+ plotly
Imports:
lubridate (>= 1.6.0),
ncdf4 (>= 1.15),
plyr (>= 1.8.4),
stringr(>= 1.1.0),
- udunits2 (>= 0.11)
+ udunits2 (>= 0.11),
+ plotly(>=4.6.0)
Suggests:
testthat (>= 1.0.2),
png,
From 17f1e88dc7ad28a50a7222e82d619c3b4eaf0303 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sat, 1 Jul 2017 21:04:19 -0500
Subject: [PATCH 062/771] Updating PR based on comments
---
db/NAMESPACE | 2 +
db/R/query.dplyr.R | 6 +--
db/man/get_workflow_ids.Rd | 2 +-
db/man/load_data_single_run.Rd | 18 +++++++++
db/man/var_names_all.Rd | 18 +++++++++
shiny/workflowPlot/server.R | 69 ----------------------------------
6 files changed, 41 insertions(+), 74 deletions(-)
create mode 100644 db/man/load_data_single_run.Rd
create mode 100644 db/man/var_names_all.Rd
diff --git a/db/NAMESPACE b/db/NAMESPACE
index 10a6ffa767b..772019202cf 100644
--- a/db/NAMESPACE
+++ b/db/NAMESPACE
@@ -30,6 +30,7 @@ export(get_run_ids)
export(get_users)
export(get_var_names)
export(get_workflow_ids)
+export(load_data_single_run)
export(ncdays2date)
export(query.base)
export(query.base.con)
@@ -45,5 +46,6 @@ export(rename.jags.columns)
export(runModule.get.trait.data)
export(runs)
export(take.samples)
+export(var_names_all)
export(workflow)
export(workflows)
diff --git a/db/R/query.dplyr.R b/db/R/query.dplyr.R
index 310df3460cd..042cb4e3bb9 100644
--- a/db/R/query.dplyr.R
+++ b/db/R/query.dplyr.R
@@ -212,8 +212,7 @@ get_var_names <- function(bety, workflow_id, run_id, remove_pool = TRUE) {
} # get_var_names
#' Get vector of variable names for a particular workflow and run ID
-#' @inheritParams dbHostInfo
-#' @inheritParams workflow
+#' @inheritParams get_var_names
#' @param run_id Run ID
#' @param workflow_id Workflow ID
#' @export
@@ -228,8 +227,7 @@ var_names_all <- function(bety, workflow_id, run_id) {
} # var_names_all
#' Load data for a single run of the model
-#' @inheritParams dbHostInfo
-#' @inheritParams workflow
+#' @inheritParams var_names_all
#' @param run_id Run ID
#' @param workflow_id Workflow ID
#' @export
diff --git a/db/man/get_workflow_ids.Rd b/db/man/get_workflow_ids.Rd
index a95c3ca6695..ed6bc572f5a 100644
--- a/db/man/get_workflow_ids.Rd
+++ b/db/man/get_workflow_ids.Rd
@@ -4,7 +4,7 @@
\alias{get_workflow_ids}
\title{Get vector of workflow IDs}
\usage{
-get_workflow_ids(bety, session)
+get_workflow_ids(bety, session, all.ids = FALSE)
}
\arguments{
\item{bety}{BETYdb connection, as opened by `betyConnect()`}
diff --git a/db/man/load_data_single_run.Rd b/db/man/load_data_single_run.Rd
new file mode 100644
index 00000000000..3205ae6445d
--- /dev/null
+++ b/db/man/load_data_single_run.Rd
@@ -0,0 +1,18 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/query.dplyr.R
+\name{load_data_single_run}
+\alias{load_data_single_run}
+\title{Load data for a single run of the model}
+\usage{
+load_data_single_run(bety, workflow_id, run_id)
+}
+\arguments{
+\item{bety}{BETYdb connection, as opened by `betyConnect()`}
+
+\item{workflow_id}{Workflow ID}
+
+\item{run_id}{Run ID}
+}
+\description{
+Load data for a single run of the model
+}
diff --git a/db/man/var_names_all.Rd b/db/man/var_names_all.Rd
new file mode 100644
index 00000000000..91bf847a53d
--- /dev/null
+++ b/db/man/var_names_all.Rd
@@ -0,0 +1,18 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/query.dplyr.R
+\name{var_names_all}
+\alias{var_names_all}
+\title{Get vector of variable names for a particular workflow and run ID}
+\usage{
+var_names_all(bety, workflow_id, run_id)
+}
+\arguments{
+\item{bety}{BETYdb connection, as opened by `betyConnect()`}
+
+\item{workflow_id}{Workflow ID}
+
+\item{run_id}{Run ID}
+}
+\description{
+Get vector of variable names for a particular workflow and run ID
+}
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index 6672a607510..5c786736155 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -65,17 +65,6 @@ server <- shinyServer(function(input, output, session) {
}
return(globalDF)
}
- # Fetches variable names from DB
- # @param workflow_id and run_id
- # @return List of variable names
- # var_names_all <- function(bety,workflow_id, run_id){
- # # Get variables for a particular workflow and run id
- # var_names <- get_var_names(bety, workflow_id, run_id)
- # # Remove variables which should not be shown to the user
- # removeVarNames <- c('Year','FracJulianDay')
- # var_names <- var_names[!var_names %in% removeVarNames]
- # return(var_names)
- # }
# Update variable names
observe({
req(input$all_run_id)
@@ -87,64 +76,6 @@ server <- shinyServer(function(input, output, session) {
}
updateSelectizeInput(session, "variable_name", choices=var_name_list)
})
- # # Load data for a single run of the model
- # # @param workflow_id and run_id
- # # @return Dataframe for one run
- # # For a particular combination of workflow and run id, loads
- # # all variables from all files.
- load_data_single_run <- function(bety,workflow_id,run_id){
- globalDF <- data.frame()
- workflow <- collect(workflow(bety, workflow_id))
- # Use the function 'var_names_all' to get all variables
- var_names <- var_names_all(bety,workflow_id,run_id)
- # Using earlier code, refactored
- if(nrow(workflow) > 0) {
- outputfolder <- file.path(workflow$folder, 'out', run_id)
- files <- list.files(outputfolder, "*.nc$", full.names=TRUE)
- for(file in files) {
- nc <- nc_open(file)
- for(var_name in var_names){
- dates <- NA
- vals <- NA
- title <- var_name
- ylab <- ""
- var <- ncdf4::ncatt_get(nc, var_name)
- #sw <- if ('Swdown' %in% names(nc$var)) ncdf4::ncvar_get(nc, 'Swdown') else TRUE
- # Snow water
- sw <- TRUE
- # Check required bcoz many files dont contain title
- if(!is.null(var$long_name)){
- title <- var$long_name
- }
- # Check required bcoz many files dont contain units
- if(!is.null(var$units)){
- ylab <- var$units
- }
- x <- ncdays2date(ncdf4::ncvar_get(nc, 'time'), ncdf4::ncatt_get(nc, 'time'))
- y <- ncdf4::ncvar_get(nc, var_name)
- b <- !is.na(x) & !is.na(y) & sw != 0
- dates <- if(is.na(dates)) x[b] else c(dates, x[b])
- dates <- as.Date(dates)
- vals <- if(is.na(vals)) y[b] else c(vals, y[b])
- xlab <- "Time"
- # Values of the data which we will plot
- valuesDF <- data.frame(dates,vals)
- # Meta information about the data
- metaDF <- data.frame(workflow_id,run_id,title,xlab,ylab,var_name)
- currentDF <- cbind(valuesDF,metaDF)
- globalDF <- rbind(globalDF,currentDF)
- }
- ncdf4::nc_close(nc)
- }
- }
- # Required to convert from factors to characters
- # Otherwise error by ggplotly
- globalDF$title <- as.character(globalDF$title)
- globalDF$xlab <- as.character(globalDF$xlab)
- globalDF$ylab <- as.character(globalDF$ylab)
- globalDF$var_name <- as.character(globalDF$var_name)
- return(globalDF)
- }
# Loads data for all workflow and run ids after the load button is pressed.
# All information about a model is contained in 'all_run_id' string
# Wrapper over 'load_data_single_run'
From a3148ac588e29e965aa2c7c963d5dbb70b060ba8 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sat, 1 Jul 2017 21:22:50 -0500
Subject: [PATCH 063/771] Description error
---
visualization/DESCRIPTION | 3 +--
1 file changed, 1 insertion(+), 2 deletions(-)
diff --git a/visualization/DESCRIPTION b/visualization/DESCRIPTION
index 1fec24918f3..aac8a5fee9f 100644
--- a/visualization/DESCRIPTION
+++ b/visualization/DESCRIPTION
@@ -35,8 +35,7 @@ Imports:
ncdf4 (>= 1.15),
plyr (>= 1.8.4),
stringr(>= 1.1.0),
- udunits2 (>= 0.11),
- plotly(>=4.6.0)
+ udunits2 (>= 0.11)
Suggests:
testthat (>= 1.0.2),
png,
From 6564875cc272090809d5c54452faa2f8b853d775 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sun, 2 Jul 2017 16:31:24 -0500
Subject: [PATCH 064/771] Updating inheritParams for load_data_single_run
---
db/R/query.dplyr.R | 1 +
1 file changed, 1 insertion(+)
diff --git a/db/R/query.dplyr.R b/db/R/query.dplyr.R
index 042cb4e3bb9..7fdcb5715b6 100644
--- a/db/R/query.dplyr.R
+++ b/db/R/query.dplyr.R
@@ -228,6 +228,7 @@ var_names_all <- function(bety, workflow_id, run_id) {
#' Load data for a single run of the model
#' @inheritParams var_names_all
+#' @inheritParams workflow
#' @param run_id Run ID
#' @param workflow_id Workflow ID
#' @export
From 9dd65e9b37cdbee84ab3e9ce8331856dea46e002 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Mon, 3 Jul 2017 19:42:58 -0500
Subject: [PATCH 065/771] Allow toggle for chart type. observeEvent while
loading variables. Commenting source helper.R
---
shiny/workflowPlot/helper.R | 2 +-
shiny/workflowPlot/server.R | 23 ++++++++++++++++-------
shiny/workflowPlot/ui.R | 6 ++++--
3 files changed, 21 insertions(+), 10 deletions(-)
diff --git a/shiny/workflowPlot/helper.R b/shiny/workflowPlot/helper.R
index 978e37e0a8a..0af0c05448f 100644
--- a/shiny/workflowPlot/helper.R
+++ b/shiny/workflowPlot/helper.R
@@ -9,4 +9,4 @@ checkAndDownload<-function(packageNames) {
isInstalled <- function(mypkg){
is.element(mypkg, installed.packages()[,1])
}
-checkAndDownload(c('plotly','scales','dplyr'))
+# checkAndDownload(c('plotly','scales','dplyr'))
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index 5c786736155..6caec434719 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -4,7 +4,7 @@ library(shiny)
library(ncdf4)
library(ggplot2)
# Helper allows to load functions and variables that could be shared both by server.R and ui.R
-source('helper.R')
+# source('helper.R')
library(plotly)
library(scales)
library(dplyr)
@@ -65,8 +65,8 @@ server <- shinyServer(function(input, output, session) {
}
return(globalDF)
}
- # Update variable names
- observe({
+ # Update variable names observeEvent on input$load
+ observeEvent(input$load,{
req(input$all_run_id)
# All information about a model is contained in 'all_run_id' string
ids_DF <- parse_ids_from_input_runID(input$all_run_id)
@@ -95,7 +95,7 @@ server <- shinyServer(function(input, output, session) {
validate(
need(input$all_workflow_id, 'Select workflow id'),
need(input$all_run_id, 'Select Run id'),
- need(input$variable_name, 'Click the button to load data')
+ need(input$variable_name, 'Click the button to load data. Please allow some time')
)
# Load data
masterDF <- loadNewData()
@@ -112,12 +112,21 @@ server <- shinyServer(function(input, output, session) {
ylab <- unique(df$ylab)
# ggplot function for now scatter plots.
# TODO Shubham allow line plots as well
- plt <- ggplot(df, aes(x=dates, y=vals, color=run_id)) +
- geom_point() +
+ plt <- ggplot(df, aes(x=dates, y=vals, color=run_id))
+ # Toggle chart type using switch
+ switch(input$plotType,
+ "scatterPlot" = {
+ plt <- plt + geom_point()
+ },
+ "lineChart" = {
+ plt <- plt + geom_line()
+ }
+ )
+ # geom_point() +
# Earlier smoothing and y labels
# geom_smooth(aes(fill = "Spline fit")) +
# scale_y_continuous(labels=fancy_scientific) +
- labs(title=title, x=xlab, y=ylab)
+ plt <- plt + labs(title=title, x=xlab, y=ylab)
# Earlier color and fill values
# scale_color_manual(name = "", values = "black") +
# scale_fill_manual(name = "", values = "grey50")
diff --git a/shiny/workflowPlot/ui.R b/shiny/workflowPlot/ui.R
index a2c7673474a..2c00dfb8e20 100644
--- a/shiny/workflowPlot/ui.R
+++ b/shiny/workflowPlot/ui.R
@@ -1,5 +1,6 @@
library(shiny)
-source('helper.R')
+# Helper allows to load functions and variables that could be shared both by server.R and ui.R
+# source('helper.R')
# Define UI
ui <- shinyUI(fluidPage(
# Application title
@@ -12,7 +13,8 @@ ui <- shinyUI(fluidPage(
p("Please select the run IDs. You can select multiple IDs"),
selectizeInput("all_run_id", "Mutliple Run IDs", c(),multiple=TRUE),
actionButton("load", "Load Model outputs"),
- selectInput("variable_name", "Variable Name", "")
+ selectInput("variable_name", "Variable Name", ""),
+ radioButtons("plotType", "Plot Type", c("Scatter Plot" = "scatterPlot", "Line Chart" = "lineChart"), selected="scatterPlot")
),
mainPanel(
plotlyOutput("outputPlot")
From 4b8c656bd67c6c97325b5a966f72d22fc09200d2 Mon Sep 17 00:00:00 2001
From: Alexey Shiklomanov
Date: Wed, 5 Jul 2017 10:33:19 -0400
Subject: [PATCH 066/771] Check package dependencies in `make document`
Resolves #1502.
---
Makefile | 4 +++-
1 file changed, 3 insertions(+), 1 deletion(-)
diff --git a/Makefile b/Makefile
index 325e7cbdd65..ea381f752b9 100644
--- a/Makefile
+++ b/Makefile
@@ -36,7 +36,7 @@ ALL_PKGS_D := $(BASE_D) $(MODELS_D) $(MODULES_D) .doc/models/template
.PHONY: all install check test document
-all: install
+all: document install
document: .doc/all
install: .install/all
@@ -85,6 +85,7 @@ clean:
mkdir -p $(@D)
echo `date` > $@
+depends_R_pkg = Rscript -e "devtools::install_dev_deps('$(strip $(1))', Ncpus = ${NCPUS});"
install_R_pkg = Rscript -e "devtools::install('$(strip $(1))', Ncpus = ${NCPUS});"
check_R_pkg = Rscript scripts/check_with_errors.R $(strip $(1))
test_R_pkg = Rscript -e "devtools::test('"$(strip $(1))"', reporter = 'stop')"
@@ -94,6 +95,7 @@ $(ALL_PKGS_I) $(ALL_PKGS_C) $(ALL_PKGS_T) $(ALL_PKGS_D): .install/devtools .inst
.SECONDEXPANSION:
.doc/%: $$(wildcard %/**/*) $$(wildcard %/*)
+ $(call depends_R_pkg, $(subst .doc/,,$@))
$(call doc_R_pkg, $(subst .doc/,,$@))
mkdir -p $(@D)
echo `date` > $@
From a7c0077c4eb591cd738e1b2a42a4eaf66780e4a8 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Wed, 5 Jul 2017 21:36:33 -0500
Subject: [PATCH 067/771] UI for loading external data
---
shiny/workflowPlot/server.R | 11 +++++++++--
shiny/workflowPlot/ui.R | 22 ++++++++++++++++++++--
2 files changed, 29 insertions(+), 4 deletions(-)
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index 6caec434719..081a505da81 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -4,7 +4,7 @@ library(shiny)
library(ncdf4)
library(ggplot2)
# Helper allows to load functions and variables that could be shared both by server.R and ui.R
-# source('helper.R')
+source('helper.R')
library(plotly)
library(scales)
library(dplyr)
@@ -89,7 +89,14 @@ server <- shinyServer(function(input, output, session) {
}
return(globalDF)
})
- # Renders the ggplotly
+ loadExternalData <-eventReactive(input$load_data,{
+ inFile <- input$file1
+ if (is.null(inFile))
+ return(data.frame())
+ read.csv(inFile$datapath, header=input$header, sep=input$sep,
+ quote=input$quote)
+ })
+ # Renders ggplotly
output$outputPlot <- renderPlotly({
# Error messages
validate(
diff --git a/shiny/workflowPlot/ui.R b/shiny/workflowPlot/ui.R
index 2c00dfb8e20..a1204eef7e6 100644
--- a/shiny/workflowPlot/ui.R
+++ b/shiny/workflowPlot/ui.R
@@ -1,6 +1,6 @@
library(shiny)
# Helper allows to load functions and variables that could be shared both by server.R and ui.R
-# source('helper.R')
+source('helper.R')
# Define UI
ui <- shinyUI(fluidPage(
# Application title
@@ -14,7 +14,25 @@ ui <- shinyUI(fluidPage(
selectizeInput("all_run_id", "Mutliple Run IDs", c(),multiple=TRUE),
actionButton("load", "Load Model outputs"),
selectInput("variable_name", "Variable Name", ""),
- radioButtons("plotType", "Plot Type", c("Scatter Plot" = "scatterPlot", "Line Chart" = "lineChart"), selected="scatterPlot")
+ radioButtons("plotType", "Plot Type", c("Scatter Plot" = "scatterPlot", "Line Chart" = "lineChart"), selected="scatterPlot"),
+ tags$hr(),
+ tags$hr(),
+ fileInput('file1', 'Choose CSV File to upload data',
+ accept=c('text/csv',
+ 'text/comma-separated-values,text/plain',
+ '.csv')),
+ checkboxInput('header', 'Header', TRUE),
+ radioButtons('sep', 'Separator',
+ c(Comma=',',
+ Semicolon=';',
+ Tab='\t'),
+ ','),
+ radioButtons('quote', 'Quote',
+ c(None='',
+ 'Double Quote'='"',
+ 'Single Quote'="'"),
+ '"'),
+ actionButton("load_data", "Load External Data")
),
mainPanel(
plotlyOutput("outputPlot")
From cb13cedcabc879638922a9fc63b34657d724b742 Mon Sep 17 00:00:00 2001
From: Betsy Cowdery
Date: Fri, 7 Jul 2017 14:07:46 -0600
Subject: [PATCH 068/771] Changing plot.photo to plot_photo
---
documentation/tutorials/MCMC/MCMC_Concepts.Rmd | 2 +-
modules/photosynthesis/R/plots.R | 8 ++++----
modules/photosynthesis/code/test.fitA.R | 2 +-
modules/photosynthesis/vignettes/ResponseCurves.Rmd | 6 +++---
4 files changed, 9 insertions(+), 9 deletions(-)
diff --git a/documentation/tutorials/MCMC/MCMC_Concepts.Rmd b/documentation/tutorials/MCMC/MCMC_Concepts.Rmd
index fb1e8205098..9be8bb07abc 100644
--- a/documentation/tutorials/MCMC/MCMC_Concepts.Rmd
+++ b/documentation/tutorials/MCMC/MCMC_Concepts.Rmd
@@ -173,7 +173,7 @@ In the final set of plots we look at the actual A-Ci and A-Q curves themselves.
```{r}
## Response curve
-plot.photo(dat,fit)
+plot_photo(dat,fit)
```
Note: on the last figure you will get warnings about "No ACi" and "No AQ" which can be ignored. These are occuring because the file that had the ACi curve didn't have an AQ curve, and the file that had the AQ curve didn't have an ACi curve.
diff --git a/modules/photosynthesis/R/plots.R b/modules/photosynthesis/R/plots.R
index d15e0a7d824..fb28a43fd81 100644
--- a/modules/photosynthesis/R/plots.R
+++ b/modules/photosynthesis/R/plots.R
@@ -7,11 +7,11 @@ ciEnvelope <- function(x, ylo, yhi, col = "lightgrey", ...) {
col = col, border = NA, ...))
} # ciEnvelope
-##' @name plot.photo
-##' @title plot.photo
+##' @name plot_photo
+##' @title plot_photo
##' @author Mike Dietze
##' @export
-plot.photo <- function(data, out, curve = c("ACi", "AQ"), tol = 0.05, byLeaf = TRUE) {
+plot_photo <- function(data, out, curve = c("ACi", "AQ"), tol = 0.05, byLeaf = TRUE) {
params <- as.matrix(out$params)
predict <- as.matrix(out$predict)
@@ -97,4 +97,4 @@ plot.photo <- function(data, out, curve = c("ACi", "AQ"), tol = 0.05, byLeaf = T
}
} ## end A-Q
} ## end loop over curves
-} # plot.photo
+} # plot_photo
diff --git a/modules/photosynthesis/code/test.fitA.R b/modules/photosynthesis/code/test.fitA.R
index d1e471376d1..ead28d7753d 100644
--- a/modules/photosynthesis/code/test.fitA.R
+++ b/modules/photosynthesis/code/test.fitA.R
@@ -49,5 +49,5 @@ summary(fit$params) ## parameter estimates
abline(0,1,col=2,lwd=2)
-plot.photo(dat,fit)
+plot_photo(dat,fit)
\ No newline at end of file
diff --git a/modules/photosynthesis/vignettes/ResponseCurves.Rmd b/modules/photosynthesis/vignettes/ResponseCurves.Rmd
index 50686ba7418..a1a6d9717ef 100644
--- a/modules/photosynthesis/vignettes/ResponseCurves.Rmd
+++ b/modules/photosynthesis/vignettes/ResponseCurves.Rmd
@@ -159,7 +159,7 @@ plot(pmean, dat$Photo, pch = "+", xlab = "Predicted A", ylab = "Observed A")
abline(0, 1, col = 2, lwd = 2)
##
-plot.photo(dat, fit)
+plot_photo(dat, fit)
```
@@ -199,7 +199,7 @@ plot(pmean, dat$Photo, pch = "+", xlab = "Predicted A", ylab = "Observed A")
abline(0,1,col=2,lwd=2)
##
-plot.photo(dat,fitI)
+plot_photo(dat,fitI)
```
@@ -248,7 +248,7 @@ plot(pmean, dat$Photo, pch = "+", xlab = "Predicted A", ylab = "Observed A")
abline(0,1,col=2,lwd=2)
##
-plot.photo(dat, fitC)
+plot_photo(dat, fitC)
```
From 69ae000c722aa4e533d4c41d5a5a3c2868520f96 Mon Sep 17 00:00:00 2001
From: Betsy Cowdery
Date: Fri, 7 Jul 2017 14:11:59 -0600
Subject: [PATCH 069/771] Changing read.Licor to read_Licor
---
documentation/tutorials/MCMC/MCMC_Concepts.Rmd | 2 +-
modules/photosynthesis/R/fitA.R | 8 ++++----
modules/photosynthesis/code/test.fitA.R | 2 +-
3 files changed, 6 insertions(+), 6 deletions(-)
diff --git a/documentation/tutorials/MCMC/MCMC_Concepts.Rmd b/documentation/tutorials/MCMC/MCMC_Concepts.Rmd
index 9be8bb07abc..5036c41b8d6 100644
--- a/documentation/tutorials/MCMC/MCMC_Concepts.Rmd
+++ b/documentation/tutorials/MCMC/MCMC_Concepts.Rmd
@@ -73,7 +73,7 @@ library(PEcAn.photosynthesis)
### Load built in data
filenames <- system.file("extdata", paste0("flux-course-3",c("aci","aq")), package = "PEcAn.photosynthesis")
-dat<-do.call("rbind", lapply(filenames, read.Licor))
+dat<-do.call("rbind", lapply(filenames, read_Licor))
## Simple plots
aci = as.character(dat$fname) == basename(filenames[1])
diff --git a/modules/photosynthesis/R/fitA.R b/modules/photosynthesis/R/fitA.R
index a93d58f5a26..35aee63f879 100644
--- a/modules/photosynthesis/R/fitA.R
+++ b/modules/photosynthesis/R/fitA.R
@@ -265,8 +265,8 @@ return(out)
} # fitA
-##' @name read.Licor
-##' @title read.Licor
+##' @name read_Licor
+##' @title read_Licor
##'
##' @author Mike Dietze
##' @export
@@ -274,7 +274,7 @@ return(out)
##' @param filename name of the file to read
##' @param sep file delimiter. defaults to tab
##' @param ... optional arguements forwarded to read.table
-read.Licor <- function(filename, sep = "\t", ...) {
+read_Licor <- function(filename, sep = "\t", ...) {
fbase <- sub(".txt", "", tail(unlist(strsplit(filename, "/")), n = 1))
print(fbase)
full <- readLines(filename)
@@ -290,7 +290,7 @@ read.Licor <- function(filename, sep = "\t", ...) {
fname <- rep(fbase, nrow(dat))
dat <- as.data.frame(cbind(fname, dat))
return(dat)
-} # read.Licor
+} # read_Licor
mat2mcmc.list <- function(w) {
diff --git a/modules/photosynthesis/code/test.fitA.R b/modules/photosynthesis/code/test.fitA.R
index ead28d7753d..c08930b538a 100644
--- a/modules/photosynthesis/code/test.fitA.R
+++ b/modules/photosynthesis/code/test.fitA.R
@@ -9,7 +9,7 @@
## Read Photosynthetic gas exchange data
filenames <- list.files(in.folder,pattern=pattern, full.names=TRUE)
- master = lapply(filenames, read.Licor)
+ master = lapply(filenames, read_Licor)
save(master,file="master.RData")
## run QA/QC checks
From 7bdf26959a696fe134217d1baa93bb70bbfb6fe4 Mon Sep 17 00:00:00 2001
From: Betsy Cowdery
Date: Fri, 7 Jul 2017 14:13:47 -0600
Subject: [PATCH 070/771] Changing Licor.QC to Licor_QC
---
modules/photosynthesis/R/Licor.QC.R | 8 ++++----
modules/photosynthesis/code/test.fitA.R | 2 +-
modules/photosynthesis/vignettes/ResponseCurves.Rmd | 6 +++---
3 files changed, 8 insertions(+), 8 deletions(-)
diff --git a/modules/photosynthesis/R/Licor.QC.R b/modules/photosynthesis/R/Licor.QC.R
index 7f02374d41d..a81995729d5 100644
--- a/modules/photosynthesis/R/Licor.QC.R
+++ b/modules/photosynthesis/R/Licor.QC.R
@@ -1,11 +1,11 @@
-##' @name Licor.QC
-##' @title Licor.QC
+##' @name Licor_QC
+##' @title Licor_QC
##' @author Mike Dietze
##' @export
##' @param dat data frame
##' @param curve Whether to do Quality Control by examining the 'ACi' curve, the 'AQ' curve, or both
##' @param tol Code automatically tries to separate ACi and AQ curves in the same dataset by detecting the 'reference' condition for light and CO2 respectively. This is the relative error around the mode in that detection.
-Licor.QC <- function(dat, curve = c("ACi", "AQ"), tol = 0.05) {
+Licor_QC <- function(dat, curve = c("ACi", "AQ"), tol = 0.05) {
if (!("QC" %in% names(dat))) {
dat$QC <- rep(0, nrow(dat))
@@ -112,7 +112,7 @@ Licor.QC <- function(dat, curve = c("ACi", "AQ"), tol = 0.05) {
}
}
return(invisible(dat))
-} # Licor.QC
+} # Licor_QC
##' @name estimate_mode
##' @title estimate_mode
diff --git a/modules/photosynthesis/code/test.fitA.R b/modules/photosynthesis/code/test.fitA.R
index c08930b538a..6b08f901b45 100644
--- a/modules/photosynthesis/code/test.fitA.R
+++ b/modules/photosynthesis/code/test.fitA.R
@@ -14,7 +14,7 @@
## run QA/QC checks
for(i in 1:length(master)){
- master[[i]] = Licor.QC(master[[i]])
+ master[[i]] = Licor_QC(master[[i]])
save(master,file="master.RData")
}
diff --git a/modules/photosynthesis/vignettes/ResponseCurves.Rmd b/modules/photosynthesis/vignettes/ResponseCurves.Rmd
index a1a6d9717ef..ee3d227c8d9 100644
--- a/modules/photosynthesis/vignettes/ResponseCurves.Rmd
+++ b/modules/photosynthesis/vignettes/ResponseCurves.Rmd
@@ -70,7 +70,7 @@ The code below performs a set of interactive QA/QC checks on the LI-COR data tha
If you want to get a feel for how the code works you'll want to run it first on just one file, rather than looping over all the files
```
-master[[1]] <- Licor.QC(master[[1]])
+master[[1]] <- Licor_QC(master[[1]])
```
On the first screen you will be shown an A-Ci curve. Click on points that are outliers that you want to remove. Be aware that these points will not change color in **THIS SCREEN**, but will be updated in the next. Also be aware that if your data set is not an A-Ci curve (or contains both A-Ci and A-Q curves) there are points that may look like outliers just because they are data from the other curve. When you are done selecting outliers, click **[esc]** to move to the next screen.
@@ -81,7 +81,7 @@ The third and fourth screens are the equivalent plots for the A-Q (light respons
Finally, this function returns a copy of the original data with a new column, "QC", added. This column will flag all passed values with 1, all unchecked values with 0, and all failed values with -1.
-The function Licor.QC has an optional arguement, _curve_, which can be set to either "ACi" or "AQ" if you only want to perform one of these diagnostics rather than both (which is the default).
+The function Licor_QC has an optional argument, _curve_, which can be set to either "ACi" or "AQ" if you only want to perform one of these diagnostics rather than both (which is the default).
Also, the QC code attempts to automatically separate which points are part of the ACi curve from which parts are part of the AQ curve, based on how close points are to the the variable which is supposed to be held constant. The optional variable "tol" controls the tolerance of this filter, and is expressed as a proportion of the fixed value. The default value, 0.05, corresponds to a 5% deviation. For example, in the ACi curve the light level should be held constant so the code filters the PARi variable to find the mode and then included any data that's within 5% of the mode in the ACi curve.
@@ -89,7 +89,7 @@ Once you have a feel for the QA/QC function, you'll want to run it for all the d
```
for(i in 1:length(master)){
- master[[i]] = Licor.QC(master[[i]])
+ master[[i]] = Licor_QC(master[[i]])
}
```
From 7f207a61facb55342e3e324af9cba3a318f4abd4 Mon Sep 17 00:00:00 2001
From: LiamBurke24
Date: Fri, 7 Jul 2017 16:14:47 -0400
Subject: [PATCH 071/771] First draft of 4 query and download utils for
handling data from the dataONE federation
---
modules/data.land/R/DataONE_doi_download.R | 114 +++++++++++++++++++++
1 file changed, 114 insertions(+)
create mode 100644 modules/data.land/R/DataONE_doi_download.R
diff --git a/modules/data.land/R/DataONE_doi_download.R b/modules/data.land/R/DataONE_doi_download.R
new file mode 100644
index 00000000000..a6832cc39c9
--- /dev/null
+++ b/modules/data.land/R/DataONE_doi_download.R
@@ -0,0 +1,114 @@
+##' Functions to determine if data can be found by doi in R
+##' Author: Liam Burke
+##' Code draws heavily on dataone r package for communication with the dataONE federation
+
+#--------------------------------------------------------------------------------#
+# 1. format.identifier -- convert doi or id into solrQuery format #
+#--------------------------------------------------------------------------------#
+
+format.identifier = function(id){
+ doi.template <- 'id:"_"' # solr format
+ doi1 <<- base::gsub("_", id, doi.template) # replace "_" with the doi or id and store in global environment
+ return(doi1)
+} # end function
+
+#--------------------------------------------------------------------------------#
+# 2. id.resolveable -- Is doi/ id available in dataONE? #
+#--------------------------------------------------------------------------------#
+
+##' Arguments
+#' id: doi or dataone id
+#' CNode: usually "PROD"
+#' return_result: boolean that returns or suppresses result of query
+
+id.resolveable = function(id, CNode, return_result){
+ format.identifier(id) # reformat the id in solr format
+
+ cn <- dataone::CNode("PROD")
+ queryParams <- list(q=doi1, rows="5")
+ result <- dataone::query(cn, solrQuery = queryParams, as = "data.frame") # return query results as a data.frame
+
+ if(return_result == TRUE){ # option that displays data.frame of query
+ print(result)
+
+ if(is.null(result[1,1])){ # if there is no data available, result[1,1] will return a NULL value
+ return("doi does not resolve in the DataOne federation and therefore cannot be retrieved by doi.
+ Either download this data locally and import using PEcAn's drag and drop feature, or search DataOne manually for another data identifier. Thank you for your patience.")
+ } else{
+ return("data can be found in D1 federation")
+ }
+ } else{ # option that does not display data.frame of query (return_result == FALSE)
+ if(is.null(result[1,1])){
+ return("doi does not resolve in the DataOne federation and therefore cannot be retrieved by doi.
+ Either download this data locally and import using PEcAn's drag and drop feature, or search DataOne manually for another data identifier (e.g. pid or resource_map) Thank you for your patience.")
+ } else{
+ return("data can be found in D1 federation")
+ }
+ }
+
+} # end function
+
+#--------------------------------------------------------------------------------#
+# Get resource_map from doi #
+#--------------------------------------------------------------------------------#
+
+##' Arguments:
+#' id: doi or dataone id
+#' CNode: usually "PROD"
+
+get.resource.map = function(id, CNode){
+ cn <- dataone::CNode("PROD")
+ locations <- dataone::resolve(cn, pid = id)
+ mnId <<- locations$data[1,"nodeIdentifier"] # store mnId in global environment
+ mn <<- dataone::getMNode(cn, mnId) # store mn in global environment
+
+ format.identifier(id) # format the identifier in solr Query format
+ queryParamList <- list(q=doi1, fl="resourceMap") # custom query for the resourceMap
+ resource_map_df <- dataone::query(cn, solrQuery = queryParamList, as="data.frame")
+ resource_map <<- resource_map_df[1,1] # store resource map in global env. resource map is always in resource_map_df[1,1]
+
+ if (is.null(resource_map_df[1,1])){ # inform user if id/ doi has a corresponding resource_map or if this needs to be found manually
+ print("doi does not resolve a resource_map. Please manually search for the resource_map in DataONE search: https://search.dataone.org/#data")
+ } else{
+ print("Continue to next phase to complete download")
+ return(resource_map)
+ }
+} # end function
+
+#--------------------------------------------------------------------------------#
+# download package using resource_map #
+#--------------------------------------------------------------------------------#
+
+### Arguments:
+#' resource_map: can be entered manually or can be called from the get.resource.map fn result
+#' CNode: usually "PROD"
+#' download_format: format of download defaulted to "application/bagit-097" -- other possible formats unknown
+#' overwrite_directory: boolean
+#' directory: indicates the destination directory for the BagItFile
+
+
+
+download.package.rm = function(resource_map, CNode, download_format = "application/bagit-097",
+ overwrite_directory = TRUE, directory){
+ # Finding the mnId (query)
+ cn <- dataone::CNode("PROD")
+ locations <- dataone::resolve(cn, pid = resource_map)
+ mnId <<- locations$data[1,"nodeIdentifier"]
+
+ # download the bagitFile
+ mn <<- dataone::getMNode(cn, mnId)
+ bagitFile <<- dataone::getPackage(mn, id = resource_map, format = download_format)
+ bagitFile
+
+
+ zip_contents <<- utils::unzip(bagitFile, files = NULL, list = TRUE, overwrite = TRUE, # list files in bagitFile
+ junkpaths = FALSE, exdir = "downloads", unzip = "internal",
+ setTimes = FALSE)
+
+ utils::unzip(bagitFile, files = NULL, list = FALSE, overwrite = overwrite_directory, # Unzip the bagitFile and store in directory specified under exdir
+ junkpaths = FALSE, exdir = directory, unzip = "internal",
+ setTimes = FALSE)
+ return(zip_contents)
+} # end function
+
+
From df489fa798232055594e81c2c640a9740838948f Mon Sep 17 00:00:00 2001
From: Betsy Cowdery
Date: Fri, 7 Jul 2017 14:14:32 -0600
Subject: [PATCH 072/771] Updating Documentation
---
modules/photosynthesis/NAMESPACE | 6 +++---
.../photosynthesis/man/{Licor.QC.Rd => Licor_QC.Rd} | 8 ++++----
modules/photosynthesis/man/plot.photo.Rd | 12 ------------
modules/photosynthesis/man/plot_photo.Rd | 11 +++++++++++
.../man/{read.Licor.Rd => read_Licor.Rd} | 8 ++++----
modules/photosynthesis/vignettes/ResponseCurves.Rmd | 2 +-
6 files changed, 23 insertions(+), 24 deletions(-)
rename modules/photosynthesis/man/{Licor.QC.Rd => Licor_QC.Rd} (82%)
delete mode 100644 modules/photosynthesis/man/plot.photo.Rd
create mode 100644 modules/photosynthesis/man/plot_photo.Rd
rename modules/photosynthesis/man/{read.Licor.Rd => read_Licor.Rd} (74%)
diff --git a/modules/photosynthesis/NAMESPACE b/modules/photosynthesis/NAMESPACE
index 705dd31e6fa..b9b3e4fc580 100644
--- a/modules/photosynthesis/NAMESPACE
+++ b/modules/photosynthesis/NAMESPACE
@@ -1,8 +1,8 @@
# Generated by roxygen2: do not edit by hand
-S3method(plot,photo)
-export(Licor.QC)
+export(Licor_QC)
export(ciEnvelope)
export(estimate_mode)
export(fitA)
-export(read.Licor)
+export(plot_photo)
+export(read_Licor)
diff --git a/modules/photosynthesis/man/Licor.QC.Rd b/modules/photosynthesis/man/Licor_QC.Rd
similarity index 82%
rename from modules/photosynthesis/man/Licor.QC.Rd
rename to modules/photosynthesis/man/Licor_QC.Rd
index c0c3ed702d8..ff09f0e9e2a 100644
--- a/modules/photosynthesis/man/Licor.QC.Rd
+++ b/modules/photosynthesis/man/Licor_QC.Rd
@@ -1,10 +1,10 @@
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/Licor.QC.R
-\name{Licor.QC}
-\alias{Licor.QC}
-\title{Licor.QC}
+\name{Licor_QC}
+\alias{Licor_QC}
+\title{Licor_QC}
\usage{
-Licor.QC(dat, curve = c("ACi", "AQ"), tol = 0.05)
+Licor_QC(dat, curve = c("ACi", "AQ"), tol = 0.05)
}
\arguments{
\item{dat}{data frame}
diff --git a/modules/photosynthesis/man/plot.photo.Rd b/modules/photosynthesis/man/plot.photo.Rd
deleted file mode 100644
index 84b4257d2fd..00000000000
--- a/modules/photosynthesis/man/plot.photo.Rd
+++ /dev/null
@@ -1,12 +0,0 @@
-% Generated by roxygen2: do not edit by hand
-% Please edit documentation in R/plots.R
-\name{plot.photo}
-\alias{plot.photo}
-\title{plot.photo}
-\usage{
-\method{plot}{photo}(data, out, curve = c("ACi", "AQ"), tol = 0.05,
- byLeaf = TRUE)
-}
-\author{
-Mike Dietze
-}
diff --git a/modules/photosynthesis/man/plot_photo.Rd b/modules/photosynthesis/man/plot_photo.Rd
new file mode 100644
index 00000000000..b9b8174680d
--- /dev/null
+++ b/modules/photosynthesis/man/plot_photo.Rd
@@ -0,0 +1,11 @@
+% Generated by roxygen2: do not edit by hand
+% Please edit documentation in R/plots.R
+\name{plot_photo}
+\alias{plot_photo}
+\title{plot_photo}
+\usage{
+plot_photo(data, out, curve = c("ACi", "AQ"), tol = 0.05, byLeaf = TRUE)
+}
+\author{
+Mike Dietze
+}
diff --git a/modules/photosynthesis/man/read.Licor.Rd b/modules/photosynthesis/man/read_Licor.Rd
similarity index 74%
rename from modules/photosynthesis/man/read.Licor.Rd
rename to modules/photosynthesis/man/read_Licor.Rd
index 4f0644654e1..a08c89385d9 100644
--- a/modules/photosynthesis/man/read.Licor.Rd
+++ b/modules/photosynthesis/man/read_Licor.Rd
@@ -1,10 +1,10 @@
% Generated by roxygen2: do not edit by hand
% Please edit documentation in R/fitA.R
-\name{read.Licor}
-\alias{read.Licor}
-\title{read.Licor}
+\name{read_Licor}
+\alias{read_Licor}
+\title{read_Licor}
\usage{
-read.Licor(filename, sep = "\\t", ...)
+read_Licor(filename, sep = "\\t", ...)
}
\arguments{
\item{filename}{name of the file to read}
diff --git a/modules/photosynthesis/vignettes/ResponseCurves.Rmd b/modules/photosynthesis/vignettes/ResponseCurves.Rmd
index ee3d227c8d9..6f6b1e4a51f 100644
--- a/modules/photosynthesis/vignettes/ResponseCurves.Rmd
+++ b/modules/photosynthesis/vignettes/ResponseCurves.Rmd
@@ -59,7 +59,7 @@ library(PEcAn.photosynthesis)
filenames <- system.file("extdata", paste0("flux-course-",rep(1:6,each=2),c("aci","aq")), package = "PEcAn.photosynthesis")
## Load files to a list
-master = lapply(filenames, read.Licor)
+master = lapply(filenames, read_Licor)
```
From 9b2816808273e45d99daa1fd527fc4ffe1988edd Mon Sep 17 00:00:00 2001
From: Andria Dawson
Date: Fri, 7 Jul 2017 23:08:10 -0600
Subject: [PATCH 073/771] Generalizing allom.predict to accept a list
---
modules/allometry/R/allom.predict.R | 62 +++++++++++++++++++++++------
1 file changed, 49 insertions(+), 13 deletions(-)
diff --git a/modules/allometry/R/allom.predict.R b/modules/allometry/R/allom.predict.R
index 207f1856296..9ec003eb20d 100644
--- a/modules/allometry/R/allom.predict.R
+++ b/modules/allometry/R/allom.predict.R
@@ -82,6 +82,7 @@ allom.predict <- function(object, dbh, pft = NULL, component = NULL, n = NULL, u
return(NA)
}
+
## build PFT x Component table and convert mcmclist objects to mcmc
pftByComp <- matrix(NA, npft, ncomp)
for (i in seq_len(npft)) {
@@ -180,27 +181,62 @@ allom.predict <- function(object, dbh, pft = NULL, component = NULL, n = NULL, u
names(params) <- names(object)
### perform actual allometric calculation
- out <- matrix(NA, n, length(dbh))
+ if (is(dbh, "list")) {
+ out <- list(length(dbh))
+ } else {
+ out <- matrix(NA, n, length(dbh))
+ }
for (p in unique(pft)) {
sel <- which(pft == p)
- a <- params[[p]][, 1]
- b <- params[[p]][, 2]
+ a <- params[[p]][,1]
+ b <- params[[p]][,2]
if (ncol(params[[p]]) > 2) {
- s <- sqrt(params[[p]][, 3]) ## sigma was originally calculated as a variance, so convert to std dev
+ s <- sqrt(params[[p]][,3]) ## sigma was originally calculated as a variance, so convert to std dev
} else {
s <- 0
}
- for (i in sel) {
- out[, i] <- exp(rnorm(n, a + b * log(dbh[i]), s))
- }
-
- # for a dbh time-series for a single tree, fix error for each draw
- if (single.tree == TRUE) {
- epsilon <- rnorm(n, 0, s)
- for (i in seq_len(n)) {
- out[i, ] <- exp(a[i] + b[i] * log(dbh) + epsilon[i])
+
+ if (is(dbh, "list")) {
+ for (j in 1:length(sel)) {
+ if ((is(dbh[[sel[j]]], "numeric")) & (all(is.na(dbh[[sel[j]]])))) {
+ out[[sel[j]]] <- array(NA, c(n,1,length(dbh[[sel[j]]])))
+ out[[sel[j]]][,,] <- NA
+ next
+ } else if (is(dbh[[sel[j]]], "numeric")) {
+ ntrees <- 1
+ nyears <- length(dbh[[sel[j]]])
+ } else {
+ ntrees <- nrow(dbh[[sel[j]]])
+ nyears <- ncol(dbh[[sel[j]]])
+ }
+
+ out[[sel[j]]] <- array(NA, c(n,ntrees,nyears))
+
+ for (k in 1:ntrees) {
+ epsilon <- rnorm(n, 0, s) # don't fix this for a single tree; fix for a single iteration for a single site across all trees
+ if (is(dbh[[sel[j]]], "numeric")) {
+ dbh_sel_k <- dbh[[sel[j]]]
+ } else {
+ dbh_sel_k <- dbh[[sel[j]]][k,]
+ }
+
+ log_x <- sapply(dbh_sel_k, function(x) if(is.na(x)|(x<=0)){return(NA)}else{log(x)})
+ out[[sel[j]]][,k,] <- sapply(log_x, function(x) if(is.na(x)){rep(NA, n)}else{exp(a+b*x + epsilon)})
+ }
+ }
+ } else if (single.tree == TRUE) {
+ # for a dbh time-series for a single tree, fix error for each draw
+ epsilon = rnorm(n, 0, s)
+ for (i in 1:n) {
+ out[i,] <- exp(a[i]+b[i]*log(dbh) + epsilon[i])
+ }
+ } else {
+ # for a dbh time-series for different trees, error not fixed across draws
+ for (i in sel) {
+ out[,i] <- exp(rnorm(n, a+b*log(dbh[i]),s))
}
}
+
}
return(out)
From 951f43554cb60d38ad7f614c5bcaaf07d31797ab Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sun, 9 Jul 2017 14:28:10 +0530
Subject: [PATCH 074/771] Typo fixes
---
Dockerfile | 2 +-
docker/bin/my_init | 4 ++--
2 files changed, 3 insertions(+), 3 deletions(-)
diff --git a/Dockerfile b/Dockerfile
index 28c7ece0548..d4829853229 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -17,7 +17,7 @@ RUN /build/system_services.sh
# run update machine to update machine
RUN /build/update_machine.sh
-# run inatall packages to install required packages
+# run install packages to install required packages
RUN /build/install_packages.sh
# run install R to install R packages
diff --git a/docker/bin/my_init b/docker/bin/my_init
index af87c28c78b..ccf79a7711b 100644
--- a/docker/bin/my_init
+++ b/docker/bin/my_init
@@ -1,4 +1,4 @@
-#! /bin/bash
+#!/bin/bash
export LC_ALL=C TERM="xterm"
trap 'shutdown_runit_services' INT TERM
@@ -88,7 +88,7 @@ run_startup_files() {
}
-# function to start corn jobs
+# function to start cron jobs
start_runit () {
echo "Booting runit daemon..."
/usr/bin/runsvdir -P /etc/service 'log:.........................................................................................................' &
From 9696ce7648ce0883b34a880dc1613b4d892fc388 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sun, 9 Jul 2017 08:58:49 -0500
Subject: [PATCH 075/771] Experimenting with loading external data
---
shiny/workflowPlot/helper.R | 5 ++++-
shiny/workflowPlot/server.R | 19 +++++++++++++++++--
shiny/workflowPlot/ui.R | 6 ++++--
3 files changed, 25 insertions(+), 5 deletions(-)
diff --git a/shiny/workflowPlot/helper.R b/shiny/workflowPlot/helper.R
index 0af0c05448f..5dfbdcd0b7f 100644
--- a/shiny/workflowPlot/helper.R
+++ b/shiny/workflowPlot/helper.R
@@ -9,4 +9,7 @@ checkAndDownload<-function(packageNames) {
isInstalled <- function(mypkg){
is.element(mypkg, installed.packages()[,1])
}
-# checkAndDownload(c('plotly','scales','dplyr'))
+checkAndDownload(c('plotly','scales','dplyr'))
+
+# write.csv(globalDF,file='/home/carya/pecan/shiny/workflowPlot/sampleFile.csv',
+# quote = FALSE,sep = ',',col.names = TRUE,row.names=FALSE)
\ No newline at end of file
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index 081a505da81..c823db298dd 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -93,9 +93,18 @@ server <- shinyServer(function(input, output, session) {
inFile <- input$file1
if (is.null(inFile))
return(data.frame())
- read.csv(inFile$datapath, header=input$header, sep=input$sep,
+ output$info1 <- renderText({
+ # paste0(nrow(externalData))
+ paste0(inFile$datapath)
+ })
+ externalData <- read.csv(inFile$datapath, header=input$header, sep=input$sep,
quote=input$quote)
+ return(externalData)
})
+ output$info <- renderText({
+ inFile <- input$file1
+ paste0(inFile$datapath)
+ })
# Renders ggplotly
output$outputPlot <- renderPlotly({
# Error messages
@@ -105,7 +114,10 @@ server <- shinyServer(function(input, output, session) {
need(input$variable_name, 'Click the button to load data. Please allow some time')
)
# Load data
- masterDF <- loadNewData()
+ externalData <- data.frame()
+ modelData <- loadNewData()
+ externalData <- loadExternalData()
+ masterDF <- rbind(modelData,externalData)
# Convert from factor to character. For subsetting
masterDF$var_name <- as.character(masterDF$var_name)
# Convert to factor. Required for ggplot
@@ -129,6 +141,9 @@ server <- shinyServer(function(input, output, session) {
plt <- plt + geom_line()
}
)
+ # if (!is.null(loaded_data)) {
+ # plt <- plt + geom_line(data = loaded_data, linetype = 'dashed')
+ # }
# geom_point() +
# Earlier smoothing and y labels
# geom_smooth(aes(fill = "Spline fit")) +
diff --git a/shiny/workflowPlot/ui.R b/shiny/workflowPlot/ui.R
index a1204eef7e6..5d7b0bba267 100644
--- a/shiny/workflowPlot/ui.R
+++ b/shiny/workflowPlot/ui.R
@@ -31,11 +31,13 @@ ui <- shinyUI(fluidPage(
c(None='',
'Double Quote'='"',
'Single Quote'="'"),
- '"'),
+ ''),
actionButton("load_data", "Load External Data")
),
mainPanel(
- plotlyOutput("outputPlot")
+ plotlyOutput("outputPlot"),
+ verbatimTextOutput("info1"),
+ verbatimTextOutput("info")
)
)
))
From 9d6d7ab457489dc23f855c9b6adf4e4bc6a57124 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sun, 9 Jul 2017 10:34:37 -0500
Subject: [PATCH 076/771] Loading external data
---
shiny/workflowPlot/server.R | 37 +++++++++++++++++++++++++------------
1 file changed, 25 insertions(+), 12 deletions(-)
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index c823db298dd..d2b7b132292 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -92,19 +92,23 @@ server <- shinyServer(function(input, output, session) {
loadExternalData <-eventReactive(input$load_data,{
inFile <- input$file1
if (is.null(inFile))
- return(data.frame())
- output$info1 <- renderText({
- # paste0(nrow(externalData))
- paste0(inFile$datapath)
- })
+ return(NULL)
externalData <- read.csv(inFile$datapath, header=input$header, sep=input$sep,
quote=input$quote)
+ externalData$dates <- as.Date(externalData$dates)
+ externalData <- externalData %>%
+ dplyr::filter(var_name == input$variable_name)
+ # output$info1 <- renderText({
+ # paste0(nrow(externalData))
+ # # paste0(inFile$datapath)
+ # })
return(externalData)
})
- output$info <- renderText({
- inFile <- input$file1
- paste0(inFile$datapath)
- })
+ # output$info <- renderText({
+ # inFile <- input$file1
+ # paste0(inFile$datapath)
+ # # paste0(input$load_data)
+ # })
# Renders ggplotly
output$outputPlot <- renderPlotly({
# Error messages
@@ -116,7 +120,9 @@ server <- shinyServer(function(input, output, session) {
# Load data
externalData <- data.frame()
modelData <- loadNewData()
- externalData <- loadExternalData()
+ if (input$load_data>0) {
+ externalData <- loadExternalData()
+ }
masterDF <- rbind(modelData,externalData)
# Convert from factor to character. For subsetting
masterDF$var_name <- as.character(masterDF$var_name)
@@ -141,14 +147,21 @@ server <- shinyServer(function(input, output, session) {
plt <- plt + geom_line()
}
)
+ plt <- plt + labs(title=title, x=xlab, y=ylab)
+
# if (!is.null(loaded_data)) {
- # plt <- plt + geom_line(data = loaded_data, linetype = 'dashed')
+ # if (input$load_data>0) {
+ # loaded_data <- loadExternalData()
+ # output$info1 <- renderText({
+ # paste0(nrow(loaded_data))
+ # # paste0(inFile$datapath)
+ # })
+ # plt <- plt + geom_line(data = loaded_data,aes(x=dates, y=vals), linetype = 'dashed')
# }
# geom_point() +
# Earlier smoothing and y labels
# geom_smooth(aes(fill = "Spline fit")) +
# scale_y_continuous(labels=fancy_scientific) +
- plt <- plt + labs(title=title, x=xlab, y=ylab)
# Earlier color and fill values
# scale_color_manual(name = "", values = "black") +
# scale_fill_manual(name = "", values = "grey50")
From 291ac99f636c6c991a3c3dd0e7eb7abd928ddaa2 Mon Sep 17 00:00:00 2001
From: Michael Dietze
Date: Mon, 10 Jul 2017 09:01:51 -0400
Subject: [PATCH 077/771] Some explanatory comments added by Margaret Evans, as
well as the ability to specify an external state variable IC (z0)
---
modules/data.land/R/InventoryGrowthFusion.R | 37 ++++++++++++---------
1 file changed, 22 insertions(+), 15 deletions(-)
diff --git a/modules/data.land/R/InventoryGrowthFusion.R b/modules/data.land/R/InventoryGrowthFusion.R
index 37f7d6ed5b3..64158cc5aa3 100644
--- a/modules/data.land/R/InventoryGrowthFusion.R
+++ b/modules/data.land/R/InventoryGrowthFusion.R
@@ -8,12 +8,14 @@
##' @note Requires JAGS
##' @return an mcmc.list object
##' @export
-InventoryGrowthFusion <- function(data, cov.data=NULL,time_data = NULL,n.iter=5000, random = NULL, fixed = NULL,time_varying=NULL, burnin_plot = FALSE,save.jags="IGF.txt") {
+InventoryGrowthFusion <- function(data, cov.data=NULL,time_data = NULL,n.iter=5000, random = NULL, fixed = NULL,time_varying=NULL, burnin_plot = FALSE, save.jags = "IGF.txt", z0 = NULL) {
library(rjags)
- burnin.variables <- c("tau_add", "tau_dbh", "tau_inc", "mu")
+ # baseline variables to monitor
+ burnin.variables <- c("tau_add", "tau_dbh", "tau_inc", "mu") # process variability, dbh and tree-ring observation error, intercept
out.variables <- c("x", "tau_add", "tau_dbh", "tau_inc", "mu")
+ # start text object that will be manipulated (to build different linear models, swap in/out covariates)
TreeDataFusionMV <- "
model{
@@ -41,17 +43,17 @@ model{
x[i,1] ~ dnorm(x_ic,tau_ic)
} ## end loop over individuals
-## RANDOM_EFFECTS
+ ## RANDOM_EFFECTS
#### Priors
tau_dbh ~ dgamma(a_dbh,r_dbh)
tau_inc ~ dgamma(a_inc,r_inc)
tau_add ~ dgamma(a_add,r_add)
mu ~ dnorm(0.5,0.5)
-## FIXED EFFECTS BETAS
-## ENDOGENOUS BETAS
-## TIME VARYING BETAS
-## RANDOM EFFECT TAUS
+ ## FIXED EFFECTS BETAS
+ ## ENDOGENOUS BETAS
+ ## TIME VARYING BETAS
+ ## RANDOM EFFECT TAUS
}"
Pformula <- NULL
@@ -103,8 +105,9 @@ model{
paste0("+ alpha_", r_var,"[",counter,index,"]"))
## create random effect
for(j in seq_along(nr)){
- Reffects <- paste(Reffects,paste0("for(k in 1:",nr[j],"){\n"),
- paste0(" alpha_",r_var[j],"[k] ~ dnorm(0,tau_",r_var[j],")\n}\n"))
+ Reffects <- paste(Reffects,
+ paste0("for(k in 1:",nr[j],"){\n"),
+ paste0(" alpha_",r_var[j],"[k] ~ dnorm(0,tau_",r_var[j],")\n}\n"))
}
## create priors
Rpriors <- paste(Rpriors,paste0("tau_",r_var," ~ dgamma(1,0.1)\n",collapse = " "))
@@ -120,7 +123,7 @@ model{
if(FALSE){
## DEV TESTING FOR X, polynomial X, and X interactions
- fixed <- "X + X^3 + X*bob + bob + dia + X*Tmin[t]"
+ fixed <- "X + X^3 + X*bob + bob + dia + X*Tmin[t]" ## faux model, just for testing jags code
}
## Design matrix
if (is.null(fixed)) {
@@ -137,6 +140,7 @@ model{
fixed <- paste("~", fixed)
}
+ ### BEGIN adding in tree size (endogenous variable X)
## First deal with endogenous terms (X and X*cov interactions)
fixedX <- sub("~","",fixed, fixed=TRUE)
lm.terms <- gsub("[[:space:]]", "", strsplit(fixedX,split = "+",fixed=TRUE)[[1]]) ## split on + and remove whitespace
@@ -248,9 +252,10 @@ model{
if(any(duplicated(names(data)))){PEcAn.utils::logger.error("duplicated variable at Xf",names(data))}
- if(FALSE){
+ if(FALSE){ # always false...just for development
## DEVEL TESTING FOR TIME VARYING
- time_varying <- "TminJuly + PrecipDec + TminJuly*PrecipDec"
+ #time_varying <- "TminJuly + PrecipDec + TminJuly*PrecipDec"
+ time_varying <- "tmax_Jun + ppt_Dec + tmax_Jun*ppt_Dec"
time_data <- list(TminJuly = matrix(0,4,4),PrecipDec = matrix(1,4,4))
}
@@ -351,9 +356,11 @@ model{
}
## state variable initial condition
- z0 <- t(apply(data$y, 1, function(y) {
- -rev(cumsum(rev(y)))
- })) + data$z[, ncol(data$z)]
+ if(is.null(z0)){
+ z0 <- t(apply(data$y, 1, function(y) {
+ -rev(cumsum(rev(y)))
+ })) + data$z[, ncol(data$z)]
+ }
## JAGS initial conditions
nchain <- 3
From 448276124d9a609b3e04097f844f2feaffce8d33 Mon Sep 17 00:00:00 2001
From: Michael Dietze
Date: Mon, 10 Jul 2017 13:29:57 -0400
Subject: [PATCH 078/771] InventoryGrowthFusion: * add deviance to tracked
variables to allow post-hoc calculation of information metrics * fix bug in
setting time-varying variables w/o interactions * naming consistency, dropped
exta underscore in time interaction * namespace on ciEnvelope * add gelman
diagnostics & traceplots on betas
---
modules/data.land/R/InventoryGrowthFusion.R | 44 +++++++++++++------
.../R/InventoryGrowthFusionDiagnostics.R | 38 +++++++++++-----
2 files changed, 59 insertions(+), 23 deletions(-)
diff --git a/modules/data.land/R/InventoryGrowthFusion.R b/modules/data.land/R/InventoryGrowthFusion.R
index 64158cc5aa3..09da5559c50 100644
--- a/modules/data.land/R/InventoryGrowthFusion.R
+++ b/modules/data.land/R/InventoryGrowthFusion.R
@@ -13,7 +13,7 @@ InventoryGrowthFusion <- function(data, cov.data=NULL,time_data = NULL,n.iter=50
# baseline variables to monitor
burnin.variables <- c("tau_add", "tau_dbh", "tau_inc", "mu") # process variability, dbh and tree-ring observation error, intercept
- out.variables <- c("x", "tau_add", "tau_dbh", "tau_inc", "mu")
+ out.variables <- c("deviance","x", "tau_add", "tau_dbh", "tau_inc", "mu")
# start text object that will be manipulated (to build different linear models, swap in/out covariates)
TreeDataFusionMV <- "
@@ -57,7 +57,11 @@ model{
}"
Pformula <- NULL
- ## RANDOM EFFECTS
+ ########################################################################
+ ###
+ ### RANDOM EFFECTS
+ ###
+ ########################################################################
if (!is.null(random)) {
Rpriors <- NULL
Reffects <- NULL
@@ -121,6 +125,11 @@ model{
TreeDataFusionMV <- gsub(pattern = "## RANDOM_EFFECTS", Reffects, TreeDataFusionMV)
} ### END RANDOM EFFECTS
+ ########################################################################
+ ###
+ ### FIXED EFFECTS
+ ###
+ ########################################################################
if(FALSE){
## DEV TESTING FOR X, polynomial X, and X interactions
fixed <- "X + X^3 + X*bob + bob + dia + X*Tmin[t]" ## faux model, just for testing jags code
@@ -252,6 +261,12 @@ model{
if(any(duplicated(names(data)))){PEcAn.utils::logger.error("duplicated variable at Xf",names(data))}
+ ########################################################################
+ ###
+ ### TIME-VARYING
+ ###
+ ########################################################################
+
if(FALSE){ # always false...just for development
## DEVEL TESTING FOR TIME VARYING
#time_varying <- "TminJuly + PrecipDec + TminJuly*PrecipDec"
@@ -259,10 +274,9 @@ model{
time_data <- list(TminJuly = matrix(0,4,4),PrecipDec = matrix(1,4,4))
}
- ## Time-varying covariates
if(!is.null(time_varying)){
if (is.null(time_data)) {
- print("time_varying formula provided but time_data is absent:", time_varying)
+ PEcAn.utils::logger.error("time_varying formula provided but time_data is absent:", time_varying)
}
Xt.priors <- ""
@@ -270,19 +284,20 @@ model{
t_vars <- gsub(" ","",unlist(strsplit(time_varying,"+",fixed=TRUE))) ## split on +, remove whitespace
## check for interaction terms
it_vars <- t_vars[grep(pattern = "*",x=t_vars,fixed = TRUE)]
- t_vars <- t_vars[!(t_vars == it_vars)]
-
- ## need to deal with interactions with fixed variables
- ## will get really nasty if interactions are with catagorical variables
- ## need to create new data matrices on the fly
+ if(length(it_vars) > 0){
+ t_vars <- t_vars[!(t_vars == it_vars)]
+ }
+ ## INTERACTIONS WITH TIME-VARYING VARS
+ ## TODO: deal with interactions with catagorical variables
+ ## need to create new data matrices on the fly
for(i in seq_along(it_vars)){
##is covariate fixed or time varying?
covX <- strsplit(it_vars[i],"*",fixed=TRUE)[[1]]
tvar <- length(grep("[t]",covX[1],fixed=TRUE)) > 0
tvar[2] <- length(grep("[t]",covX[2],fixed=TRUE)) > 0
- myBeta <- "beta_"
+ myBeta <- "beta"
for(j in 1:2){
if(j == 2) myBeta <- paste0(myBeta,"_")
if(tvar[j]){
@@ -377,9 +392,11 @@ model{
year = rep(0, data$nt))
}
- ## compile JAGS model
+
+ PEcAn.utils::logger.info("COMPILE JAGS MODEL")
j.model <- jags.model(file = textConnection(TreeDataFusionMV), data = data, inits = init, n.chains = 3)
- ## burn-in
+
+ PEcAn.utils::logger.info("BURN IN")
jags.out <- coda.samples(model = j.model,
variable.names = burnin.variables,
n.iter = min(n.iter, 2000))
@@ -387,7 +404,8 @@ model{
plot(jags.out)
}
- ## run MCMC
+ PEcAn.utils::logger.info("RUN MCMC")
+ load.module("dic")
coda.samples(model = j.model, variable.names = out.variables, n.iter = n.iter)
} # InventoryGrowthFusion
diff --git a/modules/data.land/R/InventoryGrowthFusionDiagnostics.R b/modules/data.land/R/InventoryGrowthFusionDiagnostics.R
index 54a5cfe5f1a..e0723733115 100644
--- a/modules/data.land/R/InventoryGrowthFusionDiagnostics.R
+++ b/modules/data.land/R/InventoryGrowthFusionDiagnostics.R
@@ -6,8 +6,6 @@
##' @export
InventoryGrowthFusionDiagnostics <- function(jags.out, combined) {
- #### Diagnostic plots
-
### DBH par(mfrow=c(3,2))
layout(matrix(1:8, 4, 2, byrow = TRUE))
out <- as.matrix(jags.out)
@@ -22,7 +20,7 @@ InventoryGrowthFusionDiagnostics <- function(jags.out, combined) {
plot(data$time, ci[2, sel], type = "n",
ylim = range(rng), ylab = "DBH (cm)", main = i)
- ciEnvelope(data$time, ci[1, sel], ci[3, sel], col = "lightBlue")
+ PEcAn.visualization::ciEnvelope(data$time, ci[1, sel], ci[3, sel], col = "lightBlue")
points(data$time, data$z[i, ], pch = "+", cex = 1.5)
# lines(data$time,z0[i,],lty=2)
@@ -34,7 +32,7 @@ InventoryGrowthFusionDiagnostics <- function(jags.out, combined) {
plot(data$time[-1], inc.ci[2, ], type = "n",
ylim = range(inc.ci, na.rm = TRUE), ylab = "Ring Increment (mm)")
- ciEnvelope(data$time[-1], inc.ci[1, ], inc.ci[3, ], col = "lightBlue")
+ PEcAn.visualization::ciEnvelope(data$time[-1], inc.ci[1, ], inc.ci[3, ], col = "lightBlue")
points(data$time, data$y[i, ] * 5, pch = "+", cex = 1.5, type = "b", lty = 2)
}
@@ -47,26 +45,46 @@ InventoryGrowthFusionDiagnostics <- function(jags.out, combined) {
}
## process model
- vars <- (1:ncol(out))[-c(which(substr(colnames(out), 1, 1) == "x"), grep("tau", colnames(out)),
- grep("year", colnames(out)), grep("ind", colnames(out)))]
+ vars <- (1:ncol(out))[-c(which(substr(colnames(out), 1, 1) == "x"),
+ grep("tau", colnames(out)),
+ grep("year", colnames(out)),
+ grep("ind", colnames(out)),
+ grep("alpha",colnames(out)),
+ grep("deviance",colnames(out)))]
par(mfrow = c(1, 1))
for (i in vars) {
hist(out[, i], main = colnames(out)[i])
+ abline(v=0,lwd=3)
}
- if (length(vars) > 1) {
+ if (length(vars) > 1 & length(vars) < 10) {
pairs(out[, vars])
}
+
+ if("deviance" %in% colnames(out)){
+ hist(out[,"deviance"])
+ vars <- c(vars,which(colnames(out)=="deviance"))
+ }
+
+
+ ## rebuild coda for just vars
+ var.out <- as.mcmc.list(lapply(jags.out,function(x){ x[,vars]}))
+
+ ## convergence
+ gelman.diag(var.out)
+
+ #### Diagnostic plots
+ plot(var.out)
## Standard Deviations layout(matrix(c(1,2,3,3),2,2,byrow=TRUE))
par(mfrow = c(2, 3))
prec <- out[, grep("tau", colnames(out))]
- for (i in seq_along(prec)) {
+ for (i in seq_along(colnames(prec))) {
hist(1 / sqrt(prec[, i]), main = colnames(prec)[i])
}
cor(prec)
# pairs(prec)
-
+
par(mfrow = c(1, 1))
### YEAR
year.cols <- grep("year", colnames(out))
@@ -74,7 +92,7 @@ InventoryGrowthFusionDiagnostics <- function(jags.out, combined) {
ci.yr <- apply(out[, year.cols], 2, quantile, c(0.025, 0.5, 0.975))
plot(data$time, ci.yr[2, ], type = "n",
ylim = range(ci.yr, na.rm = TRUE), ylab = "Year Effect")
- ciEnvelope(data$time, ci.yr[1, ], ci.yr[3, ], col = "lightBlue")
+ PEcAn.visualization::ciEnvelope(data$time, ci.yr[1, ], ci.yr[3, ], col = "lightBlue")
lines(data$time, ci.yr[2, ], lty = 1, lwd = 2)
abline(h = 0, lty = 2)
}
From 93f6895c2b14f36f5b930fb9e0f4743517ef77ec Mon Sep 17 00:00:00 2001
From: Michael Dietze
Date: Mon, 10 Jul 2017 14:49:32 -0400
Subject: [PATCH 079/771] tree-ring: visualization of arbitrary random effects
---
.../data.land/R/InventoryGrowthFusionDiagnostics.R | 12 ++++++++++++
1 file changed, 12 insertions(+)
diff --git a/modules/data.land/R/InventoryGrowthFusionDiagnostics.R b/modules/data.land/R/InventoryGrowthFusionDiagnostics.R
index e0723733115..9f453808e69 100644
--- a/modules/data.land/R/InventoryGrowthFusionDiagnostics.R
+++ b/modules/data.land/R/InventoryGrowthFusionDiagnostics.R
@@ -86,6 +86,18 @@ InventoryGrowthFusionDiagnostics <- function(jags.out, combined) {
par(mfrow = c(1, 1))
+ ### alpha
+ alpha.cols <- grep("alpha", colnames(out))
+ if (length(alpha.cols) > 0) {
+ alpha.ord <- 1:length(alpha.cols)
+ ci.alpha <- apply(out[, alpha.cols], 2, quantile, c(0.025, 0.5, 0.975))
+ plot(alpha.ord, ci.alpha[2, ], type = "n",
+ ylim = range(ci.alpha, na.rm = TRUE), ylab = "Random Effects")
+ PEcAn.visualization::ciEnvelope(alpha.ord, ci.alpha[1, ], ci.alpha[3, ], col = "lightBlue")
+ lines(alpha.ord, ci.alpha[2, ], lty = 1, lwd = 2)
+ abline(h = 0, lty = 2)
+ }
+
### YEAR
year.cols <- grep("year", colnames(out))
if (length(year.cols > 0)) {
From 096c8cadd3a00ea39888a77826fec9501126f512 Mon Sep 17 00:00:00 2001
From: Michael Dietze
Date: Mon, 10 Jul 2017 20:09:42 -0400
Subject: [PATCH 080/771] tree ring: time index fix on X*time-varying
---
modules/data.land/R/InventoryGrowthFusion.R | 5 +++--
1 file changed, 3 insertions(+), 2 deletions(-)
diff --git a/modules/data.land/R/InventoryGrowthFusion.R b/modules/data.land/R/InventoryGrowthFusion.R
index 09da5559c50..cb94fb047ac 100644
--- a/modules/data.land/R/InventoryGrowthFusion.R
+++ b/modules/data.land/R/InventoryGrowthFusion.R
@@ -138,12 +138,14 @@ model{
if (is.null(fixed)) {
Xf <- NULL
} else {
+
## check for covariate data (note: will falsely fail if only effect is X)
if (is.null(cov.data)) {
print("formula provided but covariate data is absent:", fixed)
} else {
cov.data <- as.data.frame(cov.data)
}
+
## check if there's a tilda in the formula
if (length(grep("~", fixed)) == 0) {
fixed <- paste("~", fixed)
@@ -183,8 +185,7 @@ model{
}
if(any(duplicated(names(data)))){PEcAn.utils::logger.error("duplicated variable at covX",names(data))}
-# covX <- paste0(covX,"[i,t-1]")
- myIndex <- "[i,t-1]"
+ myIndex <- "[i,t]"
} else {
## variable is fixed
if(covX %in% colnames(cov.data)){ ## covariate present
From f360a793b4879bfde92d6ecea348b4b6afe6e9e8 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Tue, 11 Jul 2017 16:15:32 -0400
Subject: [PATCH 081/771] first pass at dalec default param file
---
models/dalec/inst/default_param.dalec | 16 ++++++++++++++++
1 file changed, 16 insertions(+)
create mode 100644 models/dalec/inst/default_param.dalec
diff --git a/models/dalec/inst/default_param.dalec b/models/dalec/inst/default_param.dalec
new file mode 100644
index 00000000000..112294d43be
--- /dev/null
+++ b/models/dalec/inst/default_param.dalec
@@ -0,0 +1,16 @@
+cmdTag val
+t1 4.41E-06 #Decomposition from litter to SOM
+t2 0.473267 #% GPP lost to respiration
+t3 0.314951 #% NPP sent to foliage
+t4 0.434401 #% NPP sent to roots
+t5 0.00266518 #rate of leaf loss
+t6 2.06E-06 #rate of wood loss
+t7 2.48E-03 #rate of root loss
+t8 2.28E-02 #rate of respiration from litter
+t9 2.65E-06 #rate of respiration from litter SOM
+cf0 57.7049 #initial canopy foliar carbon (g/m2)
+cw0 769.863 #initial pool of woody carbon (g/m2)
+cr0 101.955 #initial pool of fine root carbon (g/m2)
+cl0 40.4494 #initial pool of litter carbon (g/m2)
+cs0 9896.7 #initial pool of soil organic matter and woody debris carbon (g/m2)
+##taken from dalec_model.c
\ No newline at end of file
From 5ae4d4748abdb4a55693353f4e97c01b0f971f2e Mon Sep 17 00:00:00 2001
From: annethomas
Date: Tue, 11 Jul 2017 18:12:45 -0400
Subject: [PATCH 082/771] IC for write.configs.dalec in progress
---
models/dalec/R/write.configs.dalec.R | 56 +++++++++++++++++++++++++++
models/dalec/inst/default_param.dalec | 2 +-
2 files changed, 57 insertions(+), 1 deletion(-)
diff --git a/models/dalec/R/write.configs.dalec.R b/models/dalec/R/write.configs.dalec.R
index 4cc8c2a0c51..b4d0f51e993 100644
--- a/models/dalec/R/write.configs.dalec.R
+++ b/models/dalec/R/write.configs.dalec.R
@@ -114,6 +114,62 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
}
}
+ ### INITIAL CONDITIONS
+
+ default.param <- read.table(system.file("default_param.dalec", package = "PEcAn.DALEC"))
+ IC.param <- data.frame()
+ if (!is.null(settings$run$inputs$poolinitcond$path)) {
+ IC.path <- settings$run$inputs$poolinitcond$path
+ IC.nc <- try(ncdf4::nc_open(IC.path))
+
+ if(class(IC.nc) != "try-error"){
+ # cf0 initial canopy foliar carbon (g/m2)
+ leaf <- try(ncdf4::ncvar_get(IC.nc,"leaf_carbon_content"),silent = TRUE)
+ if (!is.na(leaf) && is.numeric(leaf)) {
+ param[["cf0"]] <- leaf
+ }
+ # cw0 initial pool of woody carbon (g/m2)
+ AbvGrndWood <- try(ncdf4::ncvar_get(IC.nc,"AbvGrndWood"),silent = TRUE)
+ if (!is.na(AbvGrndWood) && is.numeric(AbvGrndWood)) {
+ roots <- try(ncdf4::ncvar_get(IC.nc,"root_carbon_content"),silent = TRUE)
+ if(!is.na(roots) && is.numeric(roots)){
+ #wood <- partitioned coarse roots + abvgroundwood
+ }
+ else{
+ #wood <- (roots-default.fine) + abvgroundwood
+ }
+ param[["cw0"]] <- wood
+ }
+ # cr0 initial pool of fine root carbon (g/m2)
+ roots <- try(ncdf4::ncvar_get(IC.nc,"root_carbon_content"),silent = TRUE)
+ if (!is.na(roots) && is.numeric(roots)) {
+ #partition fine roots
+ param[["cr0"]] <- roots
+ }
+ # cl0 initial pool of litter carbon (g/m2)
+ litter <- try(ncdf4::ncvar_get(IC.nc,"litter_carbon_content"),silent = TRUE)
+ if (!is.na(litter) && is.numeric(litter)) {
+ param[["cl0"]] <- litter
+ }
+ # cs0 initial pool of soil organic matter and woody debris carbon (g/m2)
+ soil <- try(ncdf4::ncvar_get(IC.nc,"soil_organic_carbon_content"),silent = TRUE)
+ if(!is.numeric(soil)){
+ soil <- try(ncdf4::ncvar_get(IC.nc,"soil_carbon_content"),silent = TRUE)
+ if(is.numeric(soil)){
+ wood <- try(ncdf4::ncvar_get(IC.nc,"wood_debris_carbon_content"),silent = TRUE)
+ if(is.numeric(wood)){
+ soil_and_wood <- soil + sum(wood)
+ param[["cs0"]] <- soil_and_wood
+ }
+ }
+ }
+ }
+ else{
+ PEcAn.utils::logger.error("Bad initial conditions filepath; kept defaults")
+ }
+ }
+
+
# find out where to write run/ouput
rundir <- file.path(settings$host$rundir, as.character(run.id))
outdir <- file.path(settings$host$outdir, as.character(run.id))
diff --git a/models/dalec/inst/default_param.dalec b/models/dalec/inst/default_param.dalec
index 112294d43be..38f238d4ac5 100644
--- a/models/dalec/inst/default_param.dalec
+++ b/models/dalec/inst/default_param.dalec
@@ -1,4 +1,4 @@
-cmdTag val
+cmdFlag val
t1 4.41E-06 #Decomposition from litter to SOM
t2 0.473267 #% GPP lost to respiration
t3 0.314951 #% NPP sent to foliage
From 13176cdb02c66c827c386177eaa7f582810e7c11 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Wed, 12 Jul 2017 14:02:11 -0400
Subject: [PATCH 083/771] Add wood vars and reorder
---
utils/data/standard_vars.csv | 10 ++++++----
1 file changed, 6 insertions(+), 4 deletions(-)
diff --git a/utils/data/standard_vars.csv b/utils/data/standard_vars.csv
index 2186b5d33f3..7dbdcfb3790 100755
--- a/utils/data/standard_vars.csv
+++ b/utils/data/standard_vars.csv
@@ -26,20 +26,22 @@
"surface_litter_carbon_flux","surface_litter_carbon_flux","kg C m-2 s-1","Surface Litter Carbon Flux","Carbon Fluxes","real","lon","lat","time",NA,"Total carbon flux of surface litter"
"subsurface_litter_carbon_flux","subsurface_litter_carbon_flux","kg C m-2 s-1","Subsurface Litter Carbon Flux","Carbon Fluxes","real","lon","lat","time","depth","Total carbon flux of subsurface litter"
"leaf_litter_carbon_flux","leaf_litter_carbon_flux","kg C m-2 s-1","Leaf Litter Carbon Flux","Carbon Fluxes","real","lon","lat","time",NA,"Carbon flux of leaf litter"
+"WoodyLitter","wood_litter_carbon_flux","kg C m-2 s-1","Wood Litter Carbon Flux","Deprecated","real","lon","lat","time",NA,"DALEC output; haven't yet resolved standard woody litter flux"
"wood_debris_carbon_flux","wood_debris_carbon_flux","kg C m-2 s-1","Wood Debris Carbon Flux","Carbon Fluxes","real","lon","lat","time","wdsize","Total carbon flux of woody debris, including downed woody debris and standing deadwood; excludes litter; size class defined by wdsize dimension"
+"GWBI",NA,"kg C m-2 month-1","Gross Woody Biomass Increment","Carbon Pools","real","lon","lat","time",NA,"Variable most analogous to tree-ring-derived change in stem biomass (before mortality/CWD flux)"
+"CWDI",NA,"kg C m-2 month-1","Coarse Woody Debris Increment","Carbon Pools","real","lon","lat","time",NA,"Variable most analogous to flux of woody material material to the detrital pool resulting from mortality"
"CO2CAS",NA,"ppmv","CO2CAS","Carbon Fluxes","real","lon","lat","time",NA,"CO2 in canopy air space; ED2 output variable"
"CropYield",NA,"kg m-2","CropYield","Carbon Fluxes","real","lon","lat","time","pft","Crop yield; ED2 output variable"
"poolname",NA,"(-)","Name of each Carbon Pool","Deprecated","character","nchar","npool",NA,NA,"Name of each carbon pool (i.e., wood or Coarse Woody Debris)"
"CarbPools",NA,"kg C m-2","Size of each carbon pool","Deprecated","real","lon","lat","npool","time","Total size of each carbon pool vertically integrated over the entire soil column"
"AbvGrndWood",NA,"kg C m-2","Above ground woody biomass","Carbon Pools","real","lon","lat","time",NA,"Total above ground wood biomass"
"TotLivBiom",NA,"kg C m-2","Total living biomass","Carbon Pools","real","lon","lat","time",NA,"Total carbon content of the living biomass (leaves+roots+wood)"
-"TotSoilCarb",NA,"kg C m-2","Total Soil Carbon","Carbon Pools","real","lon","lat","time",NA,"Total soil and litter carbon content vertically integrated over the enire soil column"
-"LAI",NA,"m2 m-2","Leaf Area Index","Carbon Pools","real","lon","lat","time",NA,"Area of leaves per area ground"
-"GWBI",NA,"kg C m-2 month-1","Gross Woody Biomass Increment","Carbon Pools","real","lon","lat","time",NA,"Variable most analogous to tree-ring-derived change in stem biomass (before mortality/CWD flux)"
-"CWDI",NA,"kg C m-2 month-1","Coarse Woody Debris Increment","Carbon Pools","real","lon","lat","time",NA,"Variable most analogous to flux of woody material material to the detrital pool resulting from mortality"
"AGB",NA,"kg C m-2","Total aboveground biomass","Carbon Pools","real","lon","lat","time",NA,"aboveground biomass"
+"LAI",NA,"m2 m-2","Leaf Area Index","Carbon Pools","real","lon","lat","time",NA,"Area of leaves per area ground"
"leaf_carbon_content","leaf_carbon_content","kg C m-2","Leaf Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Leaf carbon content"
"root_carbon_content","root_carbon_content_of_size_class","kg C m-2","Root Carbon Content","Carbon Pools","real","lon","lat","time","rtsize","Root carbon content by size class"
+"wood_carbon_content","wood_carbon_content","kg C m-2","Wood Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Wood carbon content including above (AbvGrndWood) and below ground (coarse roots, shared with root_carbon_content)"
+"TotSoilCarb",NA,"kg C m-2","Total Soil Carbon","Carbon Pools","real","lon","lat","time",NA,"Total soil and litter carbon content vertically integrated over the enire soil column"
"litter_carbon_content","litter_carbon_content","kg C m-2","Litter Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Total carbon content of litter pool, excluding coarse woody debris"
"surface_litter_carbon_content","surface_litter_carbon_content","kg C m-2","Surface Litter Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Carbon content of surface litter pool"
"subsurface_litter_carbon_content","subsurface_litter_carbon_content","kg C m-2","Subsurface Litter Carbon Content","Carbon Pools","real","lon","lat","time","depth","Carbon content of subsurface litter pool; depth dimension optional"
From b07fc69e2be4334967d1dcb24332548486c0504a Mon Sep 17 00:00:00 2001
From: annethomas
Date: Wed, 12 Jul 2017 14:15:24 -0400
Subject: [PATCH 084/771] Rename DALEC output vars to standard names
---
models/dalec/R/model2netcdf.DALEC.R | 38 ++++++++++++++---------------
1 file changed, 18 insertions(+), 20 deletions(-)
diff --git a/models/dalec/R/model2netcdf.DALEC.R b/models/dalec/R/model2netcdf.DALEC.R
index 076e370c11c..30bf76dd896 100644
--- a/models/dalec/R/model2netcdf.DALEC.R
+++ b/models/dalec/R/model2netcdf.DALEC.R
@@ -48,25 +48,23 @@ model2netcdf.DALEC <- function(outdir, sitelat, sitelon, start_date, end_date) {
## Setup outputs for netCDF file in appropriate units
output <- list()
- ## standard variables: Fluxes
+ ## Fluxes
output[[1]] <- (sub.DALEC.output[, 1] * 0.001)/timestep.s # Autotrophic Respiration in kgC/m2/s
output[[2]] <- (sub.DALEC.output[, 21] + sub.DALEC.output[, 23]) * 0.001 / timestep.s # Heterotrophic Resp kgC/m2/s
output[[3]] <- (sub.DALEC.output[, 31] * 0.001)/timestep.s # GPP in kgC/m2/s
output[[4]] <- (sub.DALEC.output[, 33] * 0.001)/timestep.s # NEE in kgC/m2/s
output[[5]] <- (sub.DALEC.output[, 3] + sub.DALEC.output[, 5] + sub.DALEC.output[, 7]) *
0.001/timestep.s # NPP kgC/m2/s
+ output[[6]] <- (sub.DALEC.output[, 9] * 0.001) / timestep.s # Leaf Litter Flux, kgC/m2/s
+ output[[7]] <- (sub.DALEC.output[, 11] * 0.001) / timestep.s # Woody Litter Flux, kgC/m2/s
+ output[[8]] <- (sub.DALEC.output[, 13] * 0.001) / timestep.s # Root Litter Flux, kgC/m2/s
- ## non-standard variables: Fluxes
- output[[6]] <- (sub.DALEC.output[, 9] * 0.001) / timestep.s # Leaf Litter, kgC/m2/s
- output[[7]] <- (sub.DALEC.output[, 11] * 0.001) / timestep.s # Woody Litter, kgC/m2/s
- output[[8]] <- (sub.DALEC.output[, 13] * 0.001) / timestep.s # Root Litter, kgC/m2/s
-
- ## non-standard variables: Pools
- output[[9]] <- (sub.DALEC.output[, 15] * 0.001) # Leaf Biomass, kgC/m2
- output[[10]] <- (sub.DALEC.output[, 17] * 0.001) # Wood Biomass, kgC/m2
- output[[11]] <- (sub.DALEC.output[, 19] * 0.001) # Root Biomass, kgC/m2
- output[[12]] <- (sub.DALEC.output[, 27] * 0.001) # Litter Biomass, kgC/m2
- output[[13]] <- (sub.DALEC.output[, 29] * 0.001) # Soil C, kgC/m2
+ ## Pools
+ output[[9]] <- (sub.DALEC.output[, 15] * 0.001) # Leaf Carbon, kgC/m2
+ output[[10]] <- (sub.DALEC.output[, 17] * 0.001) # Wood Carbon, kgC/m2
+ output[[11]] <- (sub.DALEC.output[, 19] * 0.001) # Root Carbon, kgC/m2
+ output[[12]] <- (sub.DALEC.output[, 27] * 0.001) # Litter Carbon, kgC/m2
+ output[[13]] <- (sub.DALEC.output[, 29] * 0.001) # Soil Carbon, kgC/m2
## standard composites
output[[14]] <- output[[1]] + output[[2]] # Total Respiration
@@ -93,14 +91,14 @@ model2netcdf.DALEC <- function(outdir, sitelat, sitelon, start_date, end_date) {
nc_var[[3]] <- mstmipvar("GPP", lat, lon, t, NA)
nc_var[[4]] <- mstmipvar("NEE", lat, lon, t, NA)
nc_var[[5]] <- mstmipvar("NPP", lat, lon, t, NA)
- nc_var[[6]] <- ncvar_def("LeafLitter", "kgC/m2/s", list(lon, lat, t), -999)
- nc_var[[7]] <- ncvar_def("WoodyLitter", "kgC/m2/s", list(lon, lat, t), -999)
- nc_var[[8]] <- ncvar_def("RootLitter", "kgC/m2/s", list(lon, lat, t), -999)
- nc_var[[9]] <- ncvar_def("LeafBiomass", "kgC/m2", list(lon, lat, t), -999)
- nc_var[[10]] <- ncvar_def("WoodBiomass", "kgC/m2", list(lon, lat, t), -999)
- nc_var[[11]] <- ncvar_def("RootBiomass", "kgC/m2", list(lon, lat, t), -999)
- nc_var[[12]] <- ncvar_def("LitterBiomass", "kgC/m2", list(lon, lat, t), -999)
- nc_var[[13]] <- ncvar_def("SoilC", "kgC/m2", list(lon, lat, t), -999)
+ nc_var[[6]] <- ncvar_def("leaf_litter_carbon_flux", "kgC/m2/s", list(lon, lat, t), -999) #was LeafLitter
+ nc_var[[7]] <- ncvar_def("WoodyLitter", "kgC/m2/s", list(lon, lat, t), -999) #need to resolve standard woody litter flux
+ nc_var[[8]] <- ncvar_def("subsurface_litter_carbon_flux", "kgC/m2/s", list(lon, lat, t), -999) #was RootLitter
+ nc_var[[9]] <- ncvar_def("leaf_carbon_content", "kgC/m2", list(lon, lat, t), -999) #was LeafBiomass
+ nc_var[[10]] <- ncvar_def("wood_carbon_content", "kgC/m2", list(lon, lat, t), -999) #was WoodBiomass
+ nc_var[[11]] <- ncvar_def("root_carbon_content", "kgC/m2", list(lon, lat, t,rtsize), -999) #was RootBiomass
+ nc_var[[12]] <- ncvar_def("litter_carbon_content", "kgC/m2", list(lon, lat, t), -999) #was LitterBiomass
+ nc_var[[13]] <- ncvar_def("soil_carbon_content", "kgC/m2", list(lon, lat, t), -999) #was SoilC; SOM pool technically includes woody debris (can't be represented by our standard)
nc_var[[14]] <- mstmipvar("TotalResp", lat, lon, t, NA)
nc_var[[15]] <- mstmipvar("TotLivBiom", lat, lon, t, NA)
From c8ccb6b2cde3a395fc2c32000e6fea0d6880d0a7 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Wed, 12 Jul 2017 15:26:53 -0400
Subject: [PATCH 085/771] Fix categories
---
utils/data/standard_vars.csv | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/utils/data/standard_vars.csv b/utils/data/standard_vars.csv
index 7dbdcfb3790..d5f98969a27 100755
--- a/utils/data/standard_vars.csv
+++ b/utils/data/standard_vars.csv
@@ -28,8 +28,8 @@
"leaf_litter_carbon_flux","leaf_litter_carbon_flux","kg C m-2 s-1","Leaf Litter Carbon Flux","Carbon Fluxes","real","lon","lat","time",NA,"Carbon flux of leaf litter"
"WoodyLitter","wood_litter_carbon_flux","kg C m-2 s-1","Wood Litter Carbon Flux","Deprecated","real","lon","lat","time",NA,"DALEC output; haven't yet resolved standard woody litter flux"
"wood_debris_carbon_flux","wood_debris_carbon_flux","kg C m-2 s-1","Wood Debris Carbon Flux","Carbon Fluxes","real","lon","lat","time","wdsize","Total carbon flux of woody debris, including downed woody debris and standing deadwood; excludes litter; size class defined by wdsize dimension"
-"GWBI",NA,"kg C m-2 month-1","Gross Woody Biomass Increment","Carbon Pools","real","lon","lat","time",NA,"Variable most analogous to tree-ring-derived change in stem biomass (before mortality/CWD flux)"
-"CWDI",NA,"kg C m-2 month-1","Coarse Woody Debris Increment","Carbon Pools","real","lon","lat","time",NA,"Variable most analogous to flux of woody material material to the detrital pool resulting from mortality"
+"GWBI",NA,"kg C m-2 month-1","Gross Woody Biomass Increment","Carbon Fluxes","real","lon","lat","time",NA,"Variable most analogous to tree-ring-derived change in stem biomass (before mortality/CWD flux)"
+"CWDI",NA,"kg C m-2 month-1","Coarse Woody Debris Increment","Carbon Fluxes","real","lon","lat","time",NA,"Variable most analogous to flux of woody material material to the detrital pool resulting from mortality"
"CO2CAS",NA,"ppmv","CO2CAS","Carbon Fluxes","real","lon","lat","time",NA,"CO2 in canopy air space; ED2 output variable"
"CropYield",NA,"kg m-2","CropYield","Carbon Fluxes","real","lon","lat","time","pft","Crop yield; ED2 output variable"
"poolname",NA,"(-)","Name of each Carbon Pool","Deprecated","character","nchar","npool",NA,NA,"Name of each carbon pool (i.e., wood or Coarse Woody Debris)"
From 49a457e251415c4d78ee6ed4ec597380cc721d71 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Wed, 12 Jul 2017 15:34:24 -0400
Subject: [PATCH 086/771] Add fine/coarse root vars
---
utils/data/standard_vars.csv | 14 ++++++++++++++
1 file changed, 14 insertions(+)
diff --git a/utils/data/standard_vars.csv b/utils/data/standard_vars.csv
index d5f98969a27..3a650db0c03 100755
--- a/utils/data/standard_vars.csv
+++ b/utils/data/standard_vars.csv
@@ -40,6 +40,20 @@
"LAI",NA,"m2 m-2","Leaf Area Index","Carbon Pools","real","lon","lat","time",NA,"Area of leaves per area ground"
"leaf_carbon_content","leaf_carbon_content","kg C m-2","Leaf Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Leaf carbon content"
"root_carbon_content","root_carbon_content_of_size_class","kg C m-2","Root Carbon Content","Carbon Pools","real","lon","lat","time","rtsize","Root carbon content by size class"
+"fine_root_carbon_content","fine_root_carbon_content","kg C m-2","Fine Root Carbon Content","Carbon Pools","real","lon","lat","time","depth","Carbon content of fine roots (2 mm and smaller); alternative to providing dimensions for root_carbon_content"
+"coarse_root_carbon_content","coarse_root_carbon_content","kg C m-2","Coarse Root Carbon Content","Carbon Pools","real","lon","lat","time","depth","Carbon content of coarse roots (larger than 2 mm); alternative to providing dimensions for root_carbon_content"
+"WoodyLitter","wood_litter_carbon_flux","kg C m-2 s-1","Wood Litter Carbon Flux","Deprecated","real","lon","lat","time",NA,"DALEC output; haven't yet resolved standard woody litter flux"
+"wood_debris_carbon_flux","wood_debris_carbon_flux","kg C m-2 s-1","Wood Debris Carbon Flux","Carbon Fluxes","real","lon","lat","time","wdsize","Total carbon flux of woody debris, including downed woody debris and standing deadwood; excludes litter; size class defined by wdsize dimension"
+"GWBI",NA,"kg C m-2 month-1","Gross Woody Biomass Increment","Carbon Fluxes","real","lon","lat","time",NA,"Variable most analogous to tree-ring-derived change in stem biomass (before mortality/CWD flux)"
+"CWDI",NA,"kg C m-2 month-1","Coarse Woody Debris Increment","Carbon Fluxes","real","lon","lat","time",NA,"Variable most analogous to flux of woody material material to the detrital pool resulting from mortality"
+"CO2CAS",NA,"ppmv","CO2CAS","Carbon Fluxes","real","lon","lat","time",NA,"CO2 in canopy air space; ED2 output variable"
+"CropYield",NA,"kg m-2","CropYield","Carbon Fluxes","real","lon","lat","time","pft","Crop yield; ED2 output variable"
+"poolname",NA,"(-)","Name of each Carbon Pool","Deprecated","character","nchar","npool",NA,NA,"Name of each carbon pool (i.e., wood or Coarse Woody Debris)"
+"CarbPools",NA,"kg C m-2","Size of each carbon pool","Deprecated","real","lon","lat","npool","time","Total size of each carbon pool vertically integrated over the entire soil column"
+"AbvGrndWood",NA,"kg C m-2","Above ground woody biomass","Carbon Pools","real","lon","lat","time",NA,"Total above ground wood biomass"
+"TotLivBiom",NA,"kg C m-2","Total living biomass","Carbon Pools","real","lon","lat","time",NA,"Total carbon content of the living biomass (leaves+roots+wood)"
+"AGB",NA,"kg C m-2","Total aboveground biomass","Carbon Pools","real","lon","lat","time",NA,"aboveground biomass"
+"wood_carbon_content","wood_carbon_content","kg C m-2","Wood Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Wood carbon content including above (AbvGrndWood) and below ground (coarse roots, shared with root_carbon_content)"
"wood_carbon_content","wood_carbon_content","kg C m-2","Wood Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Wood carbon content including above (AbvGrndWood) and below ground (coarse roots, shared with root_carbon_content)"
"TotSoilCarb",NA,"kg C m-2","Total Soil Carbon","Carbon Pools","real","lon","lat","time",NA,"Total soil and litter carbon content vertically integrated over the enire soil column"
"litter_carbon_content","litter_carbon_content","kg C m-2","Litter Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Total carbon content of litter pool, excluding coarse woody debris"
From 6b9983f522d3ad644be1422bfb16b6a59ccbb1a6 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Wed, 12 Jul 2017 15:38:24 -0400
Subject: [PATCH 087/771] Revert "Add fine/coarse root vars"
This reverts commit 49a457e251415c4d78ee6ed4ec597380cc721d71.
---
utils/data/standard_vars.csv | 14 --------------
1 file changed, 14 deletions(-)
diff --git a/utils/data/standard_vars.csv b/utils/data/standard_vars.csv
index 3a650db0c03..d5f98969a27 100755
--- a/utils/data/standard_vars.csv
+++ b/utils/data/standard_vars.csv
@@ -40,20 +40,6 @@
"LAI",NA,"m2 m-2","Leaf Area Index","Carbon Pools","real","lon","lat","time",NA,"Area of leaves per area ground"
"leaf_carbon_content","leaf_carbon_content","kg C m-2","Leaf Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Leaf carbon content"
"root_carbon_content","root_carbon_content_of_size_class","kg C m-2","Root Carbon Content","Carbon Pools","real","lon","lat","time","rtsize","Root carbon content by size class"
-"fine_root_carbon_content","fine_root_carbon_content","kg C m-2","Fine Root Carbon Content","Carbon Pools","real","lon","lat","time","depth","Carbon content of fine roots (2 mm and smaller); alternative to providing dimensions for root_carbon_content"
-"coarse_root_carbon_content","coarse_root_carbon_content","kg C m-2","Coarse Root Carbon Content","Carbon Pools","real","lon","lat","time","depth","Carbon content of coarse roots (larger than 2 mm); alternative to providing dimensions for root_carbon_content"
-"WoodyLitter","wood_litter_carbon_flux","kg C m-2 s-1","Wood Litter Carbon Flux","Deprecated","real","lon","lat","time",NA,"DALEC output; haven't yet resolved standard woody litter flux"
-"wood_debris_carbon_flux","wood_debris_carbon_flux","kg C m-2 s-1","Wood Debris Carbon Flux","Carbon Fluxes","real","lon","lat","time","wdsize","Total carbon flux of woody debris, including downed woody debris and standing deadwood; excludes litter; size class defined by wdsize dimension"
-"GWBI",NA,"kg C m-2 month-1","Gross Woody Biomass Increment","Carbon Fluxes","real","lon","lat","time",NA,"Variable most analogous to tree-ring-derived change in stem biomass (before mortality/CWD flux)"
-"CWDI",NA,"kg C m-2 month-1","Coarse Woody Debris Increment","Carbon Fluxes","real","lon","lat","time",NA,"Variable most analogous to flux of woody material material to the detrital pool resulting from mortality"
-"CO2CAS",NA,"ppmv","CO2CAS","Carbon Fluxes","real","lon","lat","time",NA,"CO2 in canopy air space; ED2 output variable"
-"CropYield",NA,"kg m-2","CropYield","Carbon Fluxes","real","lon","lat","time","pft","Crop yield; ED2 output variable"
-"poolname",NA,"(-)","Name of each Carbon Pool","Deprecated","character","nchar","npool",NA,NA,"Name of each carbon pool (i.e., wood or Coarse Woody Debris)"
-"CarbPools",NA,"kg C m-2","Size of each carbon pool","Deprecated","real","lon","lat","npool","time","Total size of each carbon pool vertically integrated over the entire soil column"
-"AbvGrndWood",NA,"kg C m-2","Above ground woody biomass","Carbon Pools","real","lon","lat","time",NA,"Total above ground wood biomass"
-"TotLivBiom",NA,"kg C m-2","Total living biomass","Carbon Pools","real","lon","lat","time",NA,"Total carbon content of the living biomass (leaves+roots+wood)"
-"AGB",NA,"kg C m-2","Total aboveground biomass","Carbon Pools","real","lon","lat","time",NA,"aboveground biomass"
-"wood_carbon_content","wood_carbon_content","kg C m-2","Wood Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Wood carbon content including above (AbvGrndWood) and below ground (coarse roots, shared with root_carbon_content)"
"wood_carbon_content","wood_carbon_content","kg C m-2","Wood Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Wood carbon content including above (AbvGrndWood) and below ground (coarse roots, shared with root_carbon_content)"
"TotSoilCarb",NA,"kg C m-2","Total Soil Carbon","Carbon Pools","real","lon","lat","time",NA,"Total soil and litter carbon content vertically integrated over the enire soil column"
"litter_carbon_content","litter_carbon_content","kg C m-2","Litter Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Total carbon content of litter pool, excluding coarse woody debris"
From ffb141beedf3fe8e566e9f31ea37a15e6843d5d8 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Wed, 12 Jul 2017 15:41:04 -0400
Subject: [PATCH 088/771] Add fine/coarse root vars
---
utils/data/standard_vars.csv | 2 ++
1 file changed, 2 insertions(+)
diff --git a/utils/data/standard_vars.csv b/utils/data/standard_vars.csv
index d5f98969a27..2223697a88e 100755
--- a/utils/data/standard_vars.csv
+++ b/utils/data/standard_vars.csv
@@ -40,6 +40,8 @@
"LAI",NA,"m2 m-2","Leaf Area Index","Carbon Pools","real","lon","lat","time",NA,"Area of leaves per area ground"
"leaf_carbon_content","leaf_carbon_content","kg C m-2","Leaf Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Leaf carbon content"
"root_carbon_content","root_carbon_content_of_size_class","kg C m-2","Root Carbon Content","Carbon Pools","real","lon","lat","time","rtsize","Root carbon content by size class"
+"fine_root_carbon_content","fine_root_carbon_content","kg C m-2","Fine Root Carbon Content","Carbon Pools","real","lon","lat","time","depth","Carbon content of fine roots (2 mm and smaller); alternative to providing dimensions for root_carbon_content"
+"coarse_root_carbon_content","coarse_root_carbon_content","kg C m-2","Coarse Root Carbon Content","Carbon Pools","real","lon","lat","time","depth","Carbon content of coarse roots (larger than 2 mm); alternative to providing dimensions for root_carbon_content"
"wood_carbon_content","wood_carbon_content","kg C m-2","Wood Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Wood carbon content including above (AbvGrndWood) and below ground (coarse roots, shared with root_carbon_content)"
"TotSoilCarb",NA,"kg C m-2","Total Soil Carbon","Carbon Pools","real","lon","lat","time",NA,"Total soil and litter carbon content vertically integrated over the enire soil column"
"litter_carbon_content","litter_carbon_content","kg C m-2","Litter Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Total carbon content of litter pool, excluding coarse woody debris"
From 8c4ebc6787c01dc6dfb71ace35ca9562b12ab667 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Wed, 12 Jul 2017 16:08:09 -0400
Subject: [PATCH 089/771] Reorder fluxes and other edits
---
utils/data/standard_vars.csv | 11 ++++++-----
1 file changed, 6 insertions(+), 5 deletions(-)
diff --git a/utils/data/standard_vars.csv b/utils/data/standard_vars.csv
index 2223697a88e..969648f4414 100755
--- a/utils/data/standard_vars.csv
+++ b/utils/data/standard_vars.csv
@@ -15,34 +15,34 @@
"cal_date_beg",NA,"yr, mon, day, hr, min, sec","Calender date beginning averaging period","Deprecated","integer","ncal","time",NA,NA,"calender date beginning of time ave period: year, month, day, hour, minute, second for UTC time zone"
"cal_date_end",NA,"yr, mon, day, hr, min, sec","Calender date end averaging period","Deprecated","integer","ncal","time",NA,NA,"calender date end of time ave period: year, month, day, hour, minute, second for UTC time zone"
"GPP",NA,"kg C m-2 s-1","Gross Primary Productivity","Carbon Fluxes","real","lon","lat","time",NA,"Rate of photosynthesis (always positive)"
-"NPP",NA,"kg C m-2 s-1","Net Primary Productivity","Carbon Fluxes","real","lon","lat","time",NA,"Net Primary Productivity (NPP=GPP-AutoResp, positive into plants)"
+"NEE",NA,"kg C m-2 s-1","Net Ecosystem Exchange","Carbon Fluxes","real","lon","lat","time",NA,"Net Ecosystem Exchange (NEE=HeteroResp+AutoResp-GPP, positive into atmosphere)"
"TotalResp",NA,"kg C m-2 s-1","Total Respiration","Carbon Fluxes","real","lon","lat","time",NA,"Total respiration (TotalResp=AutoResp+heteroResp, always positive)"
"AutoResp",NA,"kg C m-2 s-1","Autotrophic Respiration","Carbon Fluxes","real","lon","lat","time",NA,"Autotrophic respiration rate (always positive)"
"HeteroResp",NA,"kg C m-2 s-1","Heterotrophic Respiration","Carbon Fluxes","real","lon","lat","time",NA,"Heterotrophic respiration rate (always positive)"
"DOC_flux",NA,"kg C m-2 s-1","Dissolved Organic Carbon flux","Carbon Fluxes","real","lon","lat","time",NA,"Loss of organic carbon dissolved in ground water or rivers (positive out of grid cell)"
"Fire_flux",NA,"kg C m-2 s-1","Fire emissions","Carbon Fluxes","real","lon","lat","time",NA,"Flux of carbon due to fires (always positive)"
-"NEE",NA,"kg C m-2 s-1","Net Ecosystem Exchange","Carbon Fluxes","real","lon","lat","time",NA,"Net Ecosystem Exchange (NEE=HeteroResp+AutoResp-GPP, positive into atmosphere)"
"litter_carbon_flux","litter_carbon_flux","kg C m-2 s-1","Litter Carbon Flux","Carbon Fluxes","real","lon","lat","time",NA,"Total carbon flux of litter, excluding coarse woody debris"
"surface_litter_carbon_flux","surface_litter_carbon_flux","kg C m-2 s-1","Surface Litter Carbon Flux","Carbon Fluxes","real","lon","lat","time",NA,"Total carbon flux of surface litter"
"subsurface_litter_carbon_flux","subsurface_litter_carbon_flux","kg C m-2 s-1","Subsurface Litter Carbon Flux","Carbon Fluxes","real","lon","lat","time","depth","Total carbon flux of subsurface litter"
"leaf_litter_carbon_flux","leaf_litter_carbon_flux","kg C m-2 s-1","Leaf Litter Carbon Flux","Carbon Fluxes","real","lon","lat","time",NA,"Carbon flux of leaf litter"
"WoodyLitter","wood_litter_carbon_flux","kg C m-2 s-1","Wood Litter Carbon Flux","Deprecated","real","lon","lat","time",NA,"DALEC output; haven't yet resolved standard woody litter flux"
"wood_debris_carbon_flux","wood_debris_carbon_flux","kg C m-2 s-1","Wood Debris Carbon Flux","Carbon Fluxes","real","lon","lat","time","wdsize","Total carbon flux of woody debris, including downed woody debris and standing deadwood; excludes litter; size class defined by wdsize dimension"
+"NPP",NA,"kg C m-2 s-1","Net Primary Productivity","Carbon Fluxes","real","lon","lat","time",NA,"Net Primary Productivity (NPP=GPP-AutoResp, positive into plants)"
"GWBI",NA,"kg C m-2 month-1","Gross Woody Biomass Increment","Carbon Fluxes","real","lon","lat","time",NA,"Variable most analogous to tree-ring-derived change in stem biomass (before mortality/CWD flux)"
"CWDI",NA,"kg C m-2 month-1","Coarse Woody Debris Increment","Carbon Fluxes","real","lon","lat","time",NA,"Variable most analogous to flux of woody material material to the detrital pool resulting from mortality"
-"CO2CAS",NA,"ppmv","CO2CAS","Carbon Fluxes","real","lon","lat","time",NA,"CO2 in canopy air space; ED2 output variable"
"CropYield",NA,"kg m-2","CropYield","Carbon Fluxes","real","lon","lat","time","pft","Crop yield; ED2 output variable"
"poolname",NA,"(-)","Name of each Carbon Pool","Deprecated","character","nchar","npool",NA,NA,"Name of each carbon pool (i.e., wood or Coarse Woody Debris)"
"CarbPools",NA,"kg C m-2","Size of each carbon pool","Deprecated","real","lon","lat","npool","time","Total size of each carbon pool vertically integrated over the entire soil column"
-"AbvGrndWood",NA,"kg C m-2","Above ground woody biomass","Carbon Pools","real","lon","lat","time",NA,"Total above ground wood biomass"
"TotLivBiom",NA,"kg C m-2","Total living biomass","Carbon Pools","real","lon","lat","time",NA,"Total carbon content of the living biomass (leaves+roots+wood)"
"AGB",NA,"kg C m-2","Total aboveground biomass","Carbon Pools","real","lon","lat","time",NA,"aboveground biomass"
"LAI",NA,"m2 m-2","Leaf Area Index","Carbon Pools","real","lon","lat","time",NA,"Area of leaves per area ground"
"leaf_carbon_content","leaf_carbon_content","kg C m-2","Leaf Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Leaf carbon content"
-"root_carbon_content","root_carbon_content_of_size_class","kg C m-2","Root Carbon Content","Carbon Pools","real","lon","lat","time","rtsize","Root carbon content by size class"
+"root_carbon_content","root_carbon_content_of_size_class","kg C m-2","Root Carbon Content","Carbon Pools","real","lon","lat","time","rtsize","
+Root carbon content, optionally by size class; alternatively specify fine_ and coarse_root_carbon_content"
"fine_root_carbon_content","fine_root_carbon_content","kg C m-2","Fine Root Carbon Content","Carbon Pools","real","lon","lat","time","depth","Carbon content of fine roots (2 mm and smaller); alternative to providing dimensions for root_carbon_content"
"coarse_root_carbon_content","coarse_root_carbon_content","kg C m-2","Coarse Root Carbon Content","Carbon Pools","real","lon","lat","time","depth","Carbon content of coarse roots (larger than 2 mm); alternative to providing dimensions for root_carbon_content"
"wood_carbon_content","wood_carbon_content","kg C m-2","Wood Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Wood carbon content including above (AbvGrndWood) and below ground (coarse roots, shared with root_carbon_content)"
+"AbvGrndWood",NA,"kg C m-2","Above ground woody biomass","Carbon Pools","real","lon","lat","time",NA,"Total above ground wood biomass"
"TotSoilCarb",NA,"kg C m-2","Total Soil Carbon","Carbon Pools","real","lon","lat","time",NA,"Total soil and litter carbon content vertically integrated over the enire soil column"
"litter_carbon_content","litter_carbon_content","kg C m-2","Litter Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Total carbon content of litter pool, excluding coarse woody debris"
"surface_litter_carbon_content","surface_litter_carbon_content","kg C m-2","Surface Litter Carbon Content","Carbon Pools","real","lon","lat","time",NA,"Carbon content of surface litter pool"
@@ -86,6 +86,7 @@
"SWE",NA,"kg m-2","Snow Water Equivalent","Physical Variables","real","lon","lat","time",NA,"Total water mass of snow pack, including ice and liquid water"
"SnowDen",NA,"kg m-3","Bulk Snow Density","Physical Variables","real","lon","lat","time",NA,"Overall bulk density of the snow pack, including ice and liquid water"
"SnowDepth",NA,"m","Total snow depth","Physical Variables","real","lon","lat","time",NA,"Total snow depth"
+"CO2CAS",NA,"ppmv","CO2CAS","Physical Variables","real","lon","lat","time",NA,"CO2 in canopy air space; ED2 output variable"
"CO2air",NA,"micromol mol-1","Near surface CO2 concentration","Driver","real","lon","lat","time",NA,"Near surface dry air CO2 mole fraction"
"LWdown","surface_downwelling_longwave_flux_in_air","W/m2","Surface incident longwave radiation","Driver","real","lon","lat","time",NA,"Surface incident longwave radiation"
"Psurf","air_pressure","Pa","Surface pressure","Driver","real","lon","lat","time",NA,"Surface pressure"
From cefe3b46b6ac4e9ff403fcf1b3caa62ee0e9bf28 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Wed, 12 Jul 2017 16:44:03 -0400
Subject: [PATCH 090/771] Change mstmip calls to to_ncvar
---
models/dalec/R/model2netcdf.DALEC.R | 38 +++++++++++++++--------------
1 file changed, 20 insertions(+), 18 deletions(-)
diff --git a/models/dalec/R/model2netcdf.DALEC.R b/models/dalec/R/model2netcdf.DALEC.R
index 30bf76dd896..56fc78735f5 100644
--- a/models/dalec/R/model2netcdf.DALEC.R
+++ b/models/dalec/R/model2netcdf.DALEC.R
@@ -19,7 +19,7 @@
##' @param start_date Start time of the simulation
##' @param end_date End time of the simulation
##' @importFrom ncdf4 ncvar_def ncdim_def
-##' @importFrom PEcAn.utils mstmipvar
+##' @importFrom PEcAn.utils mstmipvar to_ncvar to_ncdim
##' @export
##' @author Shawn Serbin, Michael Dietze
model2netcdf.DALEC <- function(outdir, sitelat, sitelon, start_date, end_date) {
@@ -77,7 +77,8 @@ model2netcdf.DALEC <- function(outdir, sitelat, sitelon, start_date, end_date) {
calendar = "standard", unlim = TRUE)
lat <- ncdim_def("lat", "degrees_north", vals = as.numeric(sitelat), longname = "station_latitude")
lon <- ncdim_def("lon", "degrees_east", vals = as.numeric(sitelon), longname = "station_longitude")
-
+
+ dims <- list(time = t, lon = lon, lat = lat)
## ***** Need to dynamically update the UTC offset here *****
for (i in seq_along(output)) {
@@ -85,24 +86,25 @@ model2netcdf.DALEC <- function(outdir, sitelat, sitelon, start_date, end_date) {
output[[i]] <- rep(-999, length(t$vals))
}
+
nc_var <- list()
- nc_var[[1]] <- mstmipvar("AutoResp", lat, lon, t, NA)
- nc_var[[2]] <- mstmipvar("HeteroResp", lat, lon, t, NA)
- nc_var[[3]] <- mstmipvar("GPP", lat, lon, t, NA)
- nc_var[[4]] <- mstmipvar("NEE", lat, lon, t, NA)
- nc_var[[5]] <- mstmipvar("NPP", lat, lon, t, NA)
- nc_var[[6]] <- ncvar_def("leaf_litter_carbon_flux", "kgC/m2/s", list(lon, lat, t), -999) #was LeafLitter
- nc_var[[7]] <- ncvar_def("WoodyLitter", "kgC/m2/s", list(lon, lat, t), -999) #need to resolve standard woody litter flux
- nc_var[[8]] <- ncvar_def("subsurface_litter_carbon_flux", "kgC/m2/s", list(lon, lat, t), -999) #was RootLitter
- nc_var[[9]] <- ncvar_def("leaf_carbon_content", "kgC/m2", list(lon, lat, t), -999) #was LeafBiomass
- nc_var[[10]] <- ncvar_def("wood_carbon_content", "kgC/m2", list(lon, lat, t), -999) #was WoodBiomass
- nc_var[[11]] <- ncvar_def("root_carbon_content", "kgC/m2", list(lon, lat, t,rtsize), -999) #was RootBiomass
- nc_var[[12]] <- ncvar_def("litter_carbon_content", "kgC/m2", list(lon, lat, t), -999) #was LitterBiomass
- nc_var[[13]] <- ncvar_def("soil_carbon_content", "kgC/m2", list(lon, lat, t), -999) #was SoilC; SOM pool technically includes woody debris (can't be represented by our standard)
+ nc_var[[1]] <- to_ncvar("AutoResp", dims)
+ nc_var[[2]] <- to_ncvar("HeteroResp", dims)
+ nc_var[[3]] <- to_ncvar("GPP", dims)
+ nc_var[[4]] <- to_ncvar("NEE", dims)
+ nc_var[[5]] <- to_ncvar("NPP", dims)
+ nc_var[[6]] <- to_ncvar("leaf_litter_carbon_flux", dims) #was LeafLitter
+ nc_var[[7]] <- to_ncvar("WoodyLitter", dims) #need to resolve standard woody litter flux
+ nc_var[[8]] <- to_ncvar("subsurface_litter_carbon_flux", dims) #was RootLitter
+ nc_var[[9]] <- to_ncvar("leaf_carbon_content", dims) #was LeafBiomass
+ nc_var[[10]] <- to_ncvar("wood_carbon_content", dims) #was WoodBiomass
+ nc_var[[11]] <- to_ncvar("root_carbon_content", dims) #was RootBiomass
+ nc_var[[12]] <- to_ncvar("litter_carbon_content", dims) #was LitterBiomass
+ nc_var[[13]] <- to_ncvar("soil_carbon_content", dims) #was SoilC; SOM pool technically includes woody debris (can't be represented by our standard)
- nc_var[[14]] <- mstmipvar("TotalResp", lat, lon, t, NA)
- nc_var[[15]] <- mstmipvar("TotLivBiom", lat, lon, t, NA)
- nc_var[[16]] <- mstmipvar("TotSoilCarb", lat, lon, t, NA)
+ nc_var[[14]] <- to_ncvar("TotalResp", dims)
+ nc_var[[15]] <- to_ncvar("TotLivBiom", dims)
+ nc_var[[16]] <- to_ncvar("TotSoilCarb", dims)
# ******************** Declare netCDF variables ********************#
From d9a8e083c6b06915c971e848a4e34f696bc64cb7 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Thu, 13 Jul 2017 11:29:59 -0400
Subject: [PATCH 091/771] Root partitioning and IC readin redesign
---
models/dalec/R/write.configs.dalec.R | 59 +++++++++++++++++++++++++---
1 file changed, 53 insertions(+), 6 deletions(-)
diff --git a/models/dalec/R/write.configs.dalec.R b/models/dalec/R/write.configs.dalec.R
index b4d0f51e993..621f21e8cf2 100644
--- a/models/dalec/R/write.configs.dalec.R
+++ b/models/dalec/R/write.configs.dalec.R
@@ -115,6 +115,9 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
}
### INITIAL CONDITIONS
+ is.loaded <- function(var){
+ return(all(!is.na(var) && is.numeric(var))) #check that ncvar was present (numeric) and a value was given it (not NA)
+ }
default.param <- read.table(system.file("default_param.dalec", package = "PEcAn.DALEC"))
IC.param <- data.frame()
@@ -123,16 +126,60 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
IC.nc <- try(ncdf4::nc_open(IC.path))
if(class(IC.nc) != "try-error"){
- # cf0 initial canopy foliar carbon (g/m2)
+ #check/load biomass netcdf variables
+ totBiom <- try(ncdf4::ncvar_get(IC.nc,"TotLivBiom"),silent = TRUE)
leaf <- try(ncdf4::ncvar_get(IC.nc,"leaf_carbon_content"),silent = TRUE)
- if (!is.na(leaf) && is.numeric(leaf)) {
+ AbvGrndWood <- try(ncdf4::ncvar_get(IC.nc,"AbvGrndWood"),silent = TRUE)
+ roots <- try(ncdf4::ncvar_get(IC.nc,"root_carbon_content"),silent = TRUE)
+ fine.roots <- try(ncdf4::ncvar_get(IC.nc,"fine_root_carbon_content"),silent = TRUE)
+ coarse.roots <- try(ncdf4::ncvar_get(IC.nc,"coarse_root_carbon_content"),silent = TRUE)
+
+
+ #check if total roots are partitioned
+ if(is.loaded(roots) && !is.loaded(fine.roots) || !is.loaded(coarse.roots)){
+ if("rtsize" %in% names(IC.nc$dim)){
+ rtsize = IC.nc$dim$rtsize$vals
+ if(length(rtsize) > 1 && length(rtsize) == length(roots)){
+ threshold = .002
+ epsilon <- .0005
+ rtsize_thresh_idx = which.min(sapply(rtsize-threshold,abs))
+ rtsize_thresh = rtsize[rtsize_thresh_idx]
+ if(abs(rtsize_thresh-threshold) > epsilon){
+ PEcAn.utils::logger.error(paste("Closest rtsize to fine root threshold of", threshold, "m (", rtsize_thresh,
+ ") is greater than", epsilon,
+ "m off; fine roots can't be partitioned. Please improve rtsize dimensions or provide fine_root_carbon_content and coarse_root_carbon_content in netcdf."))
+ }
+ else{
+ fine.roots.temp <- sum(roots[1:rtsize_thresh_idx-1])
+ coarse.roots.temp <- sum(roots) - fine.roots
+ if(fine.roots.temp > 0 && coarse.roots.temp > 0){
+ fine.roots <- fine.roots.temp
+ coarse.roots <- coarse.roots.temp
+ } else{
+ PEcAn.utils::logger.error("Roots could not be partitioned (fine or coarse is less than 0); please provide fine_root_carbon_content and coarse_root_carbon_content in netcdf.")
+ }
+ }
+ } else {
+ PEcAn.utils::logger.error("Not enough levels of rtsize to partition roots; please provide finer resolution for root_carbon_content or provide fine_root_carbon_content and coarse_root_carbon_content in netcdf.")
+ }
+ } else{
+ PEcAn.utils::logger.error("Please provide rtsize dimension with root_carbon_content to allow partitioning or provide fine_root_carbon_content and coarse_root_carbon_content in netcdf.")
+ }
+ }
+
+
+ # cf0 initial canopy foliar carbon (g/m2)
+ if (is.loaded(leaf)) {
param[["cf0"]] <- leaf
}
+ else if(is.loaded(totBiom) && is.loaded(AbvGrndWood) &&
+ is.loaded(fine.roots) && is.loaded(coarse.roots)){
+ leaf <- totBiom - AbvGrndWood - fine.roots - coarse.roots
+ }
+ }
# cw0 initial pool of woody carbon (g/m2)
- AbvGrndWood <- try(ncdf4::ncvar_get(IC.nc,"AbvGrndWood"),silent = TRUE)
- if (!is.na(AbvGrndWood) && is.numeric(AbvGrndWood)) {
- roots <- try(ncdf4::ncvar_get(IC.nc,"root_carbon_content"),silent = TRUE)
- if(!is.na(roots) && is.numeric(roots)){
+ if (is.loaded(AbvGrndWood)) {
+ if(is.loaded(fine.roots) && is.loaded(coarse.roots)){
#wood <- partitioned coarse roots + abvgroundwood
}
else{
From 8d0e35e7ba167a62e857762c62c28909eedf608c Mon Sep 17 00:00:00 2001
From: annethomas
Date: Thu, 13 Jul 2017 12:43:12 -0400
Subject: [PATCH 092/771] First complete draft of IC readin
---
models/dalec/R/write.configs.dalec.R | 119 ++++++++++++++++-----------
1 file changed, 73 insertions(+), 46 deletions(-)
diff --git a/models/dalec/R/write.configs.dalec.R b/models/dalec/R/write.configs.dalec.R
index 621f21e8cf2..05cfe1d1659 100644
--- a/models/dalec/R/write.configs.dalec.R
+++ b/models/dalec/R/write.configs.dalec.R
@@ -115,19 +115,19 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
}
### INITIAL CONDITIONS
- is.loaded <- function(var){
- return(all(!is.na(var) && is.numeric(var))) #check that ncvar was present (numeric) and a value was given it (not NA)
+ is.valid <- function(var){
+ return(all(!is.na(var) && is.numeric(var) && var >= 0)) #check that ncvar was present (numeric) and a valid value was given it (not NA or negative)
}
default.param <- read.table(system.file("default_param.dalec", package = "PEcAn.DALEC"))
IC.param <- data.frame()
- if (!is.null(settings$run$inputs$poolinitcond$path)) {
+ if (!is.null(settings$run$inputs$poolinitcond$path)) {
IC.path <- settings$run$inputs$poolinitcond$path
IC.nc <- try(ncdf4::nc_open(IC.path))
if(class(IC.nc) != "try-error"){
#check/load biomass netcdf variables
- totBiom <- try(ncdf4::ncvar_get(IC.nc,"TotLivBiom"),silent = TRUE)
+ TotLivBiom <- try(ncdf4::ncvar_get(IC.nc,"TotLivBiom"),silent = TRUE)
leaf <- try(ncdf4::ncvar_get(IC.nc,"leaf_carbon_content"),silent = TRUE)
AbvGrndWood <- try(ncdf4::ncvar_get(IC.nc,"AbvGrndWood"),silent = TRUE)
roots <- try(ncdf4::ncvar_get(IC.nc,"root_carbon_content"),silent = TRUE)
@@ -135,15 +135,16 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
coarse.roots <- try(ncdf4::ncvar_get(IC.nc,"coarse_root_carbon_content"),silent = TRUE)
- #check if total roots are partitioned
- if(is.loaded(roots) && !is.loaded(fine.roots) || !is.loaded(coarse.roots)){
+ #check if total roots are partitioned (pull out as a function for readability)
+ #note: if fine and coarse roots are both loaded, they will override root_carbon_content
+ if(is.valid(roots) && (!is.valid(fine.roots) || !is.valid(coarse.roots)){
if("rtsize" %in% names(IC.nc$dim)){
- rtsize = IC.nc$dim$rtsize$vals
+ rtsize <- IC.nc$dim$rtsize$vals
if(length(rtsize) > 1 && length(rtsize) == length(roots)){
- threshold = .002
+ threshold <- .002
epsilon <- .0005
- rtsize_thresh_idx = which.min(sapply(rtsize-threshold,abs))
- rtsize_thresh = rtsize[rtsize_thresh_idx]
+ rtsize_thresh_idx <- which.min(sapply(rtsize-threshold,abs))
+ rtsize_thresh <- rtsize[rtsize_thresh_idx]
if(abs(rtsize_thresh-threshold) > epsilon){
PEcAn.utils::logger.error(paste("Closest rtsize to fine root threshold of", threshold, "m (", rtsize_thresh,
") is greater than", epsilon,
@@ -152,9 +153,10 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
else{
fine.roots.temp <- sum(roots[1:rtsize_thresh_idx-1])
coarse.roots.temp <- sum(roots) - fine.roots
- if(fine.roots.temp > 0 && coarse.roots.temp > 0){
+ if(fine.roots.temp >= 0 && coarse.roots.temp >= 0){
fine.roots <- fine.roots.temp
coarse.roots <- coarse.roots.temp
+ PEcAn.utils::logger.info("Using partitioned root values", fine.roots, "for fine and", coarse.roots, "for coarse.")
} else{
PEcAn.utils::logger.error("Roots could not be partitioned (fine or coarse is less than 0); please provide fine_root_carbon_content and coarse_root_carbon_content in netcdf.")
}
@@ -167,51 +169,76 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
}
}
-
+ ###write initial conditions from netcdf
# cf0 initial canopy foliar carbon (g/m2)
- if (is.loaded(leaf)) {
- param[["cf0"]] <- leaf
- }
- else if(is.loaded(totBiom) && is.loaded(AbvGrndWood) &&
- is.loaded(fine.roots) && is.loaded(coarse.roots)){
- leaf <- totBiom - AbvGrndWood - fine.roots - coarse.roots
- }
+ if (is.valid(leaf)) {
+ param[["cf0"]] <- leaf * 1000 #standard kg C m-2
+ } else if(is.valid(TotLivBiom) && is.valid(AbvGrndWood) &&
+ is.valid(fine.roots) && is.valid(coarse.roots)){
+ leaf <- (TotLivBiom - AbvGrndWood - fine.roots - coarse.roots) * 1000 #standard kg C m-2
+ if(leaf >= 0){
+ param[["cf0"]] <- leaf
+ } else{
+ PEcAn.utils::logger.error("TotLivBiom is less than sum of AbvGrndWood and roots; using default for leaf biomass")
+ }
}
+
# cw0 initial pool of woody carbon (g/m2)
- if (is.loaded(AbvGrndWood)) {
- if(is.loaded(fine.roots) && is.loaded(coarse.roots)){
- #wood <- partitioned coarse roots + abvgroundwood
+ if (is.valid(AbvGrndWood)) {
+ if(is.valid(coarse.roots)){
+ param[["cw0"]] <- (AbvGrndWood + coarse.roots) * 1000 #standard kg C m-2
}
- else{
- #wood <- (roots-default.fine) + abvgroundwood
+ else if (is.valid(TotLivBiom) && is.valid(leaf) && is.valid(fine.roots)){
+ wood <- (TotLivBiom - leaf - fine.roots) * 1000 #standard kg C m-2
+ if (wood >= 0){
+ param[["cw0"]] <- wood
+ } else{
+ PEcAn.utils::logger.error("TotLivBiom is less than sum of leaf and fine roots; using default for woody biomass")
+ }
+ } else{
+ PEcAn.utils::logger.error("write.configs.DALEC IC can't calculate total woody biomass with only AbvGrndWood; using defaults. Please provide coarse_root_carbon_content OR root_carbon_content with rtsize dimensions OR leaf_carbon_content, fine_root_carbon_content, and TotLivBiom in netcdf")
+ }
+ } else if (is.valid(TotLivBiom) && is.valid(leaf) && is.valid(fine.roots)){
+ wood <- (TotLivBiom - leaf - fine.roots) * 1000 #standard kg C m-2
+ if (wood >= 0){
+ param[["cw0"]] <- wood
+ }else{
+ PEcAn.utils::logger.error("TotLivBiom is less than sum of leaf and fine roots; using default for woody biomass")
}
- param[["cw0"]] <- wood
+ } else{
+ #use default wood
}
+
# cr0 initial pool of fine root carbon (g/m2)
- roots <- try(ncdf4::ncvar_get(IC.nc,"root_carbon_content"),silent = TRUE)
- if (!is.na(roots) && is.numeric(roots)) {
- #partition fine roots
- param[["cr0"]] <- roots
- }
+ if (is.valid(fine.roots)) {
+ param[["cr0"]] <- fine.roots * 1000 #standard kg C m-2
+ }
+
# cl0 initial pool of litter carbon (g/m2)
- litter <- try(ncdf4::ncvar_get(IC.nc,"litter_carbon_content"),silent = TRUE)
- if (!is.na(litter) && is.numeric(litter)) {
- param[["cl0"]] <- litter
- }
+ litter <- try(ncdf4::ncvar_get(IC.nc,"litter_carbon_content"),silent = TRUE)
+ if (is.valid(litter)) {
+ param[["cl0"]] <- litter * 1000 #standard kg C m-2
+ }
+
# cs0 initial pool of soil organic matter and woody debris carbon (g/m2)
- soil <- try(ncdf4::ncvar_get(IC.nc,"soil_organic_carbon_content"),silent = TRUE)
- if(!is.numeric(soil)){
- soil <- try(ncdf4::ncvar_get(IC.nc,"soil_carbon_content"),silent = TRUE)
- if(is.numeric(soil)){
- wood <- try(ncdf4::ncvar_get(IC.nc,"wood_debris_carbon_content"),silent = TRUE)
- if(is.numeric(wood)){
- soil_and_wood <- soil + sum(wood)
- param[["cs0"]] <- soil_and_wood
- }
- }
+ soil <- try(ncdf4::ncvar_get(IC.nc,"soil_organic_carbon_content"),silent = TRUE)
+ wood.debris <- try(ncdf4::ncvar_get(IC.nc,"wood_debris_carbon_content"),silent = TRUE)
+ if(is.valid(soil) && is.valid(wood.debris)){
+ param[["cs0"]] <- (soil + sum(wood.debris)) * 1000 #standard kg C m-2
+ } else if(!is.valid(soil) && is.valid(wood.debris)){
+ soil <- try(ncdf4::ncvar_get(IC.nc,"soil_carbon_content"),silent = TRUE)
+ if(is.valid(soil)){
+ param[["cs0"]] <- (soil + sum(wood.debris)) * 1000 #standard kg C m-2
+ } else{
+ PEcAn.utils::logger.error("write.configs.DALEC IC can't calculate soil matter pool without soil carbon; using default. Please provide soil_organic_carbon_content in netcdf.")
}
- }
- else{
+ } else if(is.valid(soil) && !is.valid(wood.debris)){
+ PEcAn.utils::logger.error("write.configs.DALEC IC can't calculate soil matter pool without wood debris; using default. Please provide wood_debris_carbon_content in netcdf.")
+ } else{
+ #use default soil pool
+ }
+
+ } else{
PEcAn.utils::logger.error("Bad initial conditions filepath; kept defaults")
}
}
From 7001fb3ae56b1291219b5ce077beb90780a7b7ba Mon Sep 17 00:00:00 2001
From: annethomas
Date: Thu, 13 Jul 2017 12:55:18 -0400
Subject: [PATCH 093/771] Change some names
---
models/dalec/R/write.configs.dalec.R | 26 ++++++++++++++++----------
1 file changed, 16 insertions(+), 10 deletions(-)
diff --git a/models/dalec/R/write.configs.dalec.R b/models/dalec/R/write.configs.dalec.R
index 05cfe1d1659..b2a5dbea03e 100644
--- a/models/dalec/R/write.configs.dalec.R
+++ b/models/dalec/R/write.configs.dalec.R
@@ -120,7 +120,7 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
}
default.param <- read.table(system.file("default_param.dalec", package = "PEcAn.DALEC"))
- IC.param <- data.frame()
+ IC.params <- data.frame()
if (!is.null(settings$run$inputs$poolinitcond$path)) {
IC.path <- settings$run$inputs$poolinitcond$path
IC.nc <- try(ncdf4::nc_open(IC.path))
@@ -172,12 +172,12 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
###write initial conditions from netcdf
# cf0 initial canopy foliar carbon (g/m2)
if (is.valid(leaf)) {
- param[["cf0"]] <- leaf * 1000 #standard kg C m-2
+ IC.params[["cf0"]] <- leaf * 1000 #standard kg C m-2
} else if(is.valid(TotLivBiom) && is.valid(AbvGrndWood) &&
is.valid(fine.roots) && is.valid(coarse.roots)){
leaf <- (TotLivBiom - AbvGrndWood - fine.roots - coarse.roots) * 1000 #standard kg C m-2
if(leaf >= 0){
- param[["cf0"]] <- leaf
+ IC.params[["cf0"]] <- leaf
} else{
PEcAn.utils::logger.error("TotLivBiom is less than sum of AbvGrndWood and roots; using default for leaf biomass")
}
@@ -186,12 +186,12 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
# cw0 initial pool of woody carbon (g/m2)
if (is.valid(AbvGrndWood)) {
if(is.valid(coarse.roots)){
- param[["cw0"]] <- (AbvGrndWood + coarse.roots) * 1000 #standard kg C m-2
+ IC.params[["cw0"]] <- (AbvGrndWood + coarse.roots) * 1000 #standard kg C m-2
}
else if (is.valid(TotLivBiom) && is.valid(leaf) && is.valid(fine.roots)){
wood <- (TotLivBiom - leaf - fine.roots) * 1000 #standard kg C m-2
if (wood >= 0){
- param[["cw0"]] <- wood
+ IC.params[["cw0"]] <- wood
} else{
PEcAn.utils::logger.error("TotLivBiom is less than sum of leaf and fine roots; using default for woody biomass")
}
@@ -201,7 +201,7 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
} else if (is.valid(TotLivBiom) && is.valid(leaf) && is.valid(fine.roots)){
wood <- (TotLivBiom - leaf - fine.roots) * 1000 #standard kg C m-2
if (wood >= 0){
- param[["cw0"]] <- wood
+ IC.params[["cw0"]] <- wood
}else{
PEcAn.utils::logger.error("TotLivBiom is less than sum of leaf and fine roots; using default for woody biomass")
}
@@ -211,24 +211,24 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
# cr0 initial pool of fine root carbon (g/m2)
if (is.valid(fine.roots)) {
- param[["cr0"]] <- fine.roots * 1000 #standard kg C m-2
+ IC.params[["cr0"]] <- fine.roots * 1000 #standard kg C m-2
}
# cl0 initial pool of litter carbon (g/m2)
litter <- try(ncdf4::ncvar_get(IC.nc,"litter_carbon_content"),silent = TRUE)
if (is.valid(litter)) {
- param[["cl0"]] <- litter * 1000 #standard kg C m-2
+ IC.params[["cl0"]] <- litter * 1000 #standard kg C m-2
}
# cs0 initial pool of soil organic matter and woody debris carbon (g/m2)
soil <- try(ncdf4::ncvar_get(IC.nc,"soil_organic_carbon_content"),silent = TRUE)
wood.debris <- try(ncdf4::ncvar_get(IC.nc,"wood_debris_carbon_content"),silent = TRUE)
if(is.valid(soil) && is.valid(wood.debris)){
- param[["cs0"]] <- (soil + sum(wood.debris)) * 1000 #standard kg C m-2
+ IC.params[["cs0"]] <- (soil + sum(wood.debris)) * 1000 #standard kg C m-2
} else if(!is.valid(soil) && is.valid(wood.debris)){
soil <- try(ncdf4::ncvar_get(IC.nc,"soil_carbon_content"),silent = TRUE)
if(is.valid(soil)){
- param[["cs0"]] <- (soil + sum(wood.debris)) * 1000 #standard kg C m-2
+ IC.params[["cs0"]] <- (soil + sum(wood.debris)) * 1000 #standard kg C m-2
} else{
PEcAn.utils::logger.error("write.configs.DALEC IC can't calculate soil matter pool without soil carbon; using default. Please provide soil_organic_carbon_content in netcdf.")
}
@@ -238,6 +238,12 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
#use default soil pool
}
+ ###Write to command line file
+ PEcAn.utils::logger.info(names(paste("Adding IC tags to file:", IC.params))
+ for (i in seq_along(IC.params)) {
+ cmdFlags <- paste0(cmdFlags, " -", names(IC.params)[i], " ", IC.params[[i]])
+ }
+
} else{
PEcAn.utils::logger.error("Bad initial conditions filepath; kept defaults")
}
From a0ee25483196ee010a852d81e1c3bf0aa6b1f4af Mon Sep 17 00:00:00 2001
From: annethomas
Date: Thu, 13 Jul 2017 13:00:27 -0400
Subject: [PATCH 094/771] Have partitioned roots override fine/coarse
---
models/dalec/R/write.configs.dalec.R | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/models/dalec/R/write.configs.dalec.R b/models/dalec/R/write.configs.dalec.R
index b2a5dbea03e..7c153c23635 100644
--- a/models/dalec/R/write.configs.dalec.R
+++ b/models/dalec/R/write.configs.dalec.R
@@ -136,8 +136,8 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
#check if total roots are partitioned (pull out as a function for readability)
- #note: if fine and coarse roots are both loaded, they will override root_carbon_content
- if(is.valid(roots) && (!is.valid(fine.roots) || !is.valid(coarse.roots)){
+ #note: if roots are partitionable, they will override fine_ and/or coarse_root_carbon_content if loaded
+ if(is.valid(roots)){
if("rtsize" %in% names(IC.nc$dim)){
rtsize <- IC.nc$dim$rtsize$vals
if(length(rtsize) > 1 && length(rtsize) == length(roots)){
From 8aaa7d0eb78a9519d401e787788fad7c682168bb Mon Sep 17 00:00:00 2001
From: annethomas
Date: Thu, 13 Jul 2017 13:38:26 -0400
Subject: [PATCH 095/771] Make partition_roots a function
---
models/dalec/R/write.configs.dalec.R | 86 +++++++++++++++-------------
1 file changed, 46 insertions(+), 40 deletions(-)
diff --git a/models/dalec/R/write.configs.dalec.R b/models/dalec/R/write.configs.dalec.R
index 7c153c23635..0838d006605 100644
--- a/models/dalec/R/write.configs.dalec.R
+++ b/models/dalec/R/write.configs.dalec.R
@@ -71,17 +71,40 @@ convert.samples.DALEC <- function(trait.samples) {
names(trait.samples)[which(names(trait.samples) == "som_respiration_rate")] <- "t9"
}
- ### INITIAL CONDITIONS
-
- # cf0 initial canopy foliar carbon (g/m2)
- # cw0 initial pool of woody carbon (g/m2)
- # cr0 initial pool of fine root carbon (g/m2)
- # cl0 initial pool of litter carbon (g/m2)
- # cs0 initial pool of soil organic matter and woody debris carbon (g/m2)
-
return(trait.samples)
} # convert.samples.DALEC
+####function to split root_carbon_content into fine and coarse roots by rtsize dimension at the .002 m threshold
+partition_roots <- function(roots, rtsize){
+ if(length(rtsize) > 1 && length(rtsize) == length(roots)){
+ threshold <- .002
+ epsilon <- .0005
+ rtsize_thresh_idx <- which.min(sapply(rtsize-threshold,abs))
+ rtsize_thresh <- rtsize[rtsize_thresh_idx]
+ if(abs(rtsize_thresh-threshold) > epsilon){
+ PEcAn.utils::logger.error(paste("Closest rtsize to fine root threshold of", threshold, "m (", rtsize_thresh,
+ ") is greater than", epsilon,
+ "m off; fine roots can't be partitioned. Please improve rtsize dimensions or provide fine_root_carbon_content and coarse_root_carbon_content in netcdf."))
+ return(NULL)
+ } else{
+ fine.roots.temp <- sum(roots[1:rtsize_thresh_idx-1])
+ coarse.roots.temp <- sum(roots) - fine.roots.temp
+ if(fine.roots.temp >= 0 && coarse.roots.temp >= 0){
+ fine.roots <- fine.roots.temp
+ coarse.roots <- coarse.roots.temp
+ PEcAn.utils::logger.info("Using partitioned root values", fine.roots, "for fine and", coarse.roots, "for coarse.")
+ return(list(fine.roots = fine.roots, coarse.roots = coarse.roots))
+ } else{
+ PEcAn.utils::logger.error("Roots could not be partitioned (fine or coarse is less than 0); please provide fine_root_carbon_content and coarse_root_carbon_content in netcdf.")
+ return(NULL)
+ }
+ }
+ } else {
+ PEcAn.utils::logger.error("Not enough levels of rtsize associated with root_carbon_content to partition roots; please provide finer resolution for root_carbon_content or provide fine_root_carbon_content and coarse_root_carbon_content in netcdf.")
+ return(NULL)
+ }
+}
+
#--------------------------------------------------------------------------------------------------#
##' Writes a configuration files for your model
#--------------------------------------------------------------------------------------------------#
@@ -115,11 +138,13 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
}
### INITIAL CONDITIONS
+
+ #function to check that ncvar was present (numeric) and a valid value was given it (not NA or negative)
is.valid <- function(var){
- return(all(!is.na(var) && is.numeric(var) && var >= 0)) #check that ncvar was present (numeric) and a valid value was given it (not NA or negative)
+ return(all(is.numeric(var) && !is.na(var) && var >= 0))
}
- default.param <- read.table(system.file("default_param.dalec", package = "PEcAn.DALEC"))
+ #default.param <- read.table(system.file("default_param.dalec", package = "PEcAn.DALEC"))
IC.params <- data.frame()
if (!is.null(settings$run$inputs$poolinitcond$path)) {
IC.path <- settings$run$inputs$poolinitcond$path
@@ -140,36 +165,21 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
if(is.valid(roots)){
if("rtsize" %in% names(IC.nc$dim)){
rtsize <- IC.nc$dim$rtsize$vals
- if(length(rtsize) > 1 && length(rtsize) == length(roots)){
- threshold <- .002
- epsilon <- .0005
- rtsize_thresh_idx <- which.min(sapply(rtsize-threshold,abs))
- rtsize_thresh <- rtsize[rtsize_thresh_idx]
- if(abs(rtsize_thresh-threshold) > epsilon){
- PEcAn.utils::logger.error(paste("Closest rtsize to fine root threshold of", threshold, "m (", rtsize_thresh,
- ") is greater than", epsilon,
- "m off; fine roots can't be partitioned. Please improve rtsize dimensions or provide fine_root_carbon_content and coarse_root_carbon_content in netcdf."))
- }
- else{
- fine.roots.temp <- sum(roots[1:rtsize_thresh_idx-1])
- coarse.roots.temp <- sum(roots) - fine.roots
- if(fine.roots.temp >= 0 && coarse.roots.temp >= 0){
- fine.roots <- fine.roots.temp
- coarse.roots <- coarse.roots.temp
- PEcAn.utils::logger.info("Using partitioned root values", fine.roots, "for fine and", coarse.roots, "for coarse.")
- } else{
- PEcAn.utils::logger.error("Roots could not be partitioned (fine or coarse is less than 0); please provide fine_root_carbon_content and coarse_root_carbon_content in netcdf.")
- }
- }
- } else {
- PEcAn.utils::logger.error("Not enough levels of rtsize to partition roots; please provide finer resolution for root_carbon_content or provide fine_root_carbon_content and coarse_root_carbon_content in netcdf.")
+ part_roots <- partition_roots(roots, rtsize)
+ if(!is.null(part_roots)){
+ fine.roots <- part_roots$fine.roots
+ coarse.roots <- part_roots$coarse.roots
+ } else{
+ #couldn't partition roots; error messages handled by function
}
} else{
PEcAn.utils::logger.error("Please provide rtsize dimension with root_carbon_content to allow partitioning or provide fine_root_carbon_content and coarse_root_carbon_content in netcdf.")
}
+ } else{
+ #proceed without error message
}
- ###write initial conditions from netcdf
+ ###write initial conditions from netcdf (wherever valid input isn't available, DALEC default remains)
# cf0 initial canopy foliar carbon (g/m2)
if (is.valid(leaf)) {
IC.params[["cf0"]] <- leaf * 1000 #standard kg C m-2
@@ -205,9 +215,7 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
}else{
PEcAn.utils::logger.error("TotLivBiom is less than sum of leaf and fine roots; using default for woody biomass")
}
- } else{
- #use default wood
- }
+ }
# cr0 initial pool of fine root carbon (g/m2)
if (is.valid(fine.roots)) {
@@ -234,9 +242,7 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
}
} else if(is.valid(soil) && !is.valid(wood.debris)){
PEcAn.utils::logger.error("write.configs.DALEC IC can't calculate soil matter pool without wood debris; using default. Please provide wood_debris_carbon_content in netcdf.")
- } else{
- #use default soil pool
- }
+ }
###Write to command line file
PEcAn.utils::logger.info(names(paste("Adding IC tags to file:", IC.params))
From d9f2b19e7d6d73c51652556c9776aceebf19be93 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Thu, 13 Jul 2017 14:24:30 -0400
Subject: [PATCH 096/771] Add some notes
---
models/dalec/R/write.configs.dalec.R | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/models/dalec/R/write.configs.dalec.R b/models/dalec/R/write.configs.dalec.R
index 0838d006605..82eef085852 100644
--- a/models/dalec/R/write.configs.dalec.R
+++ b/models/dalec/R/write.configs.dalec.R
@@ -159,8 +159,11 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
fine.roots <- try(ncdf4::ncvar_get(IC.nc,"fine_root_carbon_content"),silent = TRUE)
coarse.roots <- try(ncdf4::ncvar_get(IC.nc,"coarse_root_carbon_content"),silent = TRUE)
+ if(!all(sapply(c(TotLivBiom,leaf,AbvGrndWood,roots,fine.roots,coarse.roots),is.numeric))){
+ PEcAn.utils::logger.info("Any missing vars will be calculated from those provided or replaced by DALEC's defaults")
+ }
- #check if total roots are partitioned (pull out as a function for readability)
+ #check if total roots are partitionable
#note: if roots are partitionable, they will override fine_ and/or coarse_root_carbon_content if loaded
if(is.valid(roots)){
if("rtsize" %in% names(IC.nc$dim)){
@@ -179,7 +182,8 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
#proceed without error message
}
- ###write initial conditions from netcdf (wherever valid input isn't available, DALEC default remains)
+ ###Write initial conditions from netcdf (Note: wherever valid input isn't available, DALEC default remains)
+
# cf0 initial canopy foliar carbon (g/m2)
if (is.valid(leaf)) {
IC.params[["cf0"]] <- leaf * 1000 #standard kg C m-2
From 10a9b0707d800ef00ef4a076c5b73b3f387210a8 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Thu, 13 Jul 2017 14:41:49 -0400
Subject: [PATCH 097/771] Cleanup
---
models/dalec/R/write.configs.dalec.R | 16 +++++++++++++---
1 file changed, 13 insertions(+), 3 deletions(-)
diff --git a/models/dalec/R/write.configs.dalec.R b/models/dalec/R/write.configs.dalec.R
index 82eef085852..feebd699148 100644
--- a/models/dalec/R/write.configs.dalec.R
+++ b/models/dalec/R/write.configs.dalec.R
@@ -145,7 +145,7 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
}
#default.param <- read.table(system.file("default_param.dalec", package = "PEcAn.DALEC"))
- IC.params <- data.frame()
+ IC.params <- list()
if (!is.null(settings$run$inputs$poolinitcond$path)) {
IC.path <- settings$run$inputs$poolinitcond$path
IC.nc <- try(ncdf4::nc_open(IC.path))
@@ -160,13 +160,14 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
coarse.roots <- try(ncdf4::ncvar_get(IC.nc,"coarse_root_carbon_content"),silent = TRUE)
if(!all(sapply(c(TotLivBiom,leaf,AbvGrndWood,roots,fine.roots,coarse.roots),is.numeric))){
- PEcAn.utils::logger.info("Any missing vars will be calculated from those provided or replaced by DALEC's defaults")
+ PEcAn.utils::logger.info("DALEC IC: Any missing vars will be calculated from those provided or replaced by DALEC's defaults")
}
#check if total roots are partitionable
#note: if roots are patritionable, they will override fine_ and/or coarse_root_carbon_content if loaded
if(is.valid(roots)){
if("rtsize" %in% names(IC.nc$dim)){
+ PEcAn.utils::logger.info("DALEC IC: Attempting to partition root_carbon_content")
rtsize <- IC.nc$dim$rtsize$vals
part_roots <- partition_roots(roots, rtsize)
if(!is.null(part_roots)){
@@ -176,7 +177,7 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
#couldn't partition roots; error messages handled by function
}
} else{
- PEcAn.utils::logger.error("Please provide rtsize dimension with root_carbon_content to allow partitioning or provide fine_root_carbon_content and coarse_root_carbon_content in netcdf.")
+ PEcAn.utils::logger.error("DALEC IC: Please provide rtsize dimension with root_carbon_content to allow partitioning or provide fine_root_carbon_content and coarse_root_carbon_content in netcdf.")
}
} else{
#proceed without error message
@@ -224,8 +225,17 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
# cr0 initial pool of fine root carbon (g/m2)
if (is.valid(fine.roots)) {
IC.params[["cr0"]] <- fine.roots * 1000 #standard kg C m-2
+ } else if(is.valid(TotLivBiom) && is.valid(AbvGrndWood) &&
+ is.valid(leaf) && is.valid(coarse.roots)){
+ fine.roots <- (TotLivBiom - AbvGrndWood - leaf - coarse.roots) * 1000 #standard kg C m-2
+ if(leaf >= 0){
+ IC.params[["cr0"]] <- fine.roots
+ } else{
+ PEcAn.utils::logger.error("TotLivBiom is less than sum of AbvGrndWood, coarse roots, and leaf; using default for fine.roots biomass")
+ }
}
+ ###non-living variables
# cl0 initial pool of litter carbon (g/m2)
litter <- try(ncdf4::ncvar_get(IC.nc,"litter_carbon_content"),silent = TRUE)
if (is.valid(litter)) {
From e9979386b63699070f4f2b8906db7006756b13a8 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Thu, 13 Jul 2017 16:34:07 -0400
Subject: [PATCH 098/771] More cleanup
---
models/dalec/R/write.configs.dalec.R | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/models/dalec/R/write.configs.dalec.R b/models/dalec/R/write.configs.dalec.R
index feebd699148..258ceb0b7c8 100644
--- a/models/dalec/R/write.configs.dalec.R
+++ b/models/dalec/R/write.configs.dalec.R
@@ -259,10 +259,10 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
}
###Write to command line file
- PEcAn.utils::logger.info(names(paste("Adding IC tags to file:", IC.params))
for (i in seq_along(IC.params)) {
cmdFlags <- paste0(cmdFlags, " -", names(IC.params)[i], " ", IC.params[[i]])
}
+ PEcAn.utils::logger.info(paste("All command flags:",cmdFlags))
} else{
PEcAn.utils::logger.error("Bad initial conditions filepath; kept defaults")
From 8342eab228594189b49450f6b00411a5aed24d69 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Thu, 13 Jul 2017 16:43:02 -0400
Subject: [PATCH 099/771] Change list2netcdf dim check
---
modules/data.land/R/pool_ic_list2netcdf.R | 10 +++++++---
1 file changed, 7 insertions(+), 3 deletions(-)
diff --git a/modules/data.land/R/pool_ic_list2netcdf.R b/modules/data.land/R/pool_ic_list2netcdf.R
index 1650167e242..9539e4f231e 100644
--- a/modules/data.land/R/pool_ic_list2netcdf.R
+++ b/modules/data.land/R/pool_ic_list2netcdf.R
@@ -9,13 +9,17 @@
##' @author Anne Thomas
pool_ic_list2netcdf <- function(input, outdir, siteid){
- if(is.null(input$dims) || length(input$dims) == 0){
- PEcAn.utils::logger.severe("Please provide non-empty 'dims' list in input")
- }
if(is.null(input$vals) || length(input$vals) == 0){
PEcAn.utils::logger.severe("Please provide 'vals' list in input with variable names assigned to values")
}
+ if(is.null(input$dims) || length(input$dims) == 0){
+ if (any(sapply(input$vals,length) > 1)){
+ PEcAn.utils::logger.severe("A variable has length > 1; please provide non-empty 'dims' list in input")
+ }
+ }
+ #to do: check
+
dims <- list()
for(dimname in names(input$dims)){
vals <- input$dims[[which(names(input$dims) == dimname)]]
From 4e37a9b2cc5ab77e3c90abff047a4e402f254f11 Mon Sep 17 00:00:00 2001
From: Chris Black
Date: Fri, 14 Jul 2017 06:53:10 -0400
Subject: [PATCH 100/771] Biocro check cleanup (#1540)
* RNCEP no longer used (#1309)
* Remove demo
See vignettes/ for a more complete equivalent
---
models/biocro/BioCro_demo.Rmd | 25 -------------------------
models/biocro/DESCRIPTION | 3 +--
2 files changed, 1 insertion(+), 27 deletions(-)
delete mode 100644 models/biocro/BioCro_demo.Rmd
diff --git a/models/biocro/BioCro_demo.Rmd b/models/biocro/BioCro_demo.Rmd
deleted file mode 100644
index 47b64b94b90..00000000000
--- a/models/biocro/BioCro_demo.Rmd
+++ /dev/null
@@ -1,25 +0,0 @@
-
-
-```{r}
-library(PEcAn.all)
-logger.setQuitOnSevere(FALSE)
-
-settings <- read.settings("models/biocro/inst/extdata/misp.xml")
-
-#---------------- Run PEcAn workflow. -------------------------------------------------------------#
-# Query the trait database for data and priors
-settings$pfts <- get.trait.data(settings$pfts, settings$model$type, settings$run$dbfiles, settings$database$bety, settings$meta.analysis$update)
-
-# Run the PEcAn meta.analysis
-run.meta.analysis(settings$pfts, settings$meta.analysis$iter, settings$meta.analysis$threshold, settings$run$dbfiles, settings$database$bety)
-
-run.write.configs(settings = settings, write = FALSE) # Calls model specific write.configs e.g. write.config.ed.R
-## load met data
-start.model.runs(settings = settings, write = FALSE) # Start ecosystem model runs
-
-get.results(settings) # Get results of model runs
-
-run.sensitivity.analysis() # Run sensitivity analysis and variance decomposition on model output
-
-run.ensemble.analysis() # Run ensemble analysis on model output.
-```
diff --git a/models/biocro/DESCRIPTION b/models/biocro/DESCRIPTION
index a6fd881a75d..93d8aef5dc6 100644
--- a/models/biocro/DESCRIPTION
+++ b/models/biocro/DESCRIPTION
@@ -20,8 +20,7 @@ Imports:
Suggests:
BioCro,
testthat (>= 1.0.2),
- RPostgreSQL,
- RNCEP
+ RPostgreSQL
Remotes:
github::ebimodeling/biocro
License: FreeBSD + file LICENSE
From 32243d71bae46bb1bf29295078474440abe48d01 Mon Sep 17 00:00:00 2001
From: annethomas
Date: Fri, 14 Jul 2017 10:17:10 -0400
Subject: [PATCH 101/771] Tiny notes changes
---
models/dalec/R/write.configs.dalec.R | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/models/dalec/R/write.configs.dalec.R b/models/dalec/R/write.configs.dalec.R
index 258ceb0b7c8..141355ddf4c 100644
--- a/models/dalec/R/write.configs.dalec.R
+++ b/models/dalec/R/write.configs.dalec.R
@@ -74,7 +74,7 @@ convert.samples.DALEC <- function(trait.samples) {
return(trait.samples)
} # convert.samples.DALEC
-####function to split root_carbon_content into fine and coarse roots by rtsize dimension at the .002 m threshold
+####partition_roots: function to split root_carbon_content into fine and coarse roots by rtsize dimension at the .002 m threshold
partition_roots <- function(roots, rtsize){
if(length(rtsize) > 1 && length(rtsize) == length(roots)){
threshold <- .002
@@ -139,7 +139,7 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
### INITIAL CONDITIONS
- #function to check that ncvar was present (numeric) and a valid value was given it (not NA or negative)
+ #function to check that ncvar was loaded (numeric) and has a valid value (not NA or negative)
is.valid <- function(var){
return(all(is.numeric(var) && !is.na(var) && var >= 0))
}
From a1cbc1c6a1d4485560899db65df2e478284ec9bf Mon Sep 17 00:00:00 2001
From: annethomas
Date: Fri, 14 Jul 2017 16:56:49 -0400
Subject: [PATCH 102/771] Add LAI calculations
---
models/dalec/R/write.configs.dalec.R | 24 +++++++++++++++++++-----
models/dalec/inst/default_param.dalec | 1 +
2 files changed, 20 insertions(+), 5 deletions(-)
diff --git a/models/dalec/R/write.configs.dalec.R b/models/dalec/R/write.configs.dalec.R
index 141355ddf4c..768257aa52a 100644
--- a/models/dalec/R/write.configs.dalec.R
+++ b/models/dalec/R/write.configs.dalec.R
@@ -15,13 +15,13 @@ PREFIX_XML <- "\n"
convert.samples.DALEC <- function(trait.samples) {
DEFAULT.LEAF.C <- 0.48
- ## convert SLA from m2 / kg leaf to m2 / kg C
+ ## convert SLA from m2 / kg leaf to m2 / g C
if ("SLA" %in% names(trait.samples)) {
trait.samples[["SLA"]] <- trait.samples[["SLA"]]/DEFAULT.LEAF.C/1000
}
- # t1 rate variable controling decomposition from litter to soil organinc matter [day-1, ref T
+ # t1 rate variable controlling decomposition from litter to soil organinc matter [day-1, ref T
# 10C]
if ("litter_decomposition_to_SOM" %in% names(trait.samples)) {
names(trait.samples)[which(names(trait.samples) == "litter_decomposition_to_SOM")] <- "t1"
@@ -144,9 +144,10 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
return(all(is.numeric(var) && !is.na(var) && var >= 0))
}
- #default.param <- read.table(system.file("default_param.dalec", package = "PEcAn.DALEC"))
+ default.param <- read.table(system.file("default_param.dalec", package = "PEcAn.DALEC"), header = TRUE)
IC.params <- list()
- if (!is.null(settings$run$inputs$poolinitcond$path)) {
+
+ if (!is.null(settings$run$inputs$poolinitcond$path)) {
IC.path <- settings$run$inputs$poolinitcond$path
IC.nc <- try(ncdf4::nc_open(IC.path))
@@ -154,12 +155,13 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
#check/load biomass netcdf variables
TotLivBiom <- try(ncdf4::ncvar_get(IC.nc,"TotLivBiom"),silent = TRUE)
leaf <- try(ncdf4::ncvar_get(IC.nc,"leaf_carbon_content"),silent = TRUE)
+ LAI <- try(ncdf4::ncvar_get(IC.nc,"LAI"),silent = TRUE)
AbvGrndWood <- try(ncdf4::ncvar_get(IC.nc,"AbvGrndWood"),silent = TRUE)
roots <- try(ncdf4::ncvar_get(IC.nc,"root_carbon_content"),silent = TRUE)
fine.roots <- try(ncdf4::ncvar_get(IC.nc,"fine_root_carbon_content"),silent = TRUE)
coarse.roots <- try(ncdf4::ncvar_get(IC.nc,"coarse_root_carbon_content"),silent = TRUE)
- if(!all(sapply(c(TotLivBiom,leaf,AbvGrndWood,roots,fine.roots,coarse.roots),is.numeric))){
+ if(!all(sapply(c(TotLivBiom,leaf,LAI,AbvGrndWood,roots,fine.roots,coarse.roots),is.numeric))){
PEcAn.utils::logger.info("DALEC IC: Any missing vars will be calculated from those provided or replaced by DALEC's defaults")
}
@@ -183,11 +185,23 @@ write.config.DALEC <- function(defaults, trait.values, settings, run.id) {
#proceed without error message
}
+
###Write initial conditions from netcdf (Note: wherever valid input isn't available, DALEC default remains)
# cf0 initial canopy foliar carbon (g/m2)
if (is.valid(leaf)) {
IC.params[["cf0"]] <- leaf * 1000 #standard kg C m-2
+ } else if(is.valid(LAI)){
+ if("SLA" %in% names(params)){
+ LMA <- 1/params[1,"SLA"] #SLA converted to m2 kgC-1 in convert.samples
+ leaf <- LAI * LMA
+ IC.params[["cf0"]] <- leaf
+ } else{
+ SLA = default.param[which(default.param$cmdFlag == "SLA"),"val"]
+ LMA <- 1/SLA
+ leaf <- LAI * LMA
+ IC.params[["cf0"]] <- leaf
+ }
} else if(is.valid(TotLivBiom) && is.valid(AbvGrndWood) &&
is.valid(fine.roots) && is.valid(coarse.roots)){
leaf <- (TotLivBiom - AbvGrndWood - fine.roots - coarse.roots) * 1000 #standard kg C m-2
diff --git a/models/dalec/inst/default_param.dalec b/models/dalec/inst/default_param.dalec
index 38f238d4ac5..fd5be66a333 100644
--- a/models/dalec/inst/default_param.dalec
+++ b/models/dalec/inst/default_param.dalec
@@ -8,6 +8,7 @@ t6 2.06E-06 #rate of wood loss
t7 2.48E-03 #rate of root loss
t8 2.28E-02 #rate of respiration from litter
t9 2.65E-06 #rate of respiration from litter SOM
+SLA 9.01E-03 #specific leaf area 1.0/111.0
cf0 57.7049 #initial canopy foliar carbon (g/m2)
cw0 769.863 #initial pool of woody carbon (g/m2)
cr0 101.955 #initial pool of fine root carbon (g/m2)
From d1bc58de6e86155df78c8ff061a2b95ced56a883 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 17:04:21 +0530
Subject: [PATCH 103/771] Added files to display the input files for the
variables in the config.php
---
web/setups/core.php | 45 +++++++++++++++++++++++++++++++++++++++++++++
web/setups/edit.php | 42 ++++++++++++++++++++++++++++++++++++++++++
2 files changed, 87 insertions(+)
create mode 100644 web/setups/core.php
create mode 100644 web/setups/edit.php
diff --git a/web/setups/core.php b/web/setups/core.php
new file mode 100644
index 00000000000..07bd74b3d24
--- /dev/null
+++ b/web/setups/core.php
@@ -0,0 +1,45 @@
+
diff --git a/web/setups/edit.php b/web/setups/edit.php
new file mode 100644
index 00000000000..b5734e28fbe
--- /dev/null
+++ b/web/setups/edit.php
@@ -0,0 +1,42 @@
+
+
+
From dcf539289760f3c72a70df10ae142f12f91cd4a4 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 17:09:40 +0530
Subject: [PATCH 104/771] Added a simple template to be used in all the
config pages
---
web/setups/page.template.php | 58 ++++++++++++++++++++++++++++++
web/setups/pagefooter.template.php | 19 ++++++++++
2 files changed, 77 insertions(+)
create mode 100644 web/setups/page.template.php
create mode 100644 web/setups/pagefooter.template.php
diff --git a/web/setups/page.template.php b/web/setups/page.template.php
new file mode 100644
index 00000000000..e0b390ce76e
--- /dev/null
+++ b/web/setups/page.template.php
@@ -0,0 +1,58 @@
+
+
+
+
+PEcAn
+
+
+
+
+
+
+
+
+
+
+
diff --git a/web/setups/pagefooter.template.php b/web/setups/pagefooter.template.php
new file mode 100644
index 00000000000..ff5a4559465
--- /dev/null
+++ b/web/setups/pagefooter.template.php
@@ -0,0 +1,19 @@
+
+
+
+
+
+
+
From 2cb6dd24d223e873050c346dc3b826d3c5d7c404 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 17:12:47 +0530
Subject: [PATCH 105/771] redirection added if the config.php doesn't
exist
---
web/01-introduction.php | 18 ++++++++++++------
1 file changed, 12 insertions(+), 6 deletions(-)
diff --git a/web/01-introduction.php b/web/01-introduction.php
index c0e54835140..6b2d6002627 100644
--- a/web/01-introduction.php
+++ b/web/01-introduction.php
@@ -2,12 +2,18 @@
/**
* Copyright (c) 2012 University of Illinois, NCSA.
* All rights reserved. This program and the accompanying materials
- * are made available under the terms of the
+ * are made available under the terms of the
* University of Illinois/NCSA Open Source License
* which accompanies this distribution, and is available at
* http://opensource.ncsa.illinois.edu/license.html
*/
+// Check for config.php if doesn't exits then redirect to the setup page
+if (!file_exists('config.php'))
+{
+ header('/setups/edit.php?key=all');
+}
+
// Check login
require("common.php");
@@ -40,7 +46,7 @@
function validate() {
$("#error").html("");
}
-
+
function prevStep() {
$("#formprev").submit();
}
@@ -71,10 +77,10 @@ function nextStep() {
-
+
-
+
Documentation
@@ -89,7 +95,7 @@ function nextStep() {
PEcAn worklflow. You will be able to always go back to a
previous step to change inputs. However once the model is
running it will continue to run until it finishes. You will
- be able to use the history button to jump to existing
+ be able to use the history button to jump to existing
executions of PEcAn.
The following webpages will help to setup the PEcAn
workflow. You will be asked the following questions:
@@ -104,7 +110,7 @@ function nextStep() {
PEcAn will execute the workflow.
Results After execution of the PEcAn workflow you
will be presented with a page showing the results of the
- PEcAn workflow.
+ PEcAn workflow.
From 44807d4050648393b45e21cbcae82fcb8bbc9e3b Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 19:46:38 +0530
Subject: [PATCH 106/771] Updated Documentation for Dockers
---
.../basic_users_guide/Getting-started.Rmd | 38 +++++++++++++------
web/common.php | 17 +++++++--
2 files changed, 40 insertions(+), 15 deletions(-)
diff --git a/book_source/basic_users_guide/Getting-started.Rmd b/book_source/basic_users_guide/Getting-started.Rmd
index a587dd4a465..ffe866509da 100644
--- a/book_source/basic_users_guide/Getting-started.Rmd
+++ b/book_source/basic_users_guide/Getting-started.Rmd
@@ -12,13 +12,13 @@ There are two ways of using PEcAn, via the web interface and directly within R.
### Working with the PEcAn VM
-1. PEcAn consists of a set of scripts and code that is compiled within a Linux operating system and saved in a “virtual machine (VM)”. Virtual machines allow for running consistent set-ups without worrying about differences between operating systems, library dependencies, compiling the code, etc.
+1. PEcAn consists of a set of scripts and code that is compiled within a Linux operating system and saved in a “virtual machine (VM)”. Virtual machines allow for running consistent set-ups without worrying about differences between operating systems, library dependencies, compiling the code, etc.
-2. To run the PEcAn VM you will need to install VirtualBox, the program that runs the virtual machine [http://www.virtualbox.org](http://www.virtualbox.org). On Windows you may see a warning about Logo testing, it is okay to ignore the warning.
+2. To run the PEcAn VM you will need to install VirtualBox, the program that runs the virtual machine [http://www.virtualbox.org](http://www.virtualbox.org). On Windows you may see a warning about Logo testing, it is okay to ignore the warning.
3. After you have Virtual Box installed you’ll need to download the PEcAn virtual machine: [http://opensource.ncsa.illinois.edu/projects/artifacts.php?key=PECAN](http://opensource.ncsa.illinois.edu/projects/artifacts.php?key=PECAN). The virtual machine is available under the "**Files**" header. Click the 32 or 64 bit ".ova" file and note that the download is ~5 GB so will take from several minutes to hours depending on the connection speed.
-4. To open up the virtual machine you'll first want to open up VirtualBox.
+4. To open up the virtual machine you'll first want to open up VirtualBox.
5. The first time you use the VM you'll want to use File → Import Appliance in VirtualBox in order to import the VM. This will create a virtual machine from the disk image. When asked about the Appliance Import Settings make sure you select "Reinitialize the MAC address of all network cards". This is not selected by default and can result in networking issues since multiple machines might claim to have the same network MAC Address. That said, users who have experienced network connection difficulties within the VM have sometimes had better luck after reinstalling without reinitializing.
@@ -46,21 +46,21 @@ Login to [Amazon Web Services (AWS)](http://console.aws.amazon.com/) and select
+ Type “pecan” into the search window
+ Click on the toggle button on the left next to PEcAn1.4.6
+ Click on the “Launch” button at the top
-2. Choose an Instance Type
+2. Choose an Instance Type
+ Select what type of machine you want to run. For this demo the default, t2.micro, will be adequate. Be aware that different machine types incur very different costs, from 1.3 cents/hour to over $5/hr https://aws.amazon.com/ec2/pricing/
+ Select t2.micro, then click “Next: Configure Instance Details”
-3. Configure Instance Details
+3. Configure Instance Details
+ The defaults are OK. Click “Next: Add Storage”
-4. Add Storage
+4. Add Storage
+ The defaults are OK. Click “Next: Tag Instance”
-5. Tag Instance
+5. Tag Instance
+ You can name your instance if you want. Click “Next: Configure Security Group”
6. Configure Security Group
+ You will need to add two new rules:
+ Click “Add Rule” then select “HTTP” from the pull down menu. This rule allows you to access the webserver on PEcAn.
+ Click “Add Rule”, leave the pull down on “Custom TCP Rule”, and then change the Port Range from 0 to 8787. Set “Source” to Anywhere. This rule allows you to access RStudio Server on PEcAn.
- + Click “Review and Launch” . You will then see this pop-up:
-
+ + Click “Review and Launch” . You will then see this pop-up:
+
```{r, echo=FALSE,fig.align='center'}
knitr::include_graphics(rep("figures/pic2.jpg"))
```
@@ -69,7 +69,7 @@ Select the default drive volume type and click Next
7. Review and Launch
+ Review the settings and then click “Launch”, which will pop up a select/create Key Pair window.
-8. Key Pair
+8. Key Pair
+ Select “Create a new key pair” and give it a name. You won’t actually need this key unless you need to SSH into your PEcAn server, but AWS requires you to create one. Click on “Download Key Pair” then on “Launch Instances”. Next click on “View Instances” at the bottom of the following page.
@@ -77,7 +77,7 @@ Select the default drive volume type and click Next
knitr::include_graphics(rep("../figures/pic3.jpg"))
```
-9. Instances
+9. Instances
+ You will see the status of your PEcAn VM, which will take a minute to boot up. Wait until the Instance State reads “running”. The most important piece of information here is the Public IP, which is the URL you will need in order to access your PEcAn instance from within your web browser (see Demo 1 below).
+ Be aware that it often takes ~1 hr for AWS instances to become fully operational, so if you get an error when you put the Public IP in you web browser, most of the time you just need to wait a bit longer.
Congratulations! You just started a PEcAn server in the “cloud”!
@@ -87,4 +87,20 @@ Select the default drive volume type and click Next
+ To TERMINATE the instance (which will DELETE your PEcAn machine), select your instance and click Actions > Instance state > Terminate. Terminated instances will not incur costs. In most cases you will also want to go to the Volumes menu and delete the storage associated with your PEcAn VM.Remember, AWS is free for one year, but will automatically charge a fee in second year if account is not cancelled.
+### Working with the PEcAn Containers (Docker)
+
+Following are the steps to setup a Docker instance of the PEcAn.
+
+1. Make sure that the machine on which have docker and docker-compose installed. For instruction on how to install docker and docker-compose please visit the [official documentations](https://docs.docker.com/engine/installation/).
+
+2. Visit the PEcAn Project on [github](https://github.com/PecanProject/pecan/tree/develop) and clone the repository to your machine.
+
+3. cd to root of the repository and run `docker-compose up -d` here -d makes it run in detached mode so it won't show the log on the terminal
+ The above command pull the respective docker images and also create the required images.
+
+ To access the web interface can visit :8080
+ If using localmachine then can use localhost:8080
+
+Only SIPNET model is included as the default package in it.
+
[pecan-wikipedia]: https://en.wikipedia.org/wiki/Pecan
diff --git a/web/common.php b/web/common.php
index 956d02e9757..c04e3c6000c 100644
--- a/web/common.php
+++ b/web/common.php
@@ -10,7 +10,7 @@
function get_footer() {
return "The PEcAn project is supported by the National Science Foundation
- (ABI #1062547, ABI #1458021, DIBBS #1261582, ARC #1023477, EF #1318164, EF #1241894, EF #1241891), NASA
+ (ABI #1062547, ABI #1458021, DIBBS #1261582, ARC #1023477, EF #1318164, EF #1241894, EF #1241891), NASA
Terrestrial Ecosystems, the Energy Biosciences Institute, and an Amazon AWS in Education Grant.
PEcAn Version 1.4.10.1 ";
}
@@ -38,7 +38,7 @@ function passvars($ignore) {
echo " ";
}
}
- }
+ }
}
# ----------------------------------------------------------------------
# CONVERT STRING TO XML
@@ -59,7 +59,16 @@ function open_database() {
global $db_bety_type;
global $pdo;
- $pdo = new PDO("${db_bety_type}:host=${db_bety_hostname};dbname=${db_bety_database}", $db_bety_username, $db_bety_password);
+ try {
+ $pdo = new PDO("${db_bety_type}:host=${db_bety_hostname};dbname=${db_bety_database}", $db_bety_username, $db_bety_password);
+ $pdo->setAttribute(PDO::ATTR_ERRMODE, PDO::ERRMODE_EXCEPTION);
+ } catch (PDOException $e) {
+ // handler to input database configurations manually
+ echo "Something wrong :(Connection failed: " . $e->getMessage()." You can use the reset button to reset the settings and try agin.";
+ die();
+ }
+
+// $pdo = new PDO("${db_bety_type}:host=${db_bety_hostname};dbname=${db_bety_database}", $db_bety_username, $db_bety_password);
}
function close_database() {
@@ -121,7 +130,7 @@ function encrypt_password($password, $salt) {
for($i=0; $i<$REST_AUTH_DIGEST_STRETCHES; $i++) {
$digest=sha1($digest . "--" . $salt . "--" . $password . "--" . $REST_AUTH_SITE_KEY);
}
- return $digest;
+ return $digest;
}
function logout() {
From 8e2fbb47df5f8169432677164f286efb6f5bc0db Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 19:47:34 +0530
Subject: [PATCH 107/771] Typo fix
---
book_source/basic_users_guide/Getting-started.Rmd | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/book_source/basic_users_guide/Getting-started.Rmd b/book_source/basic_users_guide/Getting-started.Rmd
index ffe866509da..72c0dfabef9 100644
--- a/book_source/basic_users_guide/Getting-started.Rmd
+++ b/book_source/basic_users_guide/Getting-started.Rmd
@@ -101,6 +101,6 @@ Following are the steps to setup a Docker instance of the PEcAn.
To access the web interface can visit :8080
If using localmachine then can use localhost:8080
-Only SIPNET model is included as the default package in it.
+Only SIPNET model is included as the default model in it.
[pecan-wikipedia]: https://en.wikipedia.org/wiki/Pecan
From faeb7ea5cda96123436f283183eadabfd1e2e6b6 Mon Sep 17 00:00:00 2001
From: shubhamagarwal92
Date: Sat, 15 Jul 2017 09:22:33 -0500
Subject: [PATCH 108/771] Adding geom smooth
---
shiny/workflowPlot/server.R | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/shiny/workflowPlot/server.R b/shiny/workflowPlot/server.R
index d2b7b132292..52e0a791515 100644
--- a/shiny/workflowPlot/server.R
+++ b/shiny/workflowPlot/server.R
@@ -147,7 +147,7 @@ server <- shinyServer(function(input, output, session) {
plt <- plt + geom_line()
}
)
- plt <- plt + labs(title=title, x=xlab, y=ylab)
+ plt <- plt + labs(title=title, x=xlab, y=ylab) + geom_smooth()
# if (!is.null(loaded_data)) {
# if (input$load_data>0) {
From cb2cefa066e70cda4dda606d747e6037b5abec93 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 21:09:45 +0530
Subject: [PATCH 109/771] Added a script to update the requested
configuration in config.php
---
web/setups/add.php | 54 +++++++++++++++++++++++++++++++++++++++++++++
web/setups/core.php | 2 +-
2 files changed, 55 insertions(+), 1 deletion(-)
create mode 100644 web/setups/add.php
diff --git a/web/setups/add.php b/web/setups/add.php
new file mode 100644
index 00000000000..c857b335bf7
--- /dev/null
+++ b/web/setups/add.php
@@ -0,0 +1,54 @@
+";
+ if(preg_match($pattern,$line)){
+ //spliting variable and values so can used variable as the input field names
+ $temp = preg_split('/=/',$line);
+ $inputname = preg_split('/\$/',$temp[0]);
+
+ // get the new value from the post request
+ $newvalue = $_POST[$inputname[1]];
+
+ //$newline = preg_replace('/'.$temp[0].'/',$temp[0].'="'.$newvalue.'";',$line);
+ //echo $temp[0].'="'.$newvalue.'";';
+ //var_dump($newvalue);
+ fwrite($file, $temp[0].'="'.$newvalue.'";');
+ //var_dump($inputname);
+ //var_dump($temp);
+ //echo "match found ";
+ }
+ else {
+ // if no change in the line write as it is
+ fwrite($file, $line);
+ }
+}
+fclose($file);
+
+// copy the temprory file to config.php and remove it
+rename('../config.php.temp', '../config.php');
+unlink('../config.php.temp');
+
+include 'page.template.php';
+?>
+ Configuration details
+ Configuration Sucessfully updated
+
diff --git a/web/setups/core.php b/web/setups/core.php
index 07bd74b3d24..49f64e758c6 100644
--- a/web/setups/core.php
+++ b/web/setups/core.php
@@ -32,7 +32,7 @@
}
// read content of file
- $file = fopen('../config.php', "c+") or die('Cannot open file: Check whether file exist and it have correct permissions');
+ //$file = fopen('../config.php', "c+") or die('Cannot open file: Check whether file exist and it have correct permissions');
$file_contents = file('../config.php');
From f9478ae7b68e9e56325b57976dc74945e7712fd5 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 21:21:30 +0530
Subject: [PATCH 110/771] New line problem fixed in the config.php
---
web/setups/add.php | 2 +-
1 file changed, 1 insertion(+), 1 deletion(-)
diff --git a/web/setups/add.php b/web/setups/add.php
index c857b335bf7..1d3853d5234 100644
--- a/web/setups/add.php
+++ b/web/setups/add.php
@@ -29,7 +29,7 @@
//$newline = preg_replace('/'.$temp[0].'/',$temp[0].'="'.$newvalue.'";',$line);
//echo $temp[0].'="'.$newvalue.'";';
//var_dump($newvalue);
- fwrite($file, $temp[0].'="'.$newvalue.'";');
+ fwrite($file, $temp[0].'="'.$newvalue.'";'."\n");
//var_dump($inputname);
//var_dump($temp);
//echo "match found ";
From 2e2a0c50e72346f0a30b9dc97660c0cf5b3da265 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 22:32:22 +0530
Subject: [PATCH 111/771] Added a list of available configuration in the
web page
---
web/setups/core.php | 4 ++--
web/setups/edit.php | 2 +-
web/setups/page.template.php | 7 +++++++
3 files changed, 10 insertions(+), 3 deletions(-)
diff --git a/web/setups/core.php b/web/setups/core.php
index 49f64e758c6..ab71593f90b 100644
--- a/web/setups/core.php
+++ b/web/setups/core.php
@@ -13,7 +13,7 @@
*/
// If file doesn't exist then create a new file
- if (file_exists ("config.php") == false){
+ if (file_exists ("../config.php") == false){
copy ('../config.example.php', '../config.php');
}
@@ -24,7 +24,7 @@
// set the pattern to match with the input
switch ($key) {
- case 'all': $pattern = '/^\$/i'; break;
+ case 'all': $pattern = '/^\$/i'; break; // not working properly
case 'browndog': $pattern = '/\$browndog*/i'; break;
case 'database': $pattern = '/\$db_bety_*/i'; break;
case 'fiadb': $pattern = '/\$db_fia_*/i'; break;
diff --git a/web/setups/edit.php b/web/setups/edit.php
index b5734e28fbe..b3f12a3cb27 100644
--- a/web/setups/edit.php
+++ b/web/setups/edit.php
@@ -13,7 +13,7 @@
include 'core.php';
include 'page.template.php';
?>
-
+
+
List of available configurations
+ Database
+ Browndog
+ FIA Database
+ Google MapKey
+
Documentation
From dd09ffd19e009403299e4a85d3a8a869dc8c9cff Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 22:37:57 +0530
Subject: [PATCH 112/771] minor url fix in 01-introduction.php
---
web/01-introduction.php | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/web/01-introduction.php b/web/01-introduction.php
index 54184ccea0d..051e55d9db6 100644
--- a/web/01-introduction.php
+++ b/web/01-introduction.php
@@ -9,9 +9,9 @@
*/
// Check for config.php if doesn't exits then redirect to the setup page
-if (!file_exists('config.php'))
+if (file_exists('config.php') == false)
{
- header('/setups/edit.php?key=all');
+ header('setups/edit.php?key=all');
}
// Check login
From b737321808a35592fb8b8814bc16ab27dee32e6e Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 22:40:58 +0530
Subject: [PATCH 113/771] redirect in common.php if config.php doesn't
exist
---
web/common.php | 5 +++++
1 file changed, 5 insertions(+)
diff --git a/web/common.php b/web/common.php
index c04e3c6000c..8ab07107b6e 100644
--- a/web/common.php
+++ b/web/common.php
@@ -1,5 +1,10 @@
Date: Sat, 15 Jul 2017 22:48:13 +0530
Subject: [PATCH 114/771] Added a temporary redirect to setup pages if
config.php doesn't exist
---
web/01-introduction.php | 5 ++++-
web/common.php | 5 ++++-
2 files changed, 8 insertions(+), 2 deletions(-)
diff --git a/web/01-introduction.php b/web/01-introduction.php
index 051e55d9db6..0b7587370fc 100644
--- a/web/01-introduction.php
+++ b/web/01-introduction.php
@@ -11,7 +11,10 @@
// Check for config.php if doesn't exits then redirect to the setup page
if (file_exists('config.php') == false)
{
- header('setups/edit.php?key=all');
+ $host = $_SERVER['HTTP_HOST'];
+ $uri = rtrim(dirname($_SERVER['PHP_SELF']), '/\\');
+ header("Location: http://$host$uri/edit.php?key=all",TRUE,307);
+ exit;
}
// Check login
diff --git a/web/common.php b/web/common.php
index 8ab07107b6e..8541c5e627e 100644
--- a/web/common.php
+++ b/web/common.php
@@ -2,7 +2,10 @@
if (file_exists('config.php') == false)
{
- header('setups/edit.php?key=all');
+ $host = $_SERVER['HTTP_HOST'];
+ $uri = rtrim(dirname($_SERVER['PHP_SELF']), '/\\');
+ header("Location: http://$host$uri/edit.php?key=all",TRUE,307);
+ exit;
}
require("config.php");
From 2b8dbc6e7b312b535b80d51cff4f0e10096a2987 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 22:51:03 +0530
Subject: [PATCH 115/771] Removed the redirect to setup page from
01-introduction.php
---
web/01-introduction.php | 9 ---------
web/common.php | 2 +-
2 files changed, 1 insertion(+), 10 deletions(-)
diff --git a/web/01-introduction.php b/web/01-introduction.php
index 0b7587370fc..81c4cec32dd 100644
--- a/web/01-introduction.php
+++ b/web/01-introduction.php
@@ -8,15 +8,6 @@
* http://opensource.ncsa.illinois.edu/license.html
*/
-// Check for config.php if doesn't exits then redirect to the setup page
-if (file_exists('config.php') == false)
-{
- $host = $_SERVER['HTTP_HOST'];
- $uri = rtrim(dirname($_SERVER['PHP_SELF']), '/\\');
- header("Location: http://$host$uri/edit.php?key=all",TRUE,307);
- exit;
-}
-
// Check login
require("common.php");
diff --git a/web/common.php b/web/common.php
index 8541c5e627e..1dd232f63be 100644
--- a/web/common.php
+++ b/web/common.php
@@ -4,7 +4,7 @@
{
$host = $_SERVER['HTTP_HOST'];
$uri = rtrim(dirname($_SERVER['PHP_SELF']), '/\\');
- header("Location: http://$host$uri/edit.php?key=all",TRUE,307);
+ header("Location: http://$host$uri/setups/edit.php?key=all",TRUE,307);
exit;
}
From e0184395bb22d9227484bb514663a92236ea5280 Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 23:03:42 +0530
Subject: [PATCH 116/771] Added handler if copying of config files fails
---
web/setups/core.php | 8 ++++++--
1 file changed, 6 insertions(+), 2 deletions(-)
diff --git a/web/setups/core.php b/web/setups/core.php
index ab71593f90b..d2625c1f7f9 100644
--- a/web/setups/core.php
+++ b/web/setups/core.php
@@ -14,7 +14,11 @@
// If file doesn't exist then create a new file
if (file_exists ("../config.php") == false){
- copy ('../config.example.php', '../config.php');
+ if (!copy ('../config.example.php', '../config.php')){
+ $error = error_get_last();
+ echo "error:$error";
+ die();
+ }
}
// key defines the attribute or the group of attributes which are needed to modify
@@ -24,7 +28,7 @@
// set the pattern to match with the input
switch ($key) {
- case 'all': $pattern = '/^\$/i'; break; // not working properly
+ case 'all': $pattern = '/^\$/i'; break; // not working properly
case 'browndog': $pattern = '/\$browndog*/i'; break;
case 'database': $pattern = '/\$db_bety_*/i'; break;
case 'fiadb': $pattern = '/\$db_fia_*/i'; break;
From 199197bfb13e3ebdc7378a91cc1b626ff06a809e Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 23:09:34 +0530
Subject: [PATCH 117/771] Exception handler for reading config.php
 contents
---
web/setups/core.php | 4 ++--
1 file changed, 2 insertions(+), 2 deletions(-)
diff --git a/web/setups/core.php b/web/setups/core.php
index d2625c1f7f9..c8c4e5f6415 100644
--- a/web/setups/core.php
+++ b/web/setups/core.php
@@ -16,7 +16,7 @@
if (file_exists ("../config.php") == false){
if (!copy ('../config.example.php', '../config.php')){
$error = error_get_last();
- echo "error:$error";
+ echo "error:$error";
die();
}
}
@@ -38,7 +38,7 @@
// read content of file
//$file = fopen('../config.php', "c+") or die('Cannot open file: Check whether file exist and it have correct permissions');
- $file_contents = file('../config.php');
+ $file_contents = file('../config.php') or die('Cannot open file: Check whether file exist and it have correct permissions');
//var_dump($file_contents);
//var_dump($pattern);
From cab18a0f7584ec524ef0df9675048f3bbe2b007b Mon Sep 17 00:00:00 2001
From: Amanskywalker
Date: Sat, 15 Jul 2017 23:38:54 +0530
Subject: [PATCH 118/771] fixed the redirect loop problem Added
 temporary redirect code 307 when redirecting to the config page Added
 footer in pagefooter.template.php
---
web/common.php | 3 +--
web/setups/core.php | 3 +--
web/setups/page.template.php | 2 --
web/setups/pagefooter.template.php | 5 ++++-
4 files changed, 6 insertions(+), 7 deletions(-)
diff --git a/web/common.php b/web/common.php
index 1dd232f63be..42542491087 100644
--- a/web/common.php
+++ b/web/common.php
@@ -3,8 +3,7 @@
if (file_exists('config.php') == false)
{
$host = $_SERVER['HTTP_HOST'];
- $uri = rtrim(dirname($_SERVER['PHP_SELF']), '/\\');
- header("Location: http://$host$uri/setups/edit.php?key=all",TRUE,307);
+ header("Location: http://$host/setups/edit.php?key=all",TRUE,307);
exit;
}
diff --git a/web/setups/core.php b/web/setups/core.php
index c8c4e5f6415..f13637d50c4 100644
--- a/web/setups/core.php
+++ b/web/setups/core.php
@@ -15,8 +15,7 @@
// If file doesn't exist then create a new file
if (file_exists ("../config.php") == false){
if (!copy ('../config.example.php', '../config.php')){
- $error = error_get_last();
- echo "error:$error";
+ echo "error: permissions denined";
die();
}
}
diff --git a/web/setups/page.template.php b/web/setups/page.template.php
index b3668efefba..0d3302a7c69 100644
--- a/web/setups/page.template.php
+++ b/web/setups/page.template.php
@@ -8,8 +8,6 @@
* http://opensource.ncsa.illinois.edu/license.html
*/
-require("../common.php");
-
// This page is designed to act as the template page for all the configurations setups
?>
diff --git a/web/setups/pagefooter.template.php b/web/setups/pagefooter.template.php
index ff5a4559465..1fbe2f7e6c6 100644
--- a/web/setups/pagefooter.template.php
+++ b/web/setups/pagefooter.template.php
@@ -13,7 +13,10 @@
?>
-
+