PecanProject / pecan

The Predictive Ecosystem Analyzer (PEcAn) is an integrated ecological bioinformatics toolbox.
www.pecanproject.org

Missing function in SA? #790

Closed: serbinsh closed this issue 8 years ago

serbinsh commented 8 years ago
2016-04-11 10:45:15 INFO   [read.output] :
   GPP Mean: 86200 Median: 86200
$coef.vars
      Jmax      Vcmax
0.01210844 0.01824607

$elasticities
    Jmax    Vcmax
0.000000 1.000001

$sensitivities
    Jmax    Vcmax
   0.000 1169.344

$variances
        Jmax        Vcmax
9.354102e-24 2.418720e+06

$partial.variances
        Jmax        Vcmax
3.867376e-30 1.000000e+00

           Jmax    Vcmax
0.135  86171.32 81408.62
2.275  86171.32 83002.06
15.866 86171.32 84589.82
50     86171.32 86171.32
84.134 86171.32 87751.30
97.725 86171.32 89314.52
99.865 86171.32 89291.53
TableGrob (2 x 1) "arrange": 2 grobs
      z     cells    name           grob
Jmax  1 (1-1,1-1) arrange gtable[layout]
Vcmax 2 (2-2,1-1) arrange gtable[layout]
$Jmax

$Vcmax

Loading required package: ggmap
Error: could not find function "status.start"
Execution halted

I have been seeing this pop up in my MAAT tests. I am not sure whether this is specific to my current MAAT code or whether others are seeing it with other models. @dlebauer @tonygardella

dlebauer commented 8 years ago

status.start is defined in web/workflow.R

dlebauer commented 8 years ago

https://github.com/PecanProject/pecan/blob/master/web/workflow.R
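
For context, those status helpers just append start/finish markers for each workflow stage to a STATUS file in the output directory. A minimal sketch of the idea (the exact signatures and file format here are assumptions, not the verbatim code from web/workflow.R):

# Sketch of the STATUS helpers; assumes the global `settings` list from read.settings().
# status.start() records when a stage begins, status.end() when it finishes.
status.start <- function(name) {
  cat(paste(name, format(Sys.time(), "%F %T"), sep = "\t"),
      file = file.path(settings$outdir, "STATUS"), append = TRUE)
}

status.end <- function(status = "DONE") {
  cat(paste("", format(Sys.time(), "%F %T"), status, "\n", sep = "\t"),
      file = file.path(settings$outdir, "STATUS"), append = TRUE)
}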

serbinsh commented 8 years ago

Ah yes, OK. Let me fix my test workflow.

serbinsh commented 8 years ago

I see the issue: I was borrowing from /test/interactive-workflow.R, which does not define the status functions but calls them at the end (a minimal workaround is sketched after the script):

#!/usr/bin/env Rscript

# install_github("blernermhc/RDataTracker")
library(RDataTracker)
#args <- commandArgs(trailingOnly = TRUE)
#settings.file = args[1]
settings.file <- "tests/ebi-forecast.igb.illinois.edu.biocro.xml"
## See README in tests/ folder for details
require("PEcAn.all")

#--------------------------------------------------------------------------------#
# functions used to write STATUS used by history
#--------------------------------------------------------------------------------#
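# NOTE: status.start()/status.end() are called at the end of this script but are
# never defined in it; they are defined in web/workflow.R, which is the source of
# the "could not find function" error reported above.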

# remove previous runs
unlink("pecan", recursive=TRUE)

# show all queries to the database
#db.showQueries(TRUE)

# check settings
settings <- read.settings(settings.file)

# get traits of pfts
settings$pfts <- get.trait.data(settings$pfts, settings$model$type, settings$run$dbfiles, settings$database$bety, settings$meta.analysis$update)
saveXML(listToXml(settings, "pecan"), file=file.path(settings$outdir, 'pecan.xml'))

# run meta-analysis
run.meta.analysis(settings$pfts, settings$meta.analysis$iter, settings$meta.analysis$random.effects, 
                  settings$meta.analysis$threshold, settings$run$dbfiles, settings$database$bety)

# do conversions
for(i in 1:length(settings$run$inputs)) {
  input <- settings$run$inputs[[i]]
  if (is.null(input)) next
  if (length(input) == 1) next

  # fia database
  if (input['input'] == 'fia') {
    fia.to.psscss(settings)
  }

  # met download
  if (input['input'] == 'Ameriflux') {
    # start/end date for weather
    start_date <- settings$run$start.date
    end_date <- settings$run$end.date

    # site
    site <- sub(".* \\((.*)\\)", "\\1", settings$run$site$name)

    # download data
    fcn <- paste("download", input['input'], sep=".")
    do.call(fcn, list(site, file.path(settings$run$dbfiles, input['input']), start_date=start_date, end_date=end_date))

    # convert to CF
    met2CF.Ameriflux(file.path(settings$run$dbfiles, input['input']), site, file.path(settings$run$dbfiles, "cf"), start_date=start_date, end_date=end_date)

    # gap filling
    metgapfill(file.path(settings$run$dbfiles, "cf"), site, file.path(settings$run$dbfiles, "gapfill"), start_date=start_date, end_date=end_date)

    # model specific
    load.modelpkg(input['output'])
    fcn <- paste("met2model", input['output'], sep=".")
    r <- do.call(fcn, list(file.path(settings$run$dbfiles, "gapfill"), site, file.path(settings$run$dbfiles, input['output']), start_date=start_date, end_date=end_date))
    settings$run$inputs[[i]] <- r[['file']]
  }

  # narr download
}
saveXML(listToXml(settings, "pecan"), file=file.path(settings$outdir, 'pecan.xml'))

# write configurations
if (!file.exists(file.path(settings$rundir, "runs.txt")) | settings$meta.analysis$update == "TRUE") {
  run.write.configs(settings, settings$database$bety$write)
} else {
  logger.info("Already wrote configuraiton files")    
}

# run model
if (!file.exists(file.path(settings$rundir, "runs.txt"))) {
  logger.severe("No ensemble or sensitivity analysis specified in pecan.xml, work is done.")
} else {
  start.model.runs(settings, settings$database$bety$write)
}

# get results
get.results(settings)

# ensemble analysis
if (!file.exists(file.path(settings$outdir,"ensemble.ts.pdf"))) {
  run.ensemble.analysis(TRUE)    
} else {
  logger.info("Already executed run.ensemble.analysis()")
}

# sensitivity analysis
if (!file.exists(file.path(settings$outdir, "sensitivity.results.Rdata"))) {
  run.sensitivity.analysis()
} else {
  logger.info("Already executed run.sensitivity.analysis()")    
}

# all done
status.start("FINISHED")

# send email if configured
if (!is.null(settings$email) && !is.null(settings$email$to) && (settings$email$to != "")) {
  sendmail(settings$email$from, settings$email$to,
           paste0("Workflow has finished executing at ", date()),
           paste0("You can find the results on ", fqdn(), " in ", normalizePath(settings$outdir)))
}

# write end time in database
if (settings$workflow$id != 'NA') {
  db.query(paste0("UPDATE workflows SET finished_at=NOW() WHERE id=", settings$workflow$id, " AND finished_at IS NULL"), params=settings$database$bety)
}
status.end()

db.print.connections()
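
A minimal way to make the interactive script run on its own (an assumed workaround, not the project's actual fix) is to either source the helpers from web/workflow.R or define harmless fallbacks before the final status calls:

# Assumed workaround: no-op fallbacks so status.start()/status.end() do not fail
# when this script is run outside of web/workflow.R.
if (!exists("status.start")) status.start <- function(name) invisible(NULL)
if (!exists("status.end"))   status.end   <- function(status = "DONE") invisible(NULL)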