# Issue (open), reported by rburghol, 2 years ago
# Build land-segment met inputs and WDM for the listed segment(s).
# NOTE(review): the original list was "A51161 A51161" -- the duplicate entry
# re-ran identical work on the same segment; deduplicated here. Confirm the
# second entry was not meant to be a different segment id.
segs="A51161"
for i in $segs; do
  # convert grid CSVs into land segment CSVs
  # already did this part
  # a2l_one 1984010100 2020123123 /backup/meteorology/out/grid_met_csv /backup/meteorology/out/lseg_csv "$i"
  # update long term averages
  LongTermAvgRNMax /backup/meteorology/out/lseg_csv/1984010100-2020123123 /backup/meteorology/out/lseg_csv/RNMax 1 "$i"
  # finally, create a WDM for each land seg
  # this script reads the file /etc/hspf.config to get directories.
  wdm_pm_one "$i" 1984010100 2020123123 nldas2 harp2021 nldas1221 p20211221
done
# Run the model for the river segment, then export each land segment.
cbp run_all.csh p532sova_2021 OR7_8490_0000
for seg in $segs; do
  # write OM-readable flow CSV for this land segment
  Rscript /opt/model/p6/gb604b/run/export/wdm_export_flow.R p532sova_2021 $seg 1984 2020 /media/model/p532/out /opt/model/p53/p532c-sova
  # stage database tables for OM
  create_landseg_table.sh $seg CFBASE30Y20180615_vadeq p532 1
done
# Run below Wayside Park
# Run the model, then export every land segment draining to OR2_8130_7900.
cbp run_all.csh p532sova_2021 OR2_8130_7900
# get list of land segments needed ($() replaces deprecated backticks)
segs=$(cbp get_landsegs OR2_8130_7900)
for i in $segs; do
  # export to OM readable CSV
  Rscript /opt/model/p6/gb604b/run/export/wdm_export_flow.R p532sova_2021 "$i" 1984 2020 /media/model/p532/out /opt/model/p53/p532c-sova
  # preload database tables into OM
  create_landseg_table.sh "$i" CFBASE30Y20180615_vadeq p532 1
done
# Build land-segment met inputs and WDMs for all segments in JL2_6850_6890.
# $() replaces deprecated backtick command substitution.
segs=$(cbp get_landsegs JL2_6850_6890)
for i in $segs; do
  # convert grid CSVs into land segment CSVs
  a2l_one 1984010100 2020123123 /backup/meteorology/out/grid_met_csv /backup/meteorology/out/lseg_csv "$i"
  # update long term averages
  LongTermAvgRNMax /backup/meteorology/out/lseg_csv/1984010100-2020123123 /backup/meteorology/out/lseg_csv/RNMax 1 "$i"
  # finally, create a WDM for each land seg
  # this script reads the file /etc/hspf.config to get directories.
  wdm_pm_one "$i" 1984010100 2020123123 nldas2 harp2021 nldas1221 p20211221
done
# Pigg River
# Pigg River: build land-segment met inputs and WDMs for OR2_8460_8271.
cd /opt/model/p53/p532c-sova/
# get list of land segments needed ($() replaces deprecated backticks)
segs=$(cbp get_landsegs OR2_8460_8271)
for i in $segs; do
  # convert grid CSVs into land segment CSVs
  a2l_one 1984010100 2020123123 /backup/meteorology/out/grid_met_csv /backup/meteorology/out/lseg_csv "$i"
  # update long term averages
  LongTermAvgRNMax /backup/meteorology/out/lseg_csv/1984010100-2020123123 /backup/meteorology/out/lseg_csv/RNMax 1 "$i"
  # finally, create a WDM for each land seg
  # this script reads the file /etc/hspf.config to get directories.
  wdm_pm_one "$i" 1984010100 2020123123 nldas2 harp2021 nldas1221 p20211221
done
# Run land-use generation and the land model, then export each segment.
cbp run_lug.csh p532sova_2021 OR2_8460_8271
cbp run_land.csh p532sova_2021 OR2_8460_8271
for seg in $segs; do
  # write OM-readable flow CSV for this land segment
  Rscript /opt/model/p6/gb604b/run/export/wdm_export_flow.R p532sova_2021 $seg 1984 2020 /media/model/p532/out /opt/model/p53/p532c-sova
  # stage database tables for OM
  create_landseg_table.sh $seg CFBASE30Y20180615_vadeq p532 1
done
# Nottoway
# Nottoway workflow: regrid recent met years, rebuild land-segment CSVs,
# QA, load WDMs, run the full CBP model chain, and export flows.
basin=MN3_7930_8010
# load model directory configuration
. hspf_config
syear="1984"
eyear="2022"
sdate="${syear}010100"
edate="${eyear}123123"
ftype="cbp532_landseg"
model_version="cbp-5.3.2"
# land segments draining to this basin ($() replaces deprecated backticks)
segs=$(cbp get_landsegs "$basin")
cd /backup/meteorology
dataset=${sdate}-${edate}
# only these years of gridded data need (re)processing
update_grid_years="2021 2022"
for i in $segs; do
  for y in $update_grid_years; do
    spd="${y}010100"
    epd="${y}123123"
    ./grid2land.sh "$spd" "$epd" /backup/meteorology /backup/meteorology/out/grid_met_csv "$i"
  done
  cd /opt/model/p6/vadeq
  # convert grid CSVs into land segment CSVs for whole time period
  a2l_one "$sdate" "$edate" /backup/meteorology/out/grid_met_csv /backup/meteorology/out/lseg_csv "$i"
  # update long term averages in model component
  cd /backup/meteorology/
  LongTermAvgRNMax "/backup/meteorology/out/lseg_csv/${sdate}-${edate}" /backup/meteorology/out/lseg_csv/RNMax 1 "$i"
  Rscript R/lseg_het.R "$i" "$dataset"
  Rscript R/lseg_hset.R "$i" "$dataset"
  Rscript R/Table_Rolling_Averages.R "$i" "$dataset" "$ftype" "$model_version"
  Rscript R/lseg_rolling_avg_graphs.R "$i" "$dataset" "$ftype" "$model_version"
  # FIX: URL previously hard-coded the stale 1984010100-2020123123 dataset;
  # use ${dataset} so the link matches the images generated just above.
  echo "View at: http://deq1.bse.vt.edu:81/met/out/lseg_csv/${dataset}/images/df.90day.precip_${i}.png"
  # Run QA
  Rscript R/lseg_qa_test_timeseries.R "$i" "$dataset" "$ftype" "$model_version"
  # import the data into the WDM and model scenario
  cd /opt/model/p53/p532c-sova/
  wdm_pm_one "$i" "$sdate" "$edate" nldas2 harp2021 nldas1221 p20211221
done
cbp run_lug.csh p532sova_2021 "$basin"
cbp run_land.csh p532sova_2021 "$basin"
cbp run_etm.csh p532sova_2021 "$basin"
cbp run_rug.csh p532sova_2021 "$basin"
cbp run_river.csh p532sova_2021 "$basin"
for i in $segs; do
  # export to OM readable CSV
  # FIX: years were hard-coded as "1984 2020" while eyear is 2022; use
  # $syear/$eyear so the export covers the full simulated period.
  Rscript /opt/model/meta_model/scripts/wdm/wdm_export_flow.R p532sova_2021 "$i" "$syear" "$eyear" /media/model/p532/out /opt/model/p53/p532c-sova
  # preload database tables into OM
  create_landseg_table.sh "$i" CFBASE30Y20180615_vadeq p532 1
done
Set the luyear variable to 2013 instead of year, which results in valid,
non-time-varying land use for all segments. luyear = year would be preferred
for historic analysis, but that is OK, since in reality we need to do an
update to the landuse dataMatrix, or to the way the matrices handle missing
keys (i.e., when the year in the model is before or later than the years in
the landuse data), to run an actual historical analysis anyhow.
# Re-run gridding for selected years for all segments in the basin.
# FIX: path typo -- "/opt.model/p532/p532c-sova" does not exist; the model
# checkout used everywhere else in this workflow is /opt/model/p53/p532c-sova
cd /opt/model/p53/p532c-sova/
basin=MN3_7930_8010
# load model directory configuration
. hspf_config
syear="1984"
eyear="2022"
sdate="${syear}010100"
edate="${eyear}123123"
ftype="cbp532_landseg"
model_version="cbp-5.3.2"
segs=$(cbp get_landsegs "$basin")
cd /backup/meteorology
dataset=${sdate}-${edate}
# NOTE(review): the next assignment immediately overrides this one, so only
# 1984 is regridded -- confirm whether "2021 2022" should also be processed.
update_grid_years="2021 2022"
update_grid_years="1984"
for i in $segs; do
  for y in $update_grid_years; do
    spd="${y}010100"
    epd="${y}123123"
    ./grid2land.sh "$spd" "$epd" /backup/meteorology /backup/meteorology/out/grid_met_csv "$i"
  done
done  # FIX: the outer "for i" loop was never closed in the original paste
# get list of land segments needed
cd /opt/model/p53/p532c-sova/
# NOTE(review): "M" looks like a truncated basin/segment id (other calls pass
# full ids like MN3_7930_8010) -- TODO confirm. $() replaces backticks.
segs=$(cbp get_landsegs M)
for i in $segs; do
  # convert grid CSVs into land segment CSVs
  a2l_one 1984010100 2020123123 /backup/meteorology/out/grid_met_csv /backup/meteorology/out/lseg_csv "$i"
  # update long term averages
  LongTermAvgRNMax /backup/meteorology/out/lseg_csv/1984010100-2020123123 /backup/meteorology/out/lseg_csv/RNMax 1 "$i"
  # finally, create a WDM for each land seg
  # this script reads the file /etc/hspf.config to get directories.
  wdm_pm_one "$i" 1984010100 2020123123 nldas2 harp2021 nldas1221 p20211221
done
# Run them
cbp run_all.csh p532sova_2021 M
for i in $segs; do
  # export to OM readable CSV
  Rscript /opt/model/p6/gb604b/run/export/wdm_export_flow.R p532sova_2021 "$i" 1984 2020 /media/model/p532/out /opt/model/p53/p532c-sova
  # preload database tables into OM
  create_landseg_table.sh "$i" CFBASE30Y20180615_vadeq p532 1
done
Overview
Testing with a full basin using procedure outlined in HARPgroup/model_meteorology#29
# Convert the full-period gridded met data (p5 geometry) for all segments.
p5_g2a_all 19840101 20201231 /backup/meteorology /backup/meteorology/out/grid_met_csv
# NOTE(review): this range overlaps year 2020 with the previous command --
# confirm whether the start should be 20210101 instead of 20200101.
p5_g2a_all 20200101 20211231 /backup/meteorology /backup/meteorology/out/grid_met_csv
Data Model
flow_scenario, if it has one, over-writes the local variable scenario.
[modelpath] / out/land / [scenario] / eos / [landseg]_0111-0211-0411.csv
Watershed Runs
Roanoke River
Tennessee River
# Run them
cbp run_all.csh p532sova_2021 T
for i in $segs; do
  # export to OM readable CSV
  Rscript /opt/model/p6/gb604b/run/export/wdm_export_flow.R p532sova_2021 "$i" 1984 2020 /media/model/p532/out /opt/model/p53/p532c-sova
  # preload database tables into OM
  create_landseg_table.sh "$i" CFBASE30Y20180615_vadeq p532 1
done  # FIX: "done" was fused onto the previous command in the paste
# FIX: reconstructed from a garbled paste -- "cd" was missing from the path
# line, "#" markers were stripped from comments (they would execute as
# commands), "segs=" was split from its command substitution, and "done" was
# fused onto the wdm_pm_one line.
cd /opt/model/p53/p532c-sova/
# get list of land segments needed
# NOTE(review): "N" looks like a truncated basin id -- TODO confirm.
segs=$(cbp get_landsegs N)
for i in $segs; do
  # convert grid CSVs into land segment CSVs
  a2l_one 1984010100 2020123123 /backup/meteorology/out/grid_met_csv /backup/meteorology/out/lseg_csv "$i"
  # update long term averages
  LongTermAvgRNMax /backup/meteorology/out/lseg_csv/1984010100-2020123123 /backup/meteorology/out/lseg_csv/RNMax 1 "$i"
  # finally, create a WDM for each land seg
  # this script reads the file /etc/hspf.config to get directories.
  wdm_pm_one "$i" 1984010100 2020123123 nldas2 harp2021 nldas1221 p20211221
done
# Run them - note: for other rivers we used run_all.csh, why? We just need
# land, unless we are looking at flow too for validation/calibration.
cbp run_land.csh p532sova_2021 N
for i in $segs; do
  # export to OM readable CSV
  Rscript /opt/model/p6/gb604b/run/export/wdm_export_flow.R p532sova_2021 "$i" 1984 2020 /media/model/p532/out /opt/model/p53/p532c-sova
  # preload database tables into OM
  create_landseg_table.sh "$i" CFBASE30Y20180615_vadeq p532 1
done  # FIX: "done" was fused onto the previous command in the paste
cd /backup/meteorology
# There was an error running a2l_one for segment A51810, so tried running
# this individually. (FIX: this note lacked a "#" and would have executed.)
# NOTE(review): start date 1985010100 differs from the 1984010100 used
# everywhere else in this workflow -- confirm it is not a typo.
./grid2land.sh 1985010100 2020123123 /backup/meteorology /backup/meteorology/out/grid_met_csv A51810
# get list of land segments needed
# FIX: reconstructed from a garbled paste -- "segs=" was fused onto the cd
# line and split from its command substitution, "#" markers were stripped
# from comments, and "done" was fused onto the wdm_pm_one line.
cd /opt/model/p53/p532c-sova/
segs=$(cbp get_landsegs M)
for i in $segs; do
  # convert grid CSVs into land segment CSVs
  a2l_one 1984010100 2020123123 /backup/meteorology/out/grid_met_csv /backup/meteorology/out/lseg_csv "$i"
  # update long term averages
  LongTermAvgRNMax /backup/meteorology/out/lseg_csv/1984010100-2020123123 /backup/meteorology/out/lseg_csv/RNMax 1 "$i"
  # finally, create a WDM for each land seg
  # this script reads the file /etc/hspf.config to get directories.
  wdm_pm_one "$i" 1984010100 2020123123 nldas2 harp2021 nldas1221 p20211221
done
# Run them
cbp run_all.csh p532sova_2021 M
for i in $segs; do
  # export to OM readable CSV
  Rscript /opt/model/p6/gb604b/run/export/wdm_export_flow.R p532sova_2021 "$i" 1984 2020 /media/model/p532/out /opt/model/p53/p532c-sova
  # preload database tables into OM
  create_landseg_table.sh "$i" CFBASE30Y20180615_vadeq p532 1
done  # FIX: "done" was fused onto the previous command in the paste