WAM2layers / Moisture_tracking_intercomparison


reading data for Scotland case #50

Closed: apalarcon closed this issue 1 month ago

apalarcon commented 1 month ago
directory_str = "results WAM2layers/"
directory =  os.fsencode(directory_str)

n=0
for file in os.listdir(directory):
    filename = os.fsdecode(file)
    if filename.endswith(".nc"):
        if n ==0:
            temp = xr.open_dataset(os.path.join(directory_str, filename))
            a_gridcell, lx, ly = get_grid_info(temp)
            srcs_wam2layers = temp["e_track"]
            n=n+1
        else:
            temp = xr.open_dataset(os.path.join(directory_str, filename))
            srcs_wam2layers.values = srcs_wam2layers.values + temp["e_track"].values
            n=n+1

#print(srcs_wam2layers.max())

#quit()

#try:
    #fname="WAM2layers_out.nc"
    #srcs_wam2layers = xr.open_dataset("results WAM2layers/WAM2layers_out.nc")['e_track']

#except:
    #n=0
    #aux_wam2layers=0
    #for file in os.listdir(directory):
            #filename = os.fsdecode(file)
            #if filename.endswith("00.nc"):
                #temp = xr.open_dataset(os.path.join(directory_str, filename))

                #a_gridcell, lx, ly = get_grid_info(temp)

                #aux_wam2layers = aux_wam2layers + (temp["e_track"].values) #* 1000 / a_gridcell[:, None]
                ##n=n+1
            ##else:
                ##temp = xr.open_dataset(os.path.join(directory_str, filename))
                ##a_gridcell, lx, ly = get_grid_info(temp)
                ##srcs_wam2layers = srcs_wam2layers + temp["e_track"]* 1000 / a_gridcell[:, None]
                ##n=n+1
        ##continue
        ##else:
        ##continue

    #write_nc(temp['latitude'].values,temp['longitude'].values, aux_wam2layers, filename="results WAM2layers/WAM2layers_out")

    #fname="WAM2layers_out.nc"
    #srcs_wam2layers = xr.open_dataset("results WAM2layers/WAM2layers_out.nc")['e_track']

# Loading the data this way gives an error message for me while plotting, but in principle it should work
#dsall = xr.open_mfdataset('results WAM2layers/backtrack_*T00-00.nc', combine = 'nested', concat_dim='time')
#lat = dsall.latitude.values
#lon = dsall.longitude.values
#a_gridcell_new, l_ew_gridcell, l_mid_gridcell = get_grid_info_new(lat, lon)
#E_track_totalmm = (dsall/ a_gridcell_new) *1000 # mm
#srcs_wam2layers_new = E_track_totalmm["e_track"].sum('time') # mm

srcs_wam2layers = srcs_wam2layers.sum('time')  # mm
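
# (Hedged note: the commented-out variants above divide e_track by the grid-cell
#  area and multiply by 1000 to obtain mm, while the active code sums e_track
#  directly. If such a conversion were needed here, it would look like
#  srcs_wam2layers = srcs_wam2layers * 1000 / a_gridcell[:, None]
#  -- this is only an assumption taken from the commented block and should be
#  checked against the units of the WAM2layers output.)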

## Make sample figure (in this case of WAM2layers)
#my_projection = crs.PlateCarree(central_longitude=0)

#fig = plt.figure(figsize=(16, 10))
#ax = fig.add_subplot(111, projection=crs.PlateCarree())

#srcs_wam2layers.plot(
        #vmin=0, #Min source in mm
        #vmax=15, #Max source in mm
        #robust=False,
        #cmap=cm.rain, #Colour map from cmocean package
        #cbar_kwargs=dict(fraction=0.05, shrink=0.5),
    #)
#srcs_wam2layers.plot.contour(ax=ax, levels=[0.1, 1], colors=["lightgrey", "grey"])
#ax.set_title("Accumulated tracked moisture [mm]", loc="left")
#ax.add_feature(cartopy.feature.COASTLINE, linewidth=0.8)
#ax.add_feature(cartopy.feature.BORDERS, linestyle='-', linewidth=.2)

#ax.set_xticks(np.arange(-180, 181, 10), crs=my_projection)
#ax.set_yticks(np.arange(-90, 91, 10), crs=my_projection)
#ax.set_xlim(-85, 40) #Setting limits in longitude dimension
#ax.set_ylim(10,80) #Setting limits in latitude dimension
#plt.savefig("Wam2layers_sources_Pakistan.png")

########################################################
## University of Vigo                                 ##
########################################################
print("--> Reading results UVigo")
srcs_Vigo_e1_Stohl    = xr.open_dataset("results Uvigo/ERA5_Stohl_backwardreg.nc")["E_P"]
srcs_Vigo_e2_Sodemann = xr.open_dataset("results Uvigo/ERA5_sodemann_reg.nc")["E_P"]

########################################################
## UTRACK - Arie Staal                                ##
## - Based on communication with Arie, the results    ##
## - should be area corrected. Given the values, I    ##
## - assumed the area needs to be in km**2; this      ##
## - still needs to be checked.                       ##
########################################################
print("--> Reading results Utrack Arie Staal")
directory_str = "results Utrack Arie Staal/"
directory =  os.fsencode(directory_str)

dsall = xr.open_mfdataset('results Utrack Arie Staal/*_mixing48h_dt025h_100p.nc', combine = 'nested', concat_dim='time')
a_gridcell_new, l_ew_gridcell, l_mid_gridcell = get_grid_info_new(dsall.lat.values, dsall.lon.values) #Calculate grid cell area

srcs_utrack_e1 = dsall["moisture_source"].sum("time")*a_gridcell_new/10**6.0 #1000.0
dsall = xr.open_mfdataset('results Utrack Arie Staal/*_mixing24h_dt025h_100p.nc', combine = 'nested', concat_dim='time')
srcs_utrack_e2 = dsall["moisture_source"].sum("time")*a_gridcell_new/10**6.0 #1000.0
dsall = xr.open_mfdataset('results Utrack Arie Staal/*_mixing12h_dt025h_100p.nc', combine = 'nested', concat_dim='time')
srcs_utrack_e3 = dsall["moisture_source"].sum("time")*a_gridcell_new/10**6.0 #1000.0
dsall = xr.open_mfdataset('results Utrack Arie Staal/*_mixing24h_dt025h_1000p.nc', combine = 'nested', concat_dim='time')
srcs_utrack_e4 = dsall["moisture_source"].sum("time")*a_gridcell_new/10**6.0 #1000.0
dsall = xr.open_mfdataset('results Utrack Arie Staal/*_mixing24h_dt010h_100p.nc', combine = 'nested', concat_dim='time')
srcs_utrack_e5 = dsall["moisture_source"].sum("time")*a_gridcell_new/10**6.0 #1000.0

########################################################
## HAMSTER (Ghent)                                    ##
########################################################
print("--> Reading results UGhent HAMSTER")
# E1: Sodemann #
temp = xr.open_dataset("results UGhent HAMSTER/Scotland simulations/bias_corrected/ens1_sod08/bias_corrected_20231006120000_sod08.nc")
srcs_ghent_e1 = temp["E2P_BC"].mean("time") #Mean over time to remove time dimension

for date in range(7,8):
    temp = xr.open_dataset("results UGhent HAMSTER/Scotland simulations/bias_corrected/ens1_sod08/bias_corrected_202310" + str(date).zfill(2) + "120000_sod08.nc")
    srcs_ghent_e1 += temp["E2P_BC"].mean("time")

# E2: FAS19 (Fremme + Sodemann, 2019) #
temp = xr.open_dataset("results UGhent HAMSTER/Scotland simulations/bias_corrected/ens2_fas19/bias_corrected_20231006120000_fas19.nc")
srcs_ghent_e2 = temp["E2P_BC"].mean("time")

for date in range(7,8):
    temp = xr.open_dataset("results UGhent HAMSTER/Scotland simulations/bias_corrected/ens2_fas19/bias_corrected_202310" + str(date).zfill(2) + "120000_fas19.nc")
    srcs_ghent_e2 += temp["E2P_BC"].mean("time")

# E3: rh20 #
temp = xr.open_dataset("results UGhent HAMSTER/Scotland simulations/bias_corrected/ens3_rh20/bias_corrected_20231006120000_rh20.nc")
srcs_ghent_e3 = temp["E2P_BC"].mean("time")

for date in range(7,8):
    temp = xr.open_dataset("results UGhent HAMSTER/Scotland simulations/bias_corrected/ens3_rh20/bias_corrected_202310" + str(date).zfill(2) + "120000_rh20.nc")
    srcs_ghent_e3 += temp["E2P_BC"].mean("time")

# E4: allabl #
temp = xr.open_dataset("results UGhent HAMSTER/Scotland simulations/bias_corrected/ens4_allabl/bias_corrected_20231006120000_allabl.nc")
srcs_ghent_e4 = temp["E2P_BC"].mean("time")

for date in range(7,8):
    temp = xr.open_dataset("results UGhent HAMSTER/Scotland simulations/bias_corrected/ens4_allabl/bias_corrected_202310" + str(date).zfill(2) + "120000_allabl.nc")
    srcs_ghent_e4 += temp["E2P_BC"].mean("time")

temp = xr.open_dataset("results UGhent HAMSTER/Scotland simulations/bias_corrected/ens5_rhadap80/bias_corrected_20231006120000_rhadap80.nc")
srcs_ghent_e5 = temp["E2P_BC"].mean("time")

for date in range(7,8):
    temp = xr.open_dataset("results UGhent HAMSTER/Scotland simulations/bias_corrected/ens5_rhadap80/bias_corrected_202310" + str(date).zfill(2) + "120000_rhadap80.nc")
    srcs_ghent_e5 += temp["E2P_BC"].mean("time")

########################################################
## TRACMASS Dipanjan Dey                              ##
## Units are mm/day, so values are multiplied by      ##
## the number of event days                           ##
########################################################
print("--> Reading results TRACMASS Dipanjan Dey")
nrdays = 3
ds_TRACMASS = xr.open_dataset("results TRACMASS Dipanjan Dey/TRACMASS_evap_sources_06-08oct2023.nc") #Evaporative sources (and preicp?) mm/day
ds_pr_TRACMASS = xr.open_dataset("results TRACMASS Dipanjan Dey/PR_ERA5_TRACMASS_06-08oct2023.nc") #Precip ERA5 and TRACMASS Comparison
#convert to -180 to 180 lon
ds_TRACMASS.coords['lon'] = (ds_TRACMASS.coords['lon'] + 180) % 360 - 180
ds_TRACMASS = ds_TRACMASS.sortby(ds_TRACMASS.lon)

srcs_TRACMASS = ds_TRACMASS["E_TRACMASS"]*nrdays #Units of data is mm/day but we want mm over whole time period

########################################################
## FLEXPART-Watersip TatFanCheng                      ##
########################################################
print("--> Reading results FLEXPART_WaterSip_TatFanCheng")
filename = "results FLEXPART_WaterSip_TatFanCheng/WaterSip_moisture_source_Scotland_20231006-20231008_Ens1.nc"
ds_flexpart_watersip1 = xr.open_dataset(filename)
#convert to -180 to 180 lon
ds_flexpart_watersip1.coords['lon'] = (ds_flexpart_watersip1.coords['lon'] + 180) % 360 - 180
ds_flexpart_watersip1 = ds_flexpart_watersip1.sortby(ds_flexpart_watersip1.lon)
srcs_flexpart_watersip1 = ds_flexpart_watersip1["Cb"]

filename = "results FLEXPART_WaterSip_TatFanCheng/WaterSip_moisture_source_Scotland_20231006-20231008_Ens2.nc"
ds_flexpart_watersip2 = xr.open_dataset(filename)
#convert to -180 to 180 lon
ds_flexpart_watersip2.coords['lon'] = (ds_flexpart_watersip2.coords['lon'] + 180) % 360 - 180
ds_flexpart_watersip2 = ds_flexpart_watersip2.sortby(ds_flexpart_watersip2.lon)
srcs_flexpart_watersip2 = ds_flexpart_watersip2["Cb"]

filename = "results FLEXPART_WaterSip_TatFanCheng/WaterSip_moisture_source_Scotland_20231006-20231008_Ens3.nc"
ds_flexpart_watersip3 = xr.open_dataset(filename)
#convert to -180 to 180 lon
ds_flexpart_watersip3.coords['lon'] = (ds_flexpart_watersip3.coords['lon'] + 180) % 360 - 180
ds_flexpart_watersip3 = ds_flexpart_watersip3.sortby(ds_flexpart_watersip3.lon)
srcs_flexpart_watersip3 = ds_flexpart_watersip3["Cb"]

########################################################
## Flexpart Ru Xu                                     ##
########################################################
print("--> Reading results Ru_Xu_FLEXPART")
ds_flexpart_xu = xr.open_dataset("results Ru_Xu_FLEXPART/scot_e_daily.nc")
srcs_flexpart_xu = ds_flexpart_xu["data"].sum("time")

########################################################
## CHc LAGRANTO                                       ##
########################################################
print("--> Reading results CHc LAGRANTO")
ds_lagranto_ = xr.open_dataset("results CHc LAGRANTO/Scotland_2023_CHc_eventtotal_ens1.nc")

ds_lagranto_ = ds_lagranto_["N"].sum("time").sum("dimz_N")

ds_lagranto = xr.DataArray(

    ds_lagranto_.values,
    coords = [np.arange(-90,90.25,0.25), np.arange(-180,180.25,0.25)],
    dims=('lat', 'lon'),
)
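
# (Hedged sanity check: the hard-coded coordinate vectors assume a regular global
#  0.25 degree grid, so the summed field should have shape (721, 1441).)
# assert ds_lagranto_.shape == (721, 1441)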

print("--> Reading results UViena ")

srcs_flexpart_uvie_ = xr.open_dataset("results univie FLEXPART/scotland_univie.nc")
srcs_flexpart_uvie_bl = srcs_flexpart_uvie_["moisture_uptakes_bl"].sum("time")
srcs_flexpart_uvie_ft = srcs_flexpart_uvie_["moisture_uptakes_ft"].sum("time")

srcs_flexpart_uvie = srcs_flexpart_uvie_bl + srcs_flexpart_uvie_ft

Still to do: add a reading function for 2LDRM.
Peter9192 commented 1 month ago

Thanks for the code! It has now been incorporated via #52, so it is part of the main branch (combine_data.py).