guziy / pylibrmn

An effort to separate the wrapper for reading rpn files from all the rest of my files
GNU Lesser General Public License v2.1
4 stars 0 forks source link

After pylibrmn updates my old scripts don't work #8

Closed houssank closed 6 years ago

houssank commented 6 years ago
import h5py
from get_geo_ice import get_geo_ice
import numpy as np
import datetime as dt
import dateutil.tz as tz
import matplotlib
from matplotlib import pyplot as plt
import pyproj
from numpy import ma
from IPython.display import Image, display
import glob
from mpl_toolkits.basemap import Basemap
from matplotlib.patches import Polygon
from matplotlib import cm
from matplotlib.colors import Normalize
import matplotlib as mpl
from matplotlib.colors import LogNorm
from rpn.rpn import RPN
from rpn import level_kinds
from datetime import datetime
from rpn.domains.rotated_lat_lon import RotatedLatLon
import math
import nclcmaps

def DIST_POINT_PROCHE(point_sat, vecteur_model):
    """Return the model point (lat, lon) closest to a satellite point.

    Parameters
    ----------
    point_sat : (lat, lon) pair of the satellite observation.
    vecteur_model : array of shape (n, 2) with model points as (lat, lon) rows.

    Returns
    -------
    (lat, lon) of the nearest model point, or (0, 0) when there is no model
    point within the search radius (50000 -- effectively unbounded for
    degree coordinates).

    Notes
    -----
    The original loop had a duplicated assignment (``dtmp=dtmp=...``) and
    inconsistent indentation; it is replaced by a vectorized nearest-point
    search.  On exact distance ties the first point is kept (the original
    kept the last); this only matters for duplicated model points.
    """
    ypoint = point_sat[0]
    xpoint = point_sat[1]
    ymod = vecteur_model[:, 0]
    xmod = vecteur_model[:, 1]
    npointmod = len(ymod)
    print('npoint mod = %d' % npointmod)
    if npointmod == 0:
        return 0, 0
    # Planar (not spherical) distance, matching the original computation;
    # the spherical version was already commented out in the original.
    dist = np.sqrt((xmod - xpoint) ** 2 + (ymod - ypoint) ** 2)
    imin = int(np.argmin(dist))
    if dist[imin] <= 50000.:
        return ymod[imin], xmod[imin]
    return 0, 0

# CloudSat 2B-FLXHR granule (radiative fluxes and heating rates).
flx_file='/BIG1/sankare/Data_PhD/Calcul_rpn/Data_Sat/2007006223541_03693_CS_2B-FLXHR_GRANULE_P2_R04_E02.h5'
meters2km=1.e3  # divisor converting heights from m to km
# Pass the correct field name to get_geo_ice, based on the file name.
if flx_file.find('FLXHR_GRANULE') > 0:
    fieldname = '2B-FLXHR'
elif flx_file.find('FLXHR-LIDAR') > 0:
    fieldname = '2B-FLXHR-LIDAR'
else:
    # was: args.ice_file -- `args` is undefined here (NameError); report
    # the actual file path instead
    raise Exception('{} not a ICE file'.format(flx_file))
lats, lons, date_times, prof_times, dem_elevation = get_geo_ice(flx_file, fieldname)
with h5py.File(flx_file, 'r') as flxin:
    flxvals = flxin['2B-FLXHR']['Data Fields']['QR'][...]  # QR: heating rates
    height = flxin['2B-FLXHR']['Geolocation Fields']['Height'][...]
    Lat = flxin['2B-FLXHR']['Geolocation Fields']['Latitude'][:][:]
    Lon = flxin['2B-FLXHR']['Geolocation Fields']['Longitude'][...]
    factor = flxin['2B-FLXHR']['Swath Attributes']['QR.factor'][0][0]
    missing = flxin['2B-FLXHR']['Swath Attributes']['QR.missing'][0][0]
    granule = flxin['2B-FLXHR']['Swath Attributes']['granule_number'][0][0]
# Split shortwave and longwave heating rates (first axis of QR).
shortwave_hr = flxvals[0, :, :]
longwave_hr = flxvals[1, :, :]
# Report the missing-value sentinel and scale factor.
print('\nmissing values given by={}\nfactor={}'.format(missing, factor))
hit = (longwave_hr == missing)
print('There are {} longwave_hr missing values'.format(np.sum(hit)))
long_wave_hr = np.ma.masked_where(hit, longwave_hr)
hit = (shortwave_hr == missing)
print('There are {} shortwave_hr missing values'.format(np.sum(hit)))
short_wave_hr = np.ma.masked_where(hit, shortwave_hr)
# NOTE(review): the masked arrays long_wave_hr/short_wave_hr are not used
# below -- the unmasked longwave_hr/shortwave_hr are scaled and plotted,
# so the missing values are NOT actually masked; confirm this is intended.
longwave_hr = longwave_hr / factor
shortwave_hr = shortwave_hr / factor
# Flatten the (N, 1) Lat/Lon geolocation arrays into 1-D vectors.
lat_m = [Lat[i][0] for i in range(len(Lat))]
lon_m = [Lon[i][0] for i in range(len(Lon))]
lon_vect = np.array(lon_m)
lat_vect = np.array(lat_m)
# Indices of track points inside the domain of interest:
# 66N..82N and (100E..180E or 180W..170W).
latlon_s_index = []
for i in range(0, len(Lon)):
    lat = lat_vect[i]
    lon = lon_vect[i]
    if (lat > 66 and lat < 82) and ((lon > 100 and lon < 180) or (lon > -180 and lon < -170)):
        latlon_s_index.append(i)
index3 = np.array(latlon_s_index)
#### storm = variables restricted to the selected part of the track ####
storm_lats = lat_vect[index3]
storm_lons = lon_vect[index3]
storm_height = height[index3]
storm_longwave_hr = longwave_hr[index3]
if len(storm_height) == 0:
    print("!!!! Cross section out of range !!!!")
#### geo position ####
def lon2str(lonf, degree=""):
    """Format a longitude as '<abs>E' or '<abs>W', with an optional degree marker."""
    if lonf >= 0.0:
        hemi, value = "E", lonf
    else:
        hemi, value = "W", -lonf
    return "%.2f%s%s" % (value, degree, hemi)
def lat2str(latf, degree=""):
    """Format a latitude as '<abs>N' or '<abs>S', with an optional degree marker."""
    if latf >= 0.0:
        hemi, value = "N", latf
    else:
        hemi, value = "S", -latf
    return "%.2f%s%s" % (value, degree, hemi)
# Report the geographic extent of the selected track segment.
# (was: the two prints under the `if` had inconsistent indentation,
# an IndentationError, and used Python-2 print statements)
if storm_lons is not None and storm_lats is not None:
    print("Longitude: %s, %s" % (lon2str(np.min(storm_lons)), lon2str(np.max(storm_lons))))
    print("Latitude: %s, %s" % (lat2str(np.min(storm_lats)), lat2str(np.max(storm_lats))))
TIME_F = '%d%b%Y'
# Tag used for figure file names: granule start date + granule number.
Tit=(date_times[0].strftime(TIME_F)+"_"+str(int(granule)))
####################            
####        ####
#### DONNEES  CRCM5####
####        ####
####################
#path="/BIG2/nikiema/Data_Housseyni/ArcticJPB_0.22d_18L_ERAInt_SN_T3_JAN2007"
#Arctic_0.22d_originef_TTk_JAN2007
#Arctic_0.22d_originef_TTk_JAN2007
#path="/BIG1/sankare/Data_PhD/Calcul_rpn/Simulations/Arctic_0.22d_overcldsf_T3_JAN2007"
path="/BIG1/sankare/Data_PhD/Calcul_rpn/Simulations/Arctic_0.22d_ov_no_diff_T3_JAN2007"
vname ="T3"
##open the rpn file##
r = RPN(path)
dat = r.get_first_record_for_name(vname)
lons2d, lats2d = r.get_longitudes_and_latitudes_for_the_last_read_rec()
datte=date_times[index3]
dv=datte[0]
dv = datetime(2007, 1, 6, 23, 40, 0)
do = datetime(2007, 1, 1, 0, 0, 0)
## levels ##
lvl=[1000,975,925,900,850,800,700,600,500,400,300,250,200]
## get record for date & level ##
nlev=len(lvl)
dat1=[]#np.zeros((205,196,nlev),'f')
count=0
r = RPN(path)
for i in range(len(lvl)):
        dat=r.get_record_for_date_and_level(var_name=vname,date=dv,date_o=do,level=lvl[i],level_kind=level_kinds.PRESSURE)
        dat1.append(dat)
#dat1=r.variables["T3"][:]         
var_mod_3d=np.array(dat1) 
#lons2dd=lons2d-180i

# Find the model grid indices falling inside the domain of interest.
# (was: `lons2dd=lons2d` sat at an impossible indentation level inside the
# loop -- an IndentationError; it is an alias, so it is hoisted out)
c = []
d = []
index_latlon = []
# Wrap model longitudes from [0, 360) into (-180, 180].
lons2d[lons2d > 180] -= 360
lons2dd = lons2d  # alias kept for backward compatibility with the code below
nlat, nlon = lats2d.shape  # was: columns hard-coded to 196
for i in range(nlat):
    for j in range(nlon):
        a = lats2d[i, j]
        b = lons2dd[i, j]
        # Same domain as the satellite selection:
        # 66N..82N and (100E..180E or 180W..170W).
        if (a > 66 and a < 82) and ((b > 100 and b < 180) or (b > -180 and b < -170)):
            c.append(i)
            d.append(j)
            index_latlon.append((i, j))
index_i = np.array(c)
index_j = np.array(d)
lat_s_mo_vect = lats2d[index_i, index_j]
lon_s_mo_vect = lons2dd[index_i, index_j]
# Model variable restricted to the selected grid points: (nlev, npoints).
var_s_mo = var_mod_3d[:, index_i, index_j]

## satellite track restricted to the domain of interest ##
lo_x = storm_lons
la_x = storm_lats
# Model and satellite points as (lat, lon) rows.
latlon_s_2dm = np.array((lat_s_mo_vect, lon_s_mo_vect)).T
latlon_s_2ds = np.array((la_x, lo_x)).T

### find, for every satellite point, the closest model point ###
npoint_sat = len(la_x)
latlon_m_p_s = np.zeros((npoint_sat, 2), 'f')
for isat in range(npoint_sat):
    point_sat = (la_x[isat], lo_x[isat])
    print('point satt == %s' % (point_sat,))
    latlon_m_p_s[isat, 0:2] = DIST_POINT_PROCHE(point_sat, latlon_s_2dm)
    # was mis-indented (IndentationError) and a Python-2 print statement:
    print('point mod  == %s' % (latlon_m_p_s[isat, 0:2],))
# Recover the column index of each matched model point in latlon_s_2dm.
# NOTE(review): latlon_m_p_s is float32 while latlon_s_2dm is likely float64;
# the exact-equality test below only works if the rounded coordinates still
# match -- confirm dtypes agree.
index_m_p_s = []
for ll in range(0, len(latlon_m_p_s)):
    for nn in range(len(latlon_s_2dm)):
        if (latlon_s_2dm[nn, 0] == latlon_m_p_s[ll, 0]
                and latlon_s_2dm[nn, 1] == latlon_m_p_s[ll, 1]):
            index_m_p_s.append(nn)
# Model variable values along the satellite trajectory.
var_m_p_s = np.squeeze(var_s_mo[:, index_m_p_s])
###########################graphe du trajectoire et les points du mod proches###############
# Map of the full granule track, the selected segment and the matched
# model points, on a north-polar Lambert azimuthal projection.
m=Basemap(projection='nplaea',boundinglat=58,lon_0=0,resolution='l')
x,y=m(lon_vect,lat_vect)                        # full granule track
x1,y1=m(lo_x,la_x)                              # selected (storm) segment
x2,y2=m(latlon_m_p_s[:,1],latlon_m_p_s[:,0])    # matched model points
m.plot(x,y,color='r',linestyle='-',linewidth=2.0)
m.plot(x1,y1,color='blue',linestyle='-',linewidth=3.0)
m.plot(x2,y2,color='black',linestyle=':',marker='.',linewidth=2.0)
m.drawcoastlines()
m.fillcontinents(color='y',lake_color='w')
# draw parallels and meridians.
m.drawparallels(np.arange(-80.,81.,20.))
# was: m.drawmeridians(m.drawmeridians(...),labels=...) -- the meridians were
# drawn twice and the label options applied to the dict returned by the inner
# call; a single call with the labels is what was intended.
m.drawmeridians(np.arange(-180.,181.,20.),labels=[1,1,1,1],fontsize=13,fontweight='bold')
m.drawmapboundary(fill_color='w')
# Manually placed parallel labels (drawparallels is called without labels).
ln=[-174,-162]
lt=[60.5,80.5]
x2,y2 = m(ln,lt)
labels = ['60$\degree$N','80$\degree$N']
for label, xpt, ypt in zip(labels, x2, y2):
    plt.text(xpt, ypt, label,fontsize=13,fontweight='bold')
plt.show()
##############################################################################3#################
###########################Partie graphique pour les variables###################################
# Index along the satellite track (x axis of the cross-section plots).
x_latx=np.arange(0,len(storm_lats))
#### figure ####
## limit & colorbar ##
# Colour limits shared by both cross-section figures.
vmin0=-9
vmax0=9
the_norm=Normalize(vmin=vmin0,vmax=vmax0,clip=False)
# NOTE(review): this assignment is immediately overwritten below -- the
# RdBu_r choice is dead code kept as a fallback.
cmap_ref=cm.RdBu_r
cmap_ref=nclcmaps.cmaps('MPL_seismic')#RdBu_r
#cmap_ref.set_over('pink')
#cmap_ref.set_under('k')
#cmap_ref.set_bad('0.75')
def plot_field(distance, height, field, ax, cmap=None, norm=None):
   """Pcolormesh `field` over (distance, height) on `ax` and add a colorbar.

   Returns (ax, colorbar).  Relies on the module-level `fig` for the
   colorbar (NOTE(review): a global -- the figure must exist at call time).
   """
   if cmap is None:
     cmap = cm.jet
   if norm is None:
     # was: the `norm` argument was silently ignored and the global
     # `the_norm` always used; honour the argument, global as fallback
     norm = the_norm
   col = ax.pcolormesh(distance, height, field, cmap=cmap, norm=norm)
   cax = fig.colorbar(col, extend='both', ax=ax, pad=0.02)
   return ax, cax
#cmap=cm.jet
# Figure 1: CloudSat heating-rate cross-section.
fig,ax1=plt.subplots(1,1,figsize=(25,5))
# Altitude of each radar bin, converted from m to km.
cloud_height_km=storm_height[0,:]/meters2km
#ax1,cax1=plot_field(x_latx,cloud_height_km,storm_longwave_hr.T,ax1,cmap=cmap_ref,norm=the_norm)
ax1,cax1=plot_field(x_latx,cloud_height_km,storm_longwave_hr.T,ax1,cmap=cmap_ref,norm=the_norm)
# Choose roughly nb x-ticks along the track (more ticks for long tracks).
if len(x_latx)>=40:
  nb=10
else:
   nb=5
cca=np.round(len(x_latx)/nb)
# Guard against a zero step for very short tracks.
if cca==0:
  cca=1
xtick=np.arange(min(x_latx), max(x_latx), cca)
ax1.set_xticks(xtick)
# Tick labels for the satellite cross-section: "latitude\nlongitude".
lat_tick=[]
lon_tick=[]
for i in range(0,len(xtick)):
        # was: storm_lats[[xtick[i]]] -- a fancy index with a float, which
        # modern numpy rejects; index with a plain integer scalar instead
        a=storm_lats[int(xtick[i])]
        b=storm_lons[int(xtick[i])]
        lat_tick.append(a)
        lon_tick.append(b)
list_lat=np.array(lat_tick)
list_lon=np.array(lon_tick)
xticklbs=[]
for i in range(0,len(xtick)): # show the right lons & lats
        xticklbs.append(lat2str(list_lat[i], "$\degree$") + "\n"
        + lon2str(list_lon[i], "$\degree$"))
# For the model: tick labels of the matched model points.
lat_tick0=[]
lon_tick0=[]
lat_m_p_s=latlon_m_p_s[:,0]
lon_m_p_s=latlon_m_p_s[:,1]
for i in range(0,len(xtick)):
        # index with an integer scalar (was a 1-element float fancy index,
        # rejected by modern numpy)
        a0=lat_m_p_s[int(xtick[i])]
        b0=lon_m_p_s[int(xtick[i])]
        lat_tick0.append(a0)
        lon_tick0.append(b0)
list_lat0=np.array(lat_tick0)
list_lon0=np.array(lon_tick0)
xticklbs0=[]
for i in range(0,len(xtick)): # show the right lons & lats
        xticklbs0.append(lat2str(list_lat0[i], "$\degree$") + "\n"
        + lon2str(list_lon0[i], "$\degree$"))
ax1.set_xticklabels(xticklbs)
ax1.get_xaxis().set_tick_params(direction='out', width=1,labelsize=20)
ax1.get_yaxis().set_tick_params(direction='out', width=1,labelsize=20)
# ax.get_yaxis().set_tick_params(direction='out', width=1)
# Show only the lowest 8 km.
ax1.set(ylim=[0,8],xlim=(min(x_latx),max(x_latx)))
ax1.set_xlabel('Latitude [$\degree$]\nLongitude [$\degree$]',fontsize=20)
# NOTE(review): set_label returns None, so `text` is always None.
text=cax1.set_label('Taux de chauffage (K/jour)',rotation=90,fontsize=20)
ax1.set_ylabel('Altitude (km)',fontsize=20)
# NOTE(review): '%e' (space-padded day) is a GNU strftime extension --
# confirm it is supported on the target platform.
TIME_FORMAT = '%e %b %Y %H:%M:%S UTC'
ax1.set_title('ClouSat:Coupe tranversale radiation  %s'%(dv.strftime(TIME_FORMAT)),y=1.05,size=20)
plt.subplots_adjust(left=0.08, bottom=0.27, right=1.07, top=0.90,wspace=None, hspace=None)

# ax1.set_fontsize(12)
plt.show() 
#la 2eme figure (modele)
#vmin0=-40
#vmax0=5
#the_norm=Normalize(vmin=vmin0,vmax=vmax0,clip=False)
#cmap_ref=cm.RdBu_r

def plot_field0(distance, height, field, ax, cmap=None, norm=None):
   """Pcolormesh for the model cross-section on `ax`; returns (ax, colorbar).

   Same contract as plot_field, but attaches the colorbar to the
   module-level `fig0` (NOTE(review): a global).
   """
   if cmap is None:
      cmap = cm.jet
   if norm is None:
      # was: the `norm` argument was ignored in favour of the global
      # `the_norm` (same defect as plot_field); honour the argument
      norm = the_norm
   col = ax.pcolormesh(distance, height, field, cmap=cmap, norm=norm)
   cax = fig0.colorbar(col, extend='both', ax=ax, pad=0.04)
   return ax, cax
# Figure 2: CRCM5 model heating-rate cross-section on pressure levels.
fig0,ax0=plt.subplots(1,1,figsize=(25,5))
# 86400 s/day: presumably converts the model rate to K/day to match the
# colourbar label -- confirm the model output units.
ax0,cax0=plot_field0(x_latx,lvl,86400*var_m_p_s,ax0,cmap=cmap_ref,norm=the_norm)
# Twin axis on the right, labelled in altitude (km).
ax=ax0.twinx()
ax.set_ylabel('Altitude (km)',fontsize=20)
ax.set(ylim=[0,8])
ax0.set_xticks(xtick)
ax0.set_xticklabels(xticklbs0)
ax0.set(xlim=(min(x_latx),max(x_latx)))
# Flip the pressure axis so the surface (1000 mb) sits at the bottom.
ax0.invert_yaxis()
ax0.set_xlabel('Latitude [$\degree$]\nLongitude [$\degree$]',fontsize=20)
ax0.set_ylabel('Pression (mb)',fontsize=20)
cax0.set_label('Taux de chauffage ( K/jour)',rotation=90,fontsize=20)
ax0.get_xaxis().set_tick_params(direction='out', width=1,labelsize=20)
ax0.get_yaxis().set_tick_params(direction='out', width=1,labelsize=20)
ax.get_yaxis().set_tick_params(direction='out', width=1,labelsize=20)
#### title  ####
# NOTE(review): '%e' is a GNU strftime extension (see figure 1).
TIME_FORMAT = '%e %b %Y %H:%M:%S UTC' 
ax0.set_title('CRCM5:Coupe transversle radiation  %s'%(dv.strftime(TIME_FORMAT)),y=1.05,size=20)
plt.subplots_adjust(left=0.08, bottom=0.27, right=1.07, top=0.90,wspace=None, hspace=None)

rep_fig="/BIG1/sankare/Data_PhD/Calcul_rpn/Mes_figures/"
#fig0.savefig(rep_fig+"ov_no_diif_hr_rpn.png")
#fig.savefig(rep_fig+"Cloudsat_hr_rpn.png")

plt.show()
guziy commented 6 years ago

Thanks Sankare: What is the error message?

Cheers

guziy commented 6 years ago

Here is the alternative and more efficient way of doing what you want:

# This example reads precipitation variable metadata without actually reading of the precipitation data
# into memory. In order to get data in memory you have to slice the variable. 
from rpn.rpn import RPN
with RPN("pm1979010100_03506400p") as r:
    pr_var = r.variables["PR"]

    # you can also get the list of fields in the file as below
    print(r.variables)

    # get the shape of the data (still no field values are read)
    print(pr_var.shape)

    # get the dates corresponding to the time dimension
    print([str(d) for d in pr_var.sorted_dates])

    # get the level values corresponding to the vertical dimension
    print([lev for lev in pr_var.sorted_levels])

    data_4d = pr_var[:] # the first dimension corresponds to pr_var.sorted_dates and the second to pr_var.sorted_levels
guziy commented 6 years ago

Sankare:

Could you please update? I've updated the method and it should now work for you. Although it is not the most efficient way to do what you want to do, this way you've discovered a problem and I think I've fixed it, so thanks for that.

Cheers

guziy commented 6 years ago

Please let me know if you still have problems with it, the version with the fix should be 0.0.39.

houssank commented 6 years ago

I have seen that, but how can I get a specific level and date? When I do that I obtain, for example, data_4d (date.size, level.size, lon.size, lat.size), but I need, for example, data_4d(2007-01-02 0000, 900hPa, lon.size, lat.size).

In the previous version we can do it like this. r.get_record_for_date_and_level(var_name=vname,date=dv,date_o=do,level=lvl[i],level_kind=level_kinds.PRESSURE)

houssank commented 6 years ago

Ok, thanks Huziy, I will tell you if it is ok for me. Thanks.

houssank commented 6 years ago

Thanks Huziy, It works, all is ok now. Thanks very much.