binayakl opened this issue 4 months ago (status: Open)
Hi @binayakl,
HDF5 is currently not supported in CaNS, and the io_field_hdf5
routine needs fixing. I will try to address the issue soon and get back to you. Thanks!
Hi again @binayakl,
I cannot test right now, unfortunately. But could you check if these changes work for you after you ensure proper linking against the HDF5 library?
diff --git a/src/load.f90 b/src/load.f90
index 4485ab2..29f1336 100644
--- a/src/load.f90
+++ b/src/load.f90
@@ -541,8 +541,8 @@ module mod_load
end select
end subroutine load_one_local
!
-#if defined(_USE_HDF5)
subroutine io_field_hdf5(io,filename,varname,ng,nh,lo,hi,var,meta,x_g,y_g,z_g)
+ use hdf5
!
! collective single field data I/O using HDF5
!
@@ -575,7 +575,6 @@ module mod_load
integer(HSIZE_T) , dimension(3) :: data_count
integer(HSSIZE_T), dimension(3) :: data_offset
integer(HSSIZE_T), dimension(3) :: halo_offset
- type(MPI_INFO) :: info = MPI_INFO_NULL
!
n(:) = hi(:)-lo(:)+1
sizes(:) = ng(:)
@@ -591,7 +590,7 @@ module mod_load
select case(io)
case('r')
call h5pcreate_f(H5P_FILE_ACCESS_F,plist_id,ierr)
- call h5pset_fapl_mpio_f(plist_id,MPI_COMM_WORLD%MPI_VAL,info%MPI_VAL,ierr)
+ call h5pset_fapl_mpio_f(plist_id,MPI_COMM_WORLD,MPI_INFO_NULL,ierr)
call h5fopen_f(filename,H5F_ACC_RDONLY_F,file_id,ierr,access_prp=plist_id)
call h5pclose_f(plist_id,ierr)
!
@@ -618,11 +617,11 @@ module mod_load
call h5dclose_f(dset,ierr)
call h5fclose_f(file_id,ierr)
end if
- call MPI_Bcast(meta,2,MPI_REAL_RP,0,MPI_COMM_WORLD)
+ call MPI_Bcast(meta,2,MPI_REAL_RP,0,MPI_COMM_WORLD,ierr)
case('w')
call h5screate_simple_f(ndims,dims,filespace,ierr)
call h5pcreate_f(H5P_FILE_ACCESS_F,plist_id,ierr)
- call h5pset_fapl_mpio_f(plist_id,MPI_COMM_WORLD%MPI_VAL,info%MPI_VAL,ierr)
+ call h5pset_fapl_mpio_f(plist_id,MPI_COMM_WORLD,MPI_INFO_NULL,ierr)
call h5fcreate_f(filename,H5F_ACC_TRUNC_F,file_id,ierr,access_prp=plist_id)
call h5pclose_f(plist_id,ierr)
!
@@ -678,5 +677,4 @@ module mod_load
end if
end select
end subroutine io_field_hdf5
-#endif
end module mod_load
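For reference, the property-list setup in io_field_hdf5 follows the standard parallel-HDF5 pattern. Below is a minimal, self-contained sketch of that sequence (not CaNS code; the file name is a placeholder and `use mpi` is assumed rather than CaNS's own MPI module). It may also help when checking the linking: HDF5 installations built with MPI and Fortran support typically provide an h5pfc compiler wrapper that adds the required include and link flags.

program hdf5_mpio_sketch
  use mpi
  use hdf5
  implicit none
  integer :: ierr
  integer(HID_T) :: plist_id,file_id
  !
  call MPI_Init(ierr)
  !
  ! initialize the HDF5 Fortran interface; the predefined constants
  ! (H5P_FILE_ACCESS_F, H5F_ACC_TRUNC_F, ...) are only defined after this call
  call h5open_f(ierr)
  !
  ! create a file-access property list set up for MPI-IO,
  ! then create the file collectively
  call h5pcreate_f(H5P_FILE_ACCESS_F,plist_id,ierr)
  call h5pset_fapl_mpio_f(plist_id,MPI_COMM_WORLD,MPI_INFO_NULL,ierr)
  call h5fcreate_f('example.h5',H5F_ACC_TRUNC_F,file_id,ierr,access_prp=plist_id)
  call h5pclose_f(plist_id,ierr)
  !
  ! ... dataspace/dataset creation and collective reads/writes go here ...
  !
  call h5fclose_f(file_id,ierr)
  call h5close_f(ierr)
  call MPI_Finalize(ierr)
end program hdf5_mpio_sketch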
Hi Pedro, thanks for your reply. Yes, I tried these changes earlier and the code compiles with them. However, I am currently facing an issue when modifying output.f90 to implement HDF5 output. This is my modified out3d subroutine:
  subroutine out3d(fname,nskip,p,varname)
    use mod_common_mpi, only: ipencil => ipencil_axis
    use hdf5
    use decomp_2d
    use mod_load, only: io_field,io_field_hdf5
    !
    ! saves a 3D scalar field into an HDF5 file
    !
    ! fname   -> name of the output file
    ! nskip   -> array with the step size for which the
    !            field is written; i.e.: [1,1,1]
    !            writes the full field
    ! p       -> 3D input scalar field
    ! varname -> name of the HDF5 dataset
    !
    implicit none
    character(len=*), intent(in) :: fname,varname
    integer , intent(in), dimension(3) :: nskip
    real(rp), intent(in), dimension(:,:,:) :: p
    integer, dimension(3) :: ng,lo,hi
    ng(:) = [nx_global,ny_global,nz_global]
    select case(ipencil)
    case(1)
      lo(:) = xstart(:)
      hi(:) = xend(:)
    case(2)
      lo(:) = ystart(:)
      hi(:) = yend(:)
    case(3)
      lo(:) = zstart(:)
      hi(:) = zend(:)
    end select
    if(any(nskip /= 1) .and. myid == 0) &
      print*, 'Warning: `nskip` should be `[1,1,1]` if `io_field_hdf5()` is used to output 3D field data'
    call io_field_hdf5('w',fname,varname,ng,[1,1,1],lo,hi,p)
    !call io_field_hdf5('w',trim(datadir)//trim(fname),trim(datadir)//trim(varname),ng,[1,1,1],lo,hi,p) !added
  end subroutine out3d
With these changes the code compiles. Should I make any changes to other parts of the code? While running the case, I got these errors:
*** Initial condition succesfully set ***
HDF5-DIAG: Error detected in HDF5 (1.14.2) MPI-process 0:
#000: H5P.c line 238 in H5Pcreate(): not a property list class
major: Invalid arguments to routine
minor: Inappropriate type
Could you indicate what might be the problem?
I am using HDF5/1.14.2 built with MPI support.
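One lead that may be worth checking: the `H5Pcreate(): not a property list class` message is commonly reported when the HDF5 Fortran interface has not been initialized, so that predefined constants such as H5P_FILE_ACCESS_F are still undefined when h5pcreate_f is called. If io_field_hdf5 does not call h5open_f itself, a guard around the I/O call along these lines might help (a sketch only, not verified against CaNS; the placement inside out3d is hypothetical, and an integer `ierr` is assumed to be declared locally):

    ! initialize the HDF5 Fortran interface once before any HDF5 call;
    ! without this, constants such as H5P_FILE_ACCESS_F are undefined and
    ! h5pcreate_f fails with "not a property list class"
    call h5open_f(ierr)
    call io_field_hdf5('w',fname,varname,ng,[1,1,1],lo,hi,p)
    ! release HDF5 library resources once all HDF5 I/O is done
    call h5close_f(ierr)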
In load.f90, is the subroutine io_field_hdf5 complete? I am having an issue compiling when enabling this subroutine. My system has the HDF5 modules loaded and I also added "use hdf5"; yet, I am encountering errors. I would really appreciate it if some further reading on HDF5 could be added to the README, if possible.