Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Hdf5 update #1738

Merged
merged 8 commits into from
Sep 16, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
399 changes: 202 additions & 197 deletions src/generate_databases/save_arrays_solver_hdf5.F90

Large diffs are not rendered by default.

10 changes: 5 additions & 5 deletions src/shared/check_mesh_resolution.f90
Original file line number Diff line number Diff line change
Expand Up @@ -524,12 +524,12 @@
if (DT_PRESENT) then
filename = 'res_Courant_number'
call write_checkmesh_data_hdf5(filename,tmp1)
else
! minimum period estimate
filename = 'res_minimum_period'
call write_checkmesh_data_hdf5(filename,tmp2)
call write_checkmesh_xdmf_hdf5(NSPEC_AB)
endif

! minimum period estimate
filename = 'res_minimum_period'
call write_checkmesh_data_hdf5(filename,tmp2)
call write_checkmesh_xdmf_hdf5(NSPEC_AB)

Check warning on line 532 in src/shared/check_mesh_resolution.f90

View check run for this annotation

Codecov / codecov/patch

src/shared/check_mesh_resolution.f90#L530-L532

Added lines #L530 - L532 were not covered by tests
else
! default output
call create_name_database(prname,myrank,LOCAL_PATH)
Expand Down
230 changes: 209 additions & 21 deletions src/shared/hdf5_manager.F90
Original file line number Diff line number Diff line change
Expand Up @@ -194,6 +194,7 @@ module manager_hdf5
interface h5_read_dataset_collect_hyperslab
module procedure h5_read_dataset_1d_l_collect_hyperslab ! logical
module procedure h5_read_dataset_1d_i_collect_hyperslab ! integer
module procedure h5_read_dataset_1d_i64_collect_hyperslab ! integer 64-bit
module procedure h5_read_dataset_2d_i_collect_hyperslab
module procedure h5_read_dataset_3d_i_collect_hyperslab
module procedure h5_read_dataset_4d_i_collect_hyperslab
Expand Down Expand Up @@ -254,6 +255,7 @@ module manager_hdf5
interface h5_write_dataset_collect_hyperslab
module procedure h5_write_dataset_1d_l_collect_hyperslab ! logical
module procedure h5_write_dataset_1d_i_collect_hyperslab ! integer
module procedure h5_write_dataset_1d_i64_collect_hyperslab ! integer 64-bit
module procedure h5_write_dataset_2d_i_collect_hyperslab
module procedure h5_write_dataset_3d_i_collect_hyperslab
module procedure h5_write_dataset_4d_i_collect_hyperslab
Expand All @@ -265,6 +267,13 @@ module manager_hdf5
module procedure h5_write_dataset_2d_d_collect_hyperslab ! double
end interface h5_write_dataset_collect_hyperslab

! generic interface to create dataset
interface h5_create_dataset_gen
module procedure h5_create_dataset_gen_int
module procedure h5_create_dataset_gen_int64
end interface h5_create_dataset_gen


! object-oriented interface
! (Fortran 2003 standard style)
!
Expand Down Expand Up @@ -410,7 +419,7 @@ end subroutine h5_initialize
subroutine write_attenuation_file_hdf5(factor_common, scale_factor, factor_common_kappa, scale_factor_kappa)

#if defined(USE_HDF5)
use shared_parameters, only: NPROC, LOCAL_PATH
use shared_parameters, only: NPROC, LOCAL_PATH,H5_COL
use constants, only: myrank,N_SLS,NGLLX,NGLLY,NGLLZ
#endif

Expand Down Expand Up @@ -463,16 +472,16 @@ subroutine write_attenuation_file_hdf5(factor_common, scale_factor, factor_commo
call h5_open_file_p_collect(filename)
dset_name = "scale_factor"
call h5_write_dataset_4d_r_collect_hyperslab(dset_name, &
scale_factor,(/0,0,0,sum(offset_nspec(0:myrank-1))/),.true.)
scale_factor,(/0,0,0,sum(offset_nspec(0:myrank-1))/),H5_COL)
dset_name = "scale_factor_kappa"
call h5_write_dataset_4d_r_collect_hyperslab(dset_name, &
scale_factor_kappa,(/0,0,0,sum(offset_nspec(0:myrank-1))/),.true.)
scale_factor_kappa,(/0,0,0,sum(offset_nspec(0:myrank-1))/),H5_COL)
dset_name = "factor_common"
call h5_write_dataset_5d_r_collect_hyperslab(dset_name, &
factor_common,(/0,0,0,0,sum(offset_nspec(0:myrank-1))/),.true.)
factor_common,(/0,0,0,0,sum(offset_nspec(0:myrank-1))/),H5_COL)
dset_name = "factor_common_kappa"
call h5_write_dataset_5d_r_collect_hyperslab(dset_name, &
factor_common_kappa,(/0,0,0,0,sum(offset_nspec(0:myrank-1))/),.true.)
factor_common_kappa,(/0,0,0,0,sum(offset_nspec(0:myrank-1))/),H5_COL)

call h5_close_file_p()
call h5_finalize()
Expand All @@ -499,7 +508,7 @@ end subroutine write_attenuation_file_hdf5
subroutine read_attenuation_file_hdf5(factor_common, scale_factor, factor_common_kappa, scale_factor_kappa)

#if defined(USE_HDF5)
use shared_parameters, only: NPROC, LOCAL_PATH
use shared_parameters, only: NPROC, LOCAL_PATH, H5_COL
use constants, only: myrank
#endif

Expand All @@ -526,16 +535,16 @@ subroutine read_attenuation_file_hdf5(factor_common, scale_factor, factor_common
! open file
call h5_open_file_p_collect(fname)
! read offset array
call h5_read_dataset_1d_i_collect_hyperslab("offset_nspec",offset_nspec, (/0/), .true.)
call h5_read_dataset_collect_hyperslab("offset_nspec",offset_nspec, (/0/), H5_COL)

call h5_read_dataset_4d_r_collect_hyperslab("scale_factor", scale_factor, &
(/0,0,0,sum(offset_nspec(0:myrank-1))/), .true.)
call h5_read_dataset_4d_r_collect_hyperslab("scale_factor_kappa", scale_factor_kappa, &
(/0,0,0,sum(offset_nspec(0:myrank-1))/), .true.)
call h5_read_dataset_5d_r_collect_hyperslab("factor_common", factor_common, &
(/0,0,0,0,sum(offset_nspec(0:myrank-1))/), .true.)
call h5_read_dataset_5d_r_collect_hyperslab("factor_common_kappa", factor_common_kappa, &
(/0,0,0,0,sum(offset_nspec(0:myrank-1))/), .true.)
call h5_read_dataset_collect_hyperslab("scale_factor", scale_factor, &
(/0,0,0,sum(offset_nspec(0:myrank-1))/), H5_COL)
call h5_read_dataset_collect_hyperslab("scale_factor_kappa", scale_factor_kappa, &
(/0,0,0,sum(offset_nspec(0:myrank-1))/), H5_COL)
call h5_read_dataset_collect_hyperslab("factor_common", factor_common, &
(/0,0,0,0,sum(offset_nspec(0:myrank-1))/), H5_COL)
call h5_read_dataset_collect_hyperslab("factor_common_kappa", factor_common_kappa, &
(/0,0,0,0,sum(offset_nspec(0:myrank-1))/), H5_COL)

call h5_close_file_p()
call h5_finalize()
Expand All @@ -562,7 +571,7 @@ end subroutine read_attenuation_file_hdf5
subroutine write_checkmesh_data_hdf5(dset_name,dump_array)

#if defined(USE_HDF5)
use shared_parameters, only: LOCAL_PATH, NPROC
use shared_parameters, only: LOCAL_PATH, NPROC, H5_COL
use constants, only: myrank
#endif

Expand Down Expand Up @@ -612,9 +621,14 @@ subroutine write_checkmesh_data_hdf5(dset_name,dump_array)
endif
call synchronize_all()

! open file
call h5_open_file_p_collect(filename)
call h5_write_dataset_1d_r_collect_hyperslab(dset_name, dump_array, (/sum(offset(0:myrank-1))/),.true.)
! open file (usually *_collect can be used with H5_COL = .false.)
! but I got a strange error on some systems, so I use if statement here
if (H5_COL) then
call h5_open_file_p_collect(filename)
else
call h5_open_file_p(filename)
endif
call h5_write_dataset_collect_hyperslab(dset_name, dump_array, (/sum(offset(0:myrank-1))/), H5_COL)
call h5_close_file_p()

call h5_finalize()
Expand Down Expand Up @@ -1751,7 +1765,7 @@ end subroutine h5_open_file_p_collect
!-------------------------------------------------------------------------------
!

subroutine h5_create_dataset_gen(dataset_name, dim_in, rank, dtype_id)
subroutine h5_create_dataset_gen_int(dataset_name, dim_in, rank, dtype_id)
implicit none
character(len=*), intent(in) :: dataset_name
integer, dimension(:), intent(in) :: dim_in
Expand Down Expand Up @@ -1812,7 +1826,75 @@ subroutine h5_create_dataset_gen(dataset_name, dim_in, rank, dtype_id)
if (error /= 0) write(*,*) 'hdf5 dataspace close failed for ', dataset_name
call check_error()

end subroutine h5_create_dataset_gen
end subroutine h5_create_dataset_gen_int

!
!-------------------------------------------------------------------------------
!

!> Creates an (empty) dataset of the given name, shape and element type in the
!! currently opened HDF5 file (module variable file_id).
!! 64-bit variant of h5_create_dataset_gen_int: same logic, but the dataset
!! dimensions are passed as integer(kind=8) so shapes with more than 2**31-1
!! elements can be described. Part of the generic interface
!! h5_create_dataset_gen, which dispatches on the kind of dim_in.
subroutine h5_create_dataset_gen_int64(dataset_name, dim_in, rank, dtype_id)
implicit none
character(len=*), intent(in) :: dataset_name
! dataset shape as 64-bit integers; converted below to HSIZE_T
integer(kind=8), dimension(:), intent(in) :: dim_in

integer(HSIZE_T), dimension(size(dim_in)) :: dim
integer, intent(in) :: dtype_id ! 1:int, 4:real4, 8:real8,
! number of dimensions; must match size(dim_in) -- not checked here
integer, intent(in) :: rank

! dataspace handle, local to this creation call
integer(HID_T) :: dspace_id
!logical :: if_chunk = .true.
!integer :: i

dim = dim_in ! convert data type

! create a simple (contiguous, fixed-size) dataspace for the dataset
call h5screate_simple_f(rank, dim, dspace_id, error)
if (error /= 0) write(*,*) 'hdf5 dataspace create failed for ', dataset_name
call check_error()
! dataset creation property list (module variable plist_id)
call h5pcreate_f(H5P_DATASET_CREATE_F, plist_id, error)
call check_error()

! chunk size setting
! (disabled: chunking fails for zero-sized dimensions; kept for reference)
!do i = 1, rank
! if (dim(i) <= 0) then
! if_chunk = .false.
! print *, "dataset not chunk set: ", dataset_name
! endif
!enddo
!if (if_chunk) call h5pset_chunk_f(plist_id,rank,dim,error)

! map the caller's dtype_id code to an HDF5 native type
if (dtype_id == 0) then ! bool uses integer
call h5dcreate_f(file_id, trim(dataset_name), H5T_NATIVE_INTEGER, dspace_id, dataset_id, error, &
dcpl_id=plist_id)
else if (dtype_id == 1) then ! integer
call h5dcreate_f(file_id, trim(dataset_name), H5T_NATIVE_INTEGER, dspace_id, dataset_id, error, &
dcpl_id=plist_id)
else if (dtype_id == 2) then ! character
! str_type is a module-level string datatype handle
call h5dcreate_f(file_id, trim(dataset_name), str_type, dspace_id, dataset_id, error, &
dcpl_id=plist_id)
else if (dtype_id == 4) then ! real
call h5dcreate_f(file_id, trim(dataset_name), H5T_NATIVE_REAL, dspace_id, dataset_id, error, &
dcpl_id=plist_id)
else if (dtype_id == 8) then ! double
call h5dcreate_f(file_id, trim(dataset_name), H5T_NATIVE_DOUBLE, dspace_id, dataset_id, error, &
dcpl_id=plist_id)
else
print *, "specified dtype_id is not implemented yet for hdf5 io. aborting..."
stop 'Invalid dtype_id, not implemented yet in h5_create_dataset_gen() routine'
endif
if (error /= 0) write(*,*) 'hdf5 dataset create failed for ', dataset_name
call check_error()

! release the creation property list (no error check by design)
call h5_close_prop_list_nocheck(dataset_name)

! close dataset and dataspace; the dataset is reopened later for writing
call h5dclose_f(dataset_id,error)
if (error /= 0) write(*,*) 'hdf5 dataset close failed for ', dataset_name
call check_error()
call h5sclose_f(dspace_id, error)
if (error /= 0) write(*,*) 'hdf5 dataspace close failed for ', dataset_name
call check_error()

end subroutine h5_create_dataset_gen_int64


!
!-------------------------------------------------------------------------------
Expand Down Expand Up @@ -2524,7 +2606,60 @@ subroutine h5_read_dataset_1d_i_collect_hyperslab(dataset_name, data, offset_in,
call h5_close_dataset()
end subroutine h5_read_dataset_1d_i_collect_hyperslab

!
!-------------------------------------------------------------------------------
!

!> Reads a hyperslab of a 1D default-integer dataset from the currently opened
!! HDF5 file into `data`, starting at file-element offset `offset_in`.
!! The "_i64" variant differs from h5_read_dataset_1d_i_collect_hyperslab only
!! in taking a 64-bit offset, so datasets larger than 2**31-1 elements can be
!! addressed; the generic interface h5_read_dataset_collect_hyperslab
!! dispatches on the kind of offset_in. The element data itself is still
!! default integer (H5T_NATIVE_INTEGER).
!!
!! dataset_name   name of the dataset in the file
!! data           receives size(data) elements; assumed non-empty (c_loc(data(1)))
!! offset_in      64-bit starting element index (0-based) within the file dataset
!! if_collective  .true. for collective MPI-IO transfer, .false. for independent
subroutine h5_read_dataset_1d_i64_collect_hyperslab(dataset_name, data, offset_in, if_collective)
implicit none
character(len=*), intent(in) :: dataset_name
integer, dimension(:), intent(inout), target :: data
integer(kind=8), dimension(:), intent(in) :: offset_in
logical, intent(in) :: if_collective
! local parameters
integer, parameter :: rank = 1
integer(HSIZE_T), dimension(1) :: dim
integer(HSIZE_T), dimension(1) :: count ! size of hyperslab
integer(HSSIZE_T), dimension(1) :: offset ! the position where the datablock is read from

dim = shape(data)
offset = offset_in ! convert 64-bit integer to HSSIZE_T

! open dataset (sets module variable dataset_id)
call h5_open_dataset2(trim(dataset_name))

! the local slab covers the whole of data
count(1) = dim(1)

! memory dataspace matching the local array, and matching selection in the file
call h5screate_simple_f(rank, count, mem_dspace_id, error)
call check_error()
call h5dget_space_f(dataset_id, file_dspace_id, error)
call check_error()
call h5sselect_hyperslab_f(file_dspace_id, H5S_SELECT_SET_F, offset, count, error)
call check_error()
! transfer property list: collective or independent MPI-IO
call h5_create_dataset_prop_list(if_collective)

call h5_check_arr_dim(dim)

! read array using Fortran pointer
!call h5dread_f(dataset_id, H5T_NATIVE_INTEGER, data, dim, error, &
!               file_space_id=file_dspace_id, mem_space_id=mem_dspace_id, xfer_prp=plist_id)
! use F2003 API
f_ptr = c_loc(data(1))
call h5dread_f(dataset_id, H5T_NATIVE_INTEGER, f_ptr, error, &
               file_space_id=file_dspace_id, mem_space_id=mem_dspace_id, xfer_prp=plist_id)
if (error /= 0) write(*,*) 'hdf5 dataset read failed for ', dataset_name
call check_error()
call h5_close_prop_list(dataset_name)
call h5sclose_f(mem_dspace_id, error)
call check_error()
call h5sclose_f(file_dspace_id, error)
call check_error()
call h5_close_dataset()
end subroutine h5_read_dataset_1d_i64_collect_hyperslab


!-------------------------------------------------------------------------------
!

Expand Down Expand Up @@ -3976,6 +4111,59 @@ end subroutine h5_write_dataset_1d_i_collect_hyperslab
!-------------------------------------------------------------------------------
!

! store local 1d array to global 1d array
!> Writes the local 1D default-integer array `data` into a hyperslab of an
!! existing dataset in the currently opened HDF5 file, starting at file-element
!! offset `offset_in`. The "_i64" variant differs from
!! h5_write_dataset_1d_i_collect_hyperslab only in taking a 64-bit offset
!! (the generic interface h5_write_dataset_collect_hyperslab dispatches on the
!! kind of offset_in); the element data itself is still default integer.
!!
!! dataset_name   name of the (already created) dataset in the file
!! data           local values to write; assumed non-empty (c_loc(data(1)))
!! offset_in      64-bit starting element index (0-based) within the file dataset
!! if_collective  .true. for collective MPI-IO transfer, .false. for independent
subroutine h5_write_dataset_1d_i64_collect_hyperslab(dataset_name, data, offset_in, if_collective)
implicit none
character(len=*), intent(in) :: dataset_name
integer, dimension(:), intent(in), target :: data
integer(kind=8), dimension(:), intent(in) :: offset_in
logical, intent(in) :: if_collective
! local parameters
integer :: rank = 1
integer(HSIZE_T), dimension(1) :: dim
integer(HSIZE_T), dimension(1) :: count ! size of hyperslab
integer(HSSIZE_T), dimension(1) :: offset ! the position where the datablock is inserted

dim = shape(data)
offset = offset_in ! convert data type (64-bit integer -> HSSIZE_T)

! open dataset (sets module variable dataset_id)
call h5_open_dataset2(trim(dataset_name))

! select a place where data is inserted.
! the local slab covers the whole of data
count(1) = dim(1)

! select hyperslab in the file
! (memory dataspace matches the local array; file selection starts at offset)
call h5screate_simple_f(rank,count, mem_dspace_id, error)
call check_error()
call h5dget_space_f(dataset_id, file_dspace_id, error)
call check_error()
call h5sselect_hyperslab_f(file_dspace_id, H5S_SELECT_SET_F, offset, count, error)
call check_error()
! transfer property list: collective or independent MPI-IO
call h5_create_dataset_prop_list(if_collective)

call h5_check_arr_dim(dim)
! write array using Fortran pointer
!call h5dwrite_f(dataset_id, H5T_NATIVE_INTEGER, data, dim, error, &
! file_space_id=file_dspace_id, mem_space_id=mem_dspace_id, xfer_prp=plist_id)
! use F2003 API
call h5dwrite_f(dataset_id, H5T_NATIVE_INTEGER, c_loc(data(1)), error, &
file_space_id=file_dspace_id, mem_space_id=mem_dspace_id, xfer_prp=plist_id)
if (error /= 0) write(*,*) 'hdf5 dataset write failed for ', dataset_name
call check_error()
call h5_close_prop_list(dataset_name)
call h5sclose_f(mem_dspace_id, error)
call check_error()
call h5sclose_f(file_dspace_id, error)
call check_error()
call h5_close_dataset()
end subroutine h5_write_dataset_1d_i64_collect_hyperslab

!
!-------------------------------------------------------------------------------
!


subroutine h5_write_dataset_1d_r_collect_hyperslab(dataset_name, data, offset_in, if_collective)
implicit none
character(len=*), intent(in) :: dataset_name
Expand Down
3 changes: 3 additions & 0 deletions src/shared/shared_par.F90
Original file line number Diff line number Diff line change
Expand Up @@ -192,6 +192,9 @@ module shared_input_parameters
! number of io dedicated nodes
integer :: HDF5_IO_NODES = 0

! HDF5 IO writing mode (collective or independent)
logical :: H5_COL = .true.

! flag for io-dedicated/compute node.
logical :: IO_storage_task = .false.
logical :: IO_compute_task = .true.
Expand Down
Loading
Loading