program benchmark_helmholtz_solvers
!! Program to benchmark solvers for the Helmholtz problem

  use MPI
#ifdef ENABLE_PETSC
  use petscksp
#endif
  use iso_fortran_env
  !$ use omp_lib
  use netcdf
  use parallax_build_info_m, only: git_hash, git_date
  use perf_m
  use precision_m, only: FP, FP_EPS
  use error_handling_m, only: handle_error, handle_error_netcdf
  use status_codes_m, only: PARALLAX_ERR_PARAMETERS, PARALLAX_WRN_GENERAL
  use screen_io_m, only: get_stdout
  use comm_handling_m, only: is_master, get_communicator
  use descriptors_m, only: convert_descriptor_char_int, &
                           BND_TYPE_DIRICHLET_ZERO, BND_TYPE_NEUMANN, &
                           DISTRICT_CORE
  use equilibrium_m, only: equilibrium_t
  use equilibrium_factory_m, only: create_equilibrium, &
                                   get_equilibrium_identifier
  use mesh_cart_m, only: mesh_cart_t
  use multigrid_m, only: multigrid_t
  use helmholtz_solver_m, only: helmholtz_solver_t
  use helmholtz_solver_factory_m, only: parameters_helmholtz_solver_factory, &
                                        helmholtz_solver_factory
  use helmholtz_netcdfio_m, only: read_netcdf_helmholtz, &
                                  write_netcdf_helmholtz
  use device_handling_m, only: impose_default_device_affinity
  use testfunctions_m, only: testfun_lambda, testfun_xi, &
                             testfun_co, testfun_u, testfun_helm_u, &
                             testfun_dudrhon

  implicit none

  integer :: ierr, rank, nprocs
  logical :: omp_on
  integer :: num_threads
  integer :: funit, io_error, nf90_id, nf90_stat
  character(len=150) :: errmsg
  class(equilibrium_t), allocatable :: equi
  type(mesh_cart_t) :: mesh
  type(multigrid_t) :: multigrid
  integer :: bnd_type_core, bnd_type_wall, bnd_type_dome, bnd_type_out
  real(FP), allocatable, dimension(:,:) :: co, lambda, xi
  real(FP), allocatable, dimension(:), target :: rhs, sol, guess
  class(helmholtz_solver_t), allocatable :: hsolver
  integer, allocatable, dimension(:) :: reorders
  real(FP) :: x, y, res, err_l2nrm, err_sup, nrm_l2, df
  integer :: eq_id, l, ki, kb, kg, district, hinfo
  logical :: run_default_case = .false.

  ! Parameters controlling the case
  character(len=64) :: geometry = 'CIRCULAR'
  logical :: read_case_from_files = .false.
  logical :: write_case_to_files = .false.
  logical :: write_log_file = .false.

  ! Solver parameters
  type(parameters_helmholtz_solver_factory) :: parslv
  character(len=16) :: solver_type = 'MGMRES'
  character(len=16) :: dirsolver_type = 'MKL'
  character(len=16) :: smoother_type = 'GSRB'
  real(FP) :: rtol = 1.0E-8_FP
  real(FP) :: restol_zero = FP_EPS
  integer :: gmres_maxiter = 15
  integer :: gmres_nrestart = 15
  character(len=1) :: mgrid_cycletype = 'V'
  integer :: mgrid_npresmooth = 5
  integer :: mgrid_npostsmooth = 5
  integer :: dbgout = 1

  ! Grid parameters (only for read_case_from_files=.false.)
  integer :: nlvls = 4
  real(FP) :: spacing_f = 4.0E-3_FP
  integer :: size_neighbor = 2
  integer :: size_ghost_layer = 2
  integer :: reorder_size = 8
  logical :: extend_beyond_wall = .false.
  real(FP) :: phi = 0.0_FP

  ! Parameters controlling the state (solution, rhs, etc.)
  character(len=32) :: bnd_type_core_char = 'BND_TYPE_DIRICHLET_ZERO'

  namelist / case_params / geometry, read_case_from_files, &
                           write_case_to_files, write_log_file
  namelist / solver_params / solver_type, dirsolver_type, smoother_type, &
                             rtol, restol_zero, gmres_maxiter, &
                             gmres_nrestart, mgrid_cycletype, &
                             mgrid_npresmooth, mgrid_npostsmooth, dbgout
  namelist / grid_params / nlvls, spacing_f, size_neighbor, &
                           size_ghost_layer, reorder_size, extend_beyond_wall
  namelist / state_params / bnd_type_core_char
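
  ! For reference, a minimal 'params.in' driving the namelists above might
  ! look as follows (an illustrative sketch; the values shown are arbitrary
  ! choices, not a shipped case):
  !
  !   &case_params
  !     geometry             = 'CIRCULAR'
  !     read_case_from_files = .false.
  !     write_case_to_files  = .false.
  !     write_log_file       = .true.
  !   /
  !   &solver_params
  !     solver_type = 'MGMRES'
  !     rtol        = 1.0E-10
  !   /
  !   &grid_params
  !     nlvls     = 5
  !     spacing_f = 2.0E-3
  !   /
  !   &state_params
  !     bnd_type_core_char = 'BND_TYPE_NEUMANN'
  !   /
  !
  ! Any variable omitted from a namelist keeps the default value declared
  ! above.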
  ! Initialisation of MPI, OpenMP framework
#ifdef ENABLE_PETSC
  call PetscInitialize(PETSC_NULL_CHARACTER, ierr)
#else
  call MPI_init(ierr)
#endif
  call MPI_comm_rank(get_communicator(), rank, ierr)
  call MPI_comm_size(get_communicator(), nprocs, ierr)

  ! Get OMP information
  omp_on = .false.
  num_threads = 0
  !$omp parallel
  !$omp single
  !$ omp_on = .true.
  !$ num_threads = omp_get_num_threads()
  !$omp end single
  !$omp end parallel

  ! Write out MPI and OMP information
  if (is_master()) then
     write(get_stdout(),*)''
     write(get_stdout(),*)'Running benchmark_helmholtz_solvers'
     write(get_stdout(),*)'git hash: ', git_hash
     write(get_stdout(),*)'git date: ', git_date
     write(get_stdout(),*)''
     write(get_stdout(),*)''
     write(get_stdout(),*)'This file was compiled by ', &
                          compiler_version(), ' using the options ', &
                          compiler_options()
     write(get_stdout(),*)''
     write(get_stdout(),*)''
     write(get_stdout(),*)'Number of MPI-processes = ',nprocs
     if (omp_on) then
        write(get_stdout(),*)'OMP switched on:'
        write(get_stdout(),*)'   Number of OMP-threads = ',num_threads
     else
        write(get_stdout(),*)'OMP switched off'
     endif
     write(get_stdout(),*)''
  endif

  ! Read and process input parameters
  open(newunit=funit, file='params.in', status='old', action='read', &
       iostat=io_error)
  if (io_error /= 0) then
     run_default_case = .true.
     call handle_error('No parameter file provided, running default case', &
                       PARALLAX_WRN_GENERAL, __LINE__, __FILE__)
  else
     read(funit, nml=case_params, iostat=io_error, iomsg=errmsg)
     if (io_error /= 0) then
        call handle_error(errmsg, PARALLAX_ERR_PARAMETERS, &
                          __LINE__, __FILE__)
     endif
     rewind(funit)
     read(funit, nml=solver_params, iostat=io_error, iomsg=errmsg)
     if (io_error /= 0) then
        call handle_error(errmsg, PARALLAX_ERR_PARAMETERS, &
                          __LINE__, __FILE__)
     endif
     if (.not.read_case_from_files) then
        rewind(funit)
        read(funit, nml=grid_params, iostat=io_error, iomsg=errmsg)
        if (io_error /= 0) then
           call handle_error(errmsg, PARALLAX_ERR_PARAMETERS, &
                             __LINE__, __FILE__)
        endif
        rewind(funit)
        read(funit, nml=state_params, iostat=io_error, iomsg=errmsg)
        if (io_error /= 0) then
           call handle_error(errmsg, PARALLAX_ERR_PARAMETERS, &
                             __LINE__, __FILE__)
        endif
     endif
     close(funit)
  endif

  parslv%dirsolver_type = dirsolver_type
  parslv%smoother_type = smoother_type
  parslv%rtol = rtol
  parslv%restol_zero = restol_zero
  parslv%gmres_maxiter = gmres_maxiter
  parslv%gmres_nrestart = gmres_nrestart
  parslv%mgrid_cycletype = mgrid_cycletype
  parslv%mgrid_npresmooth = mgrid_npresmooth
  parslv%mgrid_npostsmooth = mgrid_npostsmooth
  parslv%dbgout = dbgout

  if (is_master()) then
     write(get_stdout(),nml=case_params)
     write(get_stdout(),nml=solver_params)
     if (.not.read_case_from_files) then
        write(get_stdout(),nml=grid_params)
        write(get_stdout(),nml=state_params)
     endif
  endif
  call parslv%display()

  ! Choose backend device as early as possible
  call impose_default_device_affinity(solver_type)

  ! Create equilibrium
  eq_id = get_equilibrium_identifier(geometry)
  if (run_default_case) then
     write(get_stdout(),*)'Using default equilibrium'
     call create_equilibrium(equi, eq_id)
  else
     call create_equilibrium(equi, eq_id, 'params.in')
  endif
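
  ! The multigrid hierarchy is either restored from 'multigrid.nc' or built
  ! from scratch with nlvls levels and finest spacing spacing_f. In the
  ! latter case, every level is assigned the same reordering parameter
  ! reorder_size (a uniform choice made by this benchmark, not a library
  ! requirement).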
  if (read_case_from_files) then
     ! Read multigrid
     nf90_stat = nf90_open('multigrid.nc', NF90_NETCDF4, nf90_id)
     call handle_error_netcdf(nf90_stat, __LINE__, __FILE__)
     call multigrid%read_netcdf(nf90_id, mesh)
     nf90_stat = nf90_close(nf90_id)
     call handle_error_netcdf(nf90_stat, __LINE__, __FILE__)
  else
     ! Build multigrid from scratch
     write(get_stdout(),*)'No case provided, building multigrid from scratch'
     allocate(reorders(nlvls), source=reorder_size)
     call multigrid%init(equi, phi, nlvls, spacing_f, &
                         size_neighbor, size_ghost_layer, mesh, &
                         reorders, extend_beyond_wall, dbgout=0)
     deallocate(reorders)
  endif
  call mesh%display()

  ! Allocate fields with dummy data
  allocate(co(mesh%get_n_points(),1), source=1.0_FP)
  allocate(lambda(mesh%get_n_points_inner(),1), source=0.0_FP)
  allocate(xi(mesh%get_n_points_inner(),1), source=1.0_FP)
  allocate(rhs(mesh%get_n_points()))
  allocate(sol(mesh%get_n_points()))
  allocate(guess(mesh%get_n_points()))

  ! Factorise Helmholtz solver with template data
  call perf_start('solve_total')
  call perf_start('../factory')
  call helmholtz_solver_factory(multigrid, &
                                BND_TYPE_DIRICHLET_ZERO, &
                                BND_TYPE_DIRICHLET_ZERO, &
                                BND_TYPE_DIRICHLET_ZERO, &
                                BND_TYPE_DIRICHLET_ZERO, &
                                co(:,1), lambda(:,1), xi(:,1), &
                                parslv, solver_type, hsolver)
  call perf_stop('../factory')

  if (read_case_from_files) then
     ! Read data for elliptic problem
     nf90_stat = nf90_open('helmholtz_data.nc', NF90_NETCDF4, nf90_id)
     call handle_error_netcdf(nf90_stat, __LINE__, __FILE__)
     call read_netcdf_helmholtz(nf90_id, mesh, bnd_type_core, &
                                bnd_type_wall, bnd_type_dome, &
                                bnd_type_out, &
                                co, lambda, xi, &
                                rhs, guess, sol)
     nf90_stat = nf90_close(nf90_id)
     call handle_error_netcdf(nf90_stat, __LINE__, __FILE__)
  else
     ! Create a state based on analytically prescribed test functions
     call convert_descriptor_char_int(bnd_type_core_char, bnd_type_core)
     bnd_type_wall = BND_TYPE_DIRICHLET_ZERO
     bnd_type_dome = BND_TYPE_DIRICHLET_ZERO
     bnd_type_out = BND_TYPE_DIRICHLET_ZERO

     !$omp parallel default(none) private(ki, kb, kg, l, x, y, district) &
     !$omp shared(equi, mesh, phi, bnd_type_core, &
     !$omp        lambda, xi, co, sol, rhs, guess)
     !$omp do
     do ki = 1, mesh%get_n_points_inner()
        l = mesh%inner_indices(ki)
        x = mesh%get_x(l)
        y = mesh%get_y(l)
        lambda(ki,1) = testfun_lambda(equi, x, y, phi)
        xi(ki,1) = testfun_xi(equi, x, y, phi)
        co(l,1) = testfun_co(equi, x, y, phi)
        sol(l) = testfun_u(equi, x, y, phi)
        rhs(l) = testfun_helm_u(equi, x, y, phi)
        guess(l) = 0.0_FP
     enddo
     !$omp end do
     !$omp do
     do kb = 1, mesh%get_n_points_boundary()
        l = mesh%boundary_indices(kb)
        x = mesh%get_x(l)
        y = mesh%get_y(l)
        co(l,1) = testfun_co(equi, x, y, phi)
        sol(l) = testfun_u(equi, x, y, phi)
        guess(l) = 0.0_FP
        ! Depending on boundary condition type set rhs to solution
        ! or to radial derivative of solution
        district = mesh%get_district(l)
        if ( (district == DISTRICT_CORE) .and. &
             (bnd_type_core == BND_TYPE_NEUMANN) ) then
           rhs(l) = testfun_dudrhon(equi, x, y, phi)
        else
           rhs(l) = sol(l)
        endif
     enddo
     !$omp end do
     !$omp do
     do kg = 1, mesh%get_n_points_ghost()
        l = mesh%ghost_indices(kg)
        x = mesh%get_x(l)
        y = mesh%get_y(l)
        co(l,1) = testfun_co(equi, x, y, phi)
        sol(l) = testfun_u(equi, x, y, phi)
        rhs(l) = sol(l)
        guess(l) = 0.0_FP
     enddo
     !$omp end do
     !$omp end parallel
  endif

  call perf_start('../update')
  call hsolver%update(co(:,1), lambda(:,1), xi(:,1), &
                      bnd_type_core, bnd_type_wall, &
                      bnd_type_dome, bnd_type_out)
  call perf_stop('../update')

  call perf_start('../solve')
  call hsolver%solve(rhs, guess, res, hinfo)
  call perf_stop('../solve')
  call perf_stop('solve_total')

  write(get_stdout(),*)'res   = ',res
  write(get_stdout(),*)'hinfo = ',hinfo
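
  ! The checks below compare the computed solution (returned in 'guess' by
  ! hsolver%solve) against the reference 'sol' using two standard metrics:
  !   err_l2nrm = ||guess - sol||_2 / ||sol||_2    (relative L2 error)
  !   err_sup   = max_l |guess(l) - sol(l)|        (sup/maximum error)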
  ! Assess error between obtained and stored solution
  err_l2nrm = 0.0_FP
  err_sup = 0.0_FP
  nrm_l2 = 0.0_FP
  do l = 1, mesh%get_n_points()
     nrm_l2 = nrm_l2 + sol(l)**2
     df = abs(guess(l) - sol(l))
     err_sup = max(err_sup, df)
     err_l2nrm = err_l2nrm + df**2
  enddo
  nrm_l2 = sqrt(nrm_l2)
  err_l2nrm = sqrt(err_l2nrm) / nrm_l2
  write(get_stdout(),*)'nrm_l2    = ',nrm_l2
  write(get_stdout(),*)'err_l2nrm = ',err_l2nrm
  write(get_stdout(),*)'err_sup   = ',err_sup

  call perf_print()

  ! Write case to file for possible postprocessing analysis
  if (write_case_to_files) then
     nf90_stat = nf90_create('multigrid_out.nc', NF90_NETCDF4, nf90_id)
     call handle_error_netcdf(nf90_stat, __LINE__, __FILE__)
     call multigrid%write_netcdf(nf90_id)
     nf90_stat = nf90_close(nf90_id)
     call handle_error_netcdf(nf90_stat, __LINE__, __FILE__)

     nf90_stat = nf90_create('helmholtz_data_out.nc', NF90_NETCDF4, nf90_id)
     call handle_error_netcdf(nf90_stat, __LINE__, __FILE__)
     call write_netcdf_helmholtz(nf90_id, mesh, &
          bnd_type_core, bnd_type_wall, bnd_type_dome, bnd_type_out, &
          co(:,1), lambda(:,1), xi(:,1), rhs, guess, sol, &
          hcsr_write_on=.true.)
     nf90_stat = nf90_close(nf90_id)
     call handle_error_netcdf(nf90_stat, __LINE__, __FILE__)
  endif

  ! Writes logs to job.out for integration test on pipeline
  if (write_log_file .and. is_master()) then
     open(newunit=funit, file='job.out', action='write', status='replace', &
          iostat=io_error)
     if (io_error /= 0) then
        call handle_error("Creating job.out file failed", &
                          PARALLAX_WRN_GENERAL, __LINE__, __FILE__)
     endif
     write(funit,300)hinfo
     write(funit,301)res
     write(funit,301)nrm_l2
     write(funit,301)err_l2nrm
     write(funit,301)err_sup
300  FORMAT(I8)
301  FORMAT(ES22.14E3)
     close(funit)
  endif

  deallocate(guess)
  deallocate(sol)
  deallocate(rhs)
  deallocate(xi)
  deallocate(lambda)
  deallocate(co)

#ifdef ENABLE_PETSC
  call PetscFinalize(ierr)
#else
  call MPI_finalize(ierr)
#endif

end program benchmark_helmholtz_solvers
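
! Typical invocation (a sketch; the executable name and MPI launcher depend
! on the local build and environment, and are assumptions here):
!
!   mpirun -np 4 ./benchmark_helmholtz_solvers
!
! run from a directory containing 'params.in' (and, for
! read_case_from_files=.true., 'multigrid.nc' and 'helmholtz_data.nc');
! without 'params.in' the default case is run.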