Commit c4af1611 authored by Bastian Schütrumpf

Revert "new modifications removing communication,calc matrix and recombine"

This reverts commit ad20b904
parent ad20b904
MODULE LINALG
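! Distributed linear algebra for the diagonalization and orthonormalization
! steps: wrappers around ScaLAPACK (PZHEEVD, PZGEMM) on the 2d BLACS
! process grid set up in module Parallel.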
USE Params, ONLY: db,cmplxzero,cmplxone
USE Levels
USE Parallel, ONLY: nb,mb,contxt,nstloc_x,nstloc_y,globalindex_x,globalindex_y,nb_psi
!
IMPLICIT NONE
INTEGER :: nlin(2)
INTEGER :: desca(2,10),descz(2,10),descc(2,10),desc_t(2,10)&
, work_t_size(2),iwork_t_size(2),rwork_t_size(2)
!
REAL(db) ,ALLOCATABLE :: rwork_t(:),evals(:)
COMPLEX(db),ALLOCATABLE :: work_t(:),matr_lin(:,:),unitary(:,:)
INTEGER ,ALLOCATABLE :: iwork_t(:)
!
CONTAINS
!************************************************************
SUBROUTINE init_linalg
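! Sets up, for each isospin iq, the BLACS descriptors of the distributed
! (nlin x nlin) matrices (DESCA/DESCZ/DESCC) and of the wave-function
! block (DESC_T), and determines the optimal PZHEEVD workspace sizes.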
INTEGER :: iq,infoconv
DO iq=1,2
nlin(iq)=npsi(iq)-npmin(iq)+1
CALL DESCINIT(DESCA(iq,1:10),npsi(iq)-npmin(iq)+1,npsi(iq)-npmin(iq)+1,&
NB,MB,0,0,CONTXT,nstloc_x(iq),infoconv)
CALL DESCINIT(DESCZ(iq,1:10),npsi(iq)-npmin(iq)+1,npsi(iq)-npmin(iq)+1,&
NB,MB,0,0,CONTXT,nstloc_x(iq),infoconv)
CALL DESCINIT(DESCC(iq,1:10),npsi(iq)-npmin(iq)+1,npsi(iq)-npmin(iq)+1,&
NB,MB,0,0,CONTXT,nstloc_x(iq),infoconv)
CALL DESCINIT(DESC_T(iq,1:10),nx*ny*nz*2,npsi(iq)-npmin(iq)+1,nx*ny*nz*2,&
nb_psi,0,0,CONTXT,nx*ny*nz*2,infoconv)
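! Workspace query: calling PZHEEVD with all l*work sizes set to -1 only
! returns the optimal work-array sizes in work_t(1), rwork_t(1) and
! iwork_t(1); they are stored so eigenvecs can allocate exactly once.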
work_t_size(iq) = -1
iwork_t_size(iq) = -1
rwork_t_size(iq) = -1
ALLOCATE(work_t(1),iwork_t(1),rwork_t(1),matr_lin(nstloc_x(iq),nstloc_y(iq)),&
unitary(nstloc_x(iq),nstloc_y(iq)),evals(nlin(iq)))
CALL PZHEEVD('V','L',nlin(iq),matr_lin,1,1,DESCA(iq,1:10),evals,&
unitary,1,1,DESCZ(iq,1:10),work_t,work_t_size(iq),rwork_t,&
rwork_t_size(iq),iwork_t,iwork_t_size(iq),infoconv)
work_t_size(iq) = INT(ABS(work_t(1)))
iwork_t_size(iq) = INT(ABS(iwork_t(1)))
rwork_t_size(iq) = INT(ABS(rwork_t(1)))
DEALLOCATE(work_t,iwork_t,rwork_t,matr_lin,unitary,evals)
END DO
END SUBROUTINE init_linalg
!************************************************************
SUBROUTINE eigenvecs(matr_in,evecs,evals_out,iq)
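! Diagonalizes the distributed Hermitian matrix matr_in (isospin iq) with
! ScaLAPACK's PZHEEVD, using the workspace sizes found in init_linalg;
! returns the eigenvectors in evecs and, on request, the eigenvalues.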
INTEGER, INTENT(IN) :: iq
COMPLEX(db), INTENT(INOUT) :: matr_in(:,:) ! INOUT: PZHEEVD destroys the referenced triangle of the input matrix
COMPLEX(db), INTENT(OUT) :: evecs(:,:)
REAL(db), INTENT(OUT),OPTIONAL :: evals_out(:)
INTEGER :: infoconv
ALLOCATE(work_t(work_t_size(iq)),rwork_t(rwork_t_size(iq)),&
iwork_t(iwork_t_size(iq)),evals(nlin(iq)))
CALL PZHEEVD('V','L',nlin(iq),matr_in,1,1,DESCA(iq,1:10),evals,evecs,1,1,DESCZ(iq,1:10),&
work_t,work_t_size(iq),rwork_t,rwork_t_size(iq),iwork_t,iwork_t_size(iq),infoconv)
IF(PRESENT(evals_out)) evals_out=evals
DEALLOCATE(evals,work_t,iwork_t,rwork_t)
END SUBROUTINE eigenvecs
!************************************************************
SUBROUTINE loewdin(imatr,smatr,iq)
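! Builds smatr = imatr**(-1/2), i.e. the Loewdin orthonormalization matrix
! of the overlap matrix imatr: diagonalize, rescale the eigenvector
! columns, and multiply the factors back together.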
INTEGER, INTENT(IN) :: iq
COMPLEX(db),INTENT(INOUT) :: imatr(:,:) ! INOUT: overwritten via eigenvecs/PZHEEVD
COMPLEX(db),INTENT(OUT) :: smatr(:,:)
COMPLEX(db),ALLOCATABLE :: tmatr(:,:)
REAL(db) ,ALLOCATABLE :: eigen_h(:)
INTEGER :: it,jt,iy
ALLOCATE(tmatr(nstloc_x(iq),nstloc_y(iq)),eigen_h(nlin(iq)))
CALL eigenvecs(imatr,tmatr,eigen_h,iq)
eigen_h=1.0d0/sqrt(eigen_h)
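! Scale each local eigenvector column by lambda**(-1/4) (globalindex_y maps
! the local column jt to its global eigenvalue index); the product
! T T^dagger formed by PZGEMM below then carries the full lambda**(-1/2).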
DO it = 1,nstloc_x(iq)
DO jt = 1,nstloc_y(iq)
iy = globalindex_y(jt,iq)-npmin(iq)+1
tmatr(it,jt) = tmatr(it,jt)*sqrt(eigen_h(iy))
ENDDO
ENDDO
CALL PZGEMM('N','C',nlin(iq),nlin(iq),nlin(iq),cmplxone,tmatr,1,1,DESCA(iq,1:10),&
tmatr,1,1,DESCZ(iq,1:10),cmplxzero,smatr,1,1,DESCC(iq,1:10))
DEALLOCATE(tmatr,eigen_h)
END SUBROUTINE loewdin
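For reference, this is the standard Löwdin (symmetric) orthonormalization: if the overlap matrix has the eigendecomposition $S = U\,\mathrm{diag}(\lambda_j)\,U^\dagger$, then

$$
S^{-1/2} = T\,T^{\dagger}, \qquad T = U\,\mathrm{diag}\!\big(\lambda_j^{-1/4}\big),
$$

which is exactly the column rescaling followed by the PZGEMM above.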
!************************************************************
SUBROUTINE comb_orthodiag(unitary_1,unitary_2,unitary,iq)
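! Forms unitary = unitary_1^T * unitary_2^T = (unitary_2*unitary_1)^T,
! i.e. combines the transformations of two successive steps into one.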
INTEGER, INTENT(IN) :: iq
COMPLEX(db),INTENT(IN) :: unitary_1(:,:),unitary_2(:,:)
COMPLEX(db),INTENT(OUT) :: unitary(:,:)
CALL PZGEMM('T','T',nlin(iq),nlin(iq),nlin(iq),cmplxone,unitary_1,1,1,DESCA(iq,1:10),unitary_2,&
1,1,DESCZ(iq,1:10),cmplxzero,unitary,1,1,DESCC(iq,1:10))
END SUBROUTINE comb_orthodiag
!************************************************************
SUBROUTINE recombine(matrix,psi_in,psi_out,iq)
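! Applies the local block of the transformation matrix to the local part
! of one wave function, psi_out = matrix*psi_in, with a serial ZGEMV.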
INTEGER, INTENT(IN) :: iq
COMPLEX(db),INTENT(IN) :: psi_in(:),matrix(:,:)
COMPLEX(db),INTENT(OUT) :: psi_out(:)
CALL ZGEMV('N',nstloc_x(iq),nstloc_y(iq),cmplxone,matrix,nstloc_x(iq),psi_in,1,cmplxzero,&
psi_out,1)
END SUBROUTINE recombine
END MODULE LINALG
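As a sanity check of the algebra used in loewdin, here is a minimal serial sketch (not part of the commit; it assumes only a linked LAPACK and uses ZHEEV and MATMUL in place of PZHEEVD and PZGEMM):

PROGRAM loewdin_serial_check
  IMPLICIT NONE
  INTEGER, PARAMETER :: db = KIND(1.0d0), n = 2
  COMPLEX(db) :: s(n,n), t(n,n), sinv(n,n), chk(n,n), work(4*n)
  REAL(db)    :: evals(n), rwork(3*n-2)
  INTEGER     :: info, j
  ! small Hermitian, positive-definite test "overlap" matrix
  s = RESHAPE([(1.0d0,0.0d0),(0.2d0,0.1d0), &
               (0.2d0,-0.1d0),(1.0d0,0.0d0)],[n,n])
  t = s
  CALL ZHEEV('V','L',n,t,n,evals,work,SIZE(work),rwork,info)
  IF(info/=0) STOP 'ZHEEV failed'
  DO j = 1,n
     t(:,j) = t(:,j)*evals(j)**(-0.25d0) ! same lambda**(-1/4) scaling as loewdin
  END DO
  sinv = MATMUL(t,CONJG(TRANSPOSE(t)))   ! S**(-1/2) = T T^dagger
  chk  = MATMUL(MATMUL(sinv,s),sinv)     ! should come out as the identity
  DO j = 1,n
     WRITE(*,'(4F10.6)') chk(j,:)
  END DO
END PROGRAM loewdin_serial_check

Compiled with, e.g., gfortran -llapack, it prints the 2x2 identity.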
@@ -5,24 +5,13 @@ MODULE Parallel
 IMPLICIT NONE
 INCLUDE 'mpif.h'
 SAVE
-INTEGER, PARAMETER :: NB=32,MB=32,NB_psi = 32
-LOGICAL, PARAMETER :: tmpi=.TRUE.,ttabc=.FALSE.
-INTEGER, ALLOCATABLE :: node(:),localindex(:),globalindex(:),&
-node_x(:),node_y(:),localindex_x(:),localindex_y(:),&
-globalindex_x(:,:),globalindex_y(:,:)
-INTEGER, ALLOCATABLE :: recvcounts(:,:),displs(:,:)
-INTEGER, ALLOCATABLE :: node_2dto1d(:,:),node_1dto2d_x(:),node_1dto2d_y(:)
-INTEGER :: mpi_nprocs,mpi_ierror,mpi_myproc
-INTEGER :: comm2d,mpi_dims(2),mpi_mycoords(2),nstloc_x(2),nstloc_y(2),first(2)
-INTEGER :: comm2d_x,comm2d_y,mpi_size_x,mpi_size_y,mpi_rank_x,mpi_rank_y
-INTEGER :: NPROCS,NPROW,NPCOL,MYPROW,MYPCOL,CONTXT,IAM
-INTEGER, EXTERNAL :: NUMROC,INDXL2G,INDXG2L,INDXG2P
-REAL(db) :: timer(20)
+LOGICAL,PARAMETER :: tmpi=.TRUE.
+INTEGER, ALLOCATABLE :: node(:),localindex(:),globalindex(:)
+INTEGER :: mpi_nprocs,mpi_ierror,mpi_myproc
 CONTAINS
 !***********************************************************************
 SUBROUTINE alloc_nodes
-ALLOCATE(node(nstmax),localindex(nstmax),globalindex(nstmax),&
-localindex_x(nstmax),localindex_y(nstmax))
+ALLOCATE(node(nstmax),localindex(nstmax),globalindex(nstmax))
 END SUBROUTINE alloc_nodes
 !***********************************************************************
 SUBROUTINE init_all_mpi
@@ -33,123 +22,15 @@ CONTAINS
 IF(wflag) WRITE(*,'(a,i5)') ' number of nodes=',mpi_nprocs
 CALL mpi_barrier (mpi_comm_world, mpi_ierror)
 END SUBROUTINE init_all_mpi
-!************************************************************************
-SUBROUTINE mpi_init_filename
-CONTINUE
-END SUBROUTINE mpi_init_filename
-!************************************************************************
-FUNCTION tabc_av(val)
-REAL(db),INTENT(IN) :: val
-REAL(db) :: tabc_av
-tabc_av=val
-END FUNCTION tabc_av
-!************************************************************************
-FUNCTION tabc_dens(density)
-REAL(db),INTENT(IN) :: density(nx,ny,nz,2)
-REAL(db) :: tabc_dens(nx,ny,nz,2)
-tabc_dens=density
-END FUNCTION tabc_dens
-!************************************************************************
-FUNCTION tabc_vec_dens(density)
-REAL(db),INTENT(IN) :: density(nx,ny,nz,3,2)
-REAL(db) :: tabc_vec_dens(nx,ny,nz,3,2)
-tabc_vec_dens=density
-END FUNCTION tabc_vec_dens
-!************************************************************************
-FUNCTION tabc_filename(filename)
-CHARACTER(64),INTENT(IN) :: filename
-CHARACTER(64) :: tabc_filename
-tabc_filename=filename
-END FUNCTION tabc_filename
-!***********************************************************************
-SUBROUTINE init_mpi_2d
-!***********************************************************************
-! calculates 2d wave function distribution over nodes
-!***********************************************************************
-LOGICAL :: isperiodic(2),reorder
-INTEGER :: i,is,noffset
-isperiodic=.FALSE.
-reorder=.FALSE.
-!Calculate best dimensions for the given number of processes
-CALL mpi_dims_create(mpi_nprocs,2,mpi_dims,mpi_ierror)
-!Create 2-dimensional grid of processes to calculate the matrices in diagstep
-CALL mpi_cart_create(mpi_comm_world,2,mpi_dims,isperiodic,reorder,comm2d,mpi_ierror)
-!get my coordinates on the grid
-CALL mpi_cart_get(comm2d,2,mpi_dims,isperiodic,mpi_mycoords,mpi_ierror)
-IF(wflag)WRITE(*,*)'Initialized 2d-grid with dimensions ',mpi_dims(1),' and',mpi_dims(2)
-!Create communicators for x- and y-directions
-CALL mpi_comm_split(comm2d,mpi_mycoords(2),mpi_mycoords(1),comm2d_x,mpi_ierror)
-CALL mpi_comm_split(comm2d,mpi_mycoords(1),mpi_mycoords(2),comm2d_y,mpi_ierror)
-!determine their sizes and ranks (is already known, just to make sure)
-CALL mpi_comm_size(comm2d_x,mpi_size_x,mpi_ierror)
-CALL mpi_comm_rank(comm2d_x,mpi_rank_x,mpi_ierror)
-CALL mpi_comm_size(comm2d_y,mpi_size_y,mpi_ierror)
-CALL mpi_comm_rank(comm2d_y,mpi_rank_y,mpi_ierror)
-CALL init_blacs
-DO is=1,2
-nstloc_x(is) = NUMROC(npsi(is)-npmin(is)+1,NB,MYPROW,0,NPROW)
-nstloc_y(is) = NUMROC(npsi(is)-npmin(is)+1,MB,MYPCOL,0,NPCOL)
-END DO
-ALLOCATE(node_x(nstmax),node_y(nstmax),globalindex_x(nstmax,2),globalindex_y(nstmax,2))
-globalindex_x=0
-globalindex_y=0
-DO is=1,2
-noffset=npmin(is)-1
-DO i=npmin(is),npsi(is)
-localindex_x(i) = INDXG2L(i-noffset, NB, MYPROW, 0, NPROW)
-localindex_y(i) = INDXG2L(i-noffset, MB, MYPCOL, 0, NPCOL)
-node_x(i) = INDXG2P(i-noffset, NB, MYPROW, 0, NPROW)
-node_y(i) = INDXG2P(i-noffset, MB, MYPCOL, 0, NPCOL)
-IF(node_x(i)==mpi_rank_x) globalindex_x(localindex_x(i),is)=i
-IF(node_y(i)==mpi_rank_y) globalindex_y(localindex_y(i),is)=i
-END DO
-END DO
-END SUBROUTINE init_mpi_2d
-!***************************************************************************
-SUBROUTINE init_blacs
-CALL BLACS_PINFO(IAM,NPROCS)
-IF (NPROCS.LT.1) THEN
-CALL BLACS_SETUP(IAM,NPROCS)
-END IF
-NPROW=mpi_dims(1)
-NPCOL=mpi_dims(2)
-CALL BLACS_GET(0,0,CONTXT)
-CALL BLACS_GRIDINIT(CONTXT,'Row',NPROW,NPCOL)
-CALL BLACS_GRIDINFO(CONTXT,NPROW,NPCOL,MYPROW,MYPCOL)
-IF(mpi_rank_x/=MYPROW.OR.mpi_rank_y/=MYPCOL) STOP 'BLACS and MPI init is different'
-END SUBROUTINE init_blacs
 !***********************************************************************
 SUBROUTINE associate_nodes
-!***********************************************************************
-! calculates 1d wave function distribution over nodes
-!***********************************************************************
-INTEGER :: ncount,nst,ip,iloc,iq,mpi_ierror
+INTEGER :: ncount,nst,ip,iloc
 ncount=0
-globalindex=0
-node=0
-! DO iq=1,2
-! DO nst=1,nstloc_x(iq)
-! IF(INT(REAL(nst-1)/nstloc_x(iq)*mpi_size_y)==mpi_rank_y) &
-! node(globalindex_x(nst,iq))=mpi_myproc
-! END DO
-! END DO
-! CALL mpi_allreduce(MPI_IN_PLACE,node,nstmax,mpi_integer,mpi_sum,mpi_comm_world,mpi_ierror)
-DO iq=1,2
-DO nst=npmin(iq),npsi(iq)
-node(nst)=MOD((nst-npmin(iq))/nb_psi,mpi_nprocs)
-END DO
-END DO
+DO nst=1,nstmax
+ncount=MOD(ncount,mpi_nprocs)
+node(nst)=ncount
+ncount=ncount+1
+ENDDO
 nstloc=0
 DO nst=1,nstmax
 IF(node(nst)==mpi_myproc) THEN
@@ -158,35 +39,25 @@ CONTAINS
 ENDIF
 ENDDO
 DO ip=0,mpi_nprocs-1
 iloc=0
 DO nst=1,nstmax
 IF(node(nst)==ip) THEN
 iloc=iloc+1
 localindex(nst)=iloc
 ENDIF
 ENDDO
 ENDDO
-IF(wflag) THEN
-WRITE(*,'(A/(1X,20I4))') &
-' sorting of wfs on nodes:',node(1:nstmax)
-ENDIF
 CALL mpi_barrier (mpi_comm_world, mpi_ierror)
-CALL mpi_relate_comm
 END SUBROUTINE associate_nodes
-!***********************************************************************
-SUBROUTINE mpi_relate_comm
-ALLOCATE(node_1dto2d_x(0:mpi_nprocs-1),node_1dto2d_y(0:mpi_nprocs-1),&
-node_2dto1d(0:mpi_size_x-1,0:mpi_size_y-1))
-node_1dto2d_x=0
-node_1dto2d_y=0
-node_2dto1d=0
-node_1dto2d_x(mpi_myproc)=mpi_rank_x
-node_1dto2d_y(mpi_myproc)=mpi_rank_y
-node_2dto1d(mpi_rank_x,mpi_rank_y)=mpi_myproc
-CALL mpi_allreduce(MPI_IN_PLACE,node_1dto2d_x,mpi_nprocs,mpi_integer,mpi_sum,mpi_comm_world,mpi_ierror)
-CALL mpi_allreduce(MPI_IN_PLACE,node_1dto2d_y,mpi_nprocs,mpi_integer,mpi_sum,mpi_comm_world,mpi_ierror)
-CALL mpi_allreduce(MPI_IN_PLACE,node_2dto1d,mpi_nprocs,mpi_integer,mpi_sum,mpi_comm_world,mpi_ierror)
-END SUBROUTINE mpi_relate_comm
 !***********************************************************************
 SUBROUTINE collect_densities
 USE Densities, ONLY : rho,tau,current,sodens,sdens
 REAL(db) :: tmp_rho(nx,ny,nz,2),tmp_current(nx,ny,nz,3,2)
-REAL(db) :: rsum
 CALL mpi_barrier (mpi_comm_world, mpi_ierror)
 CALL mpi_allreduce(rho,tmp_rho,2*nx*ny*nz, &
 mpi_double_precision,mpi_sum,mpi_comm_world,mpi_ierror)
@@ -237,137 +108,9 @@ CONTAINS
 sp_parity=tmpgat
 CALL mpi_barrier (mpi_comm_world,mpi_ierror)
 END SUBROUTINE collect_sp_properties
-!***********************************************************************
-SUBROUTINE collect_energies(delesum,sumflu)
-!***********************************************************************
-! collects s.p. energies, fluctuation measure and energy differences
-!***********************************************************************
-REAL(db),INTENT(INOUT) :: delesum,sumflu
-REAL(db) :: tmpgat(nstmax),tmpgat3(3,nstmax)
-INTEGER :: nst
-CALL mpi_barrier (mpi_comm_world,mpi_ierror)
-DO nst=1,nstmax
-IF(node(nst)/=mpi_myproc) THEN
-sp_energy(nst)=0.0d0
-sp_efluct1(nst)=0.0d0
-sp_efluct2(nst)=0.0d0
-END IF
-ENDDO
-CALL mpi_allreduce(sp_energy,tmpgat,nstmax,mpi_double_precision,mpi_sum, &
-mpi_comm_world,mpi_ierror)
-sp_energy=tmpgat
-CALL mpi_allreduce(sumflu,tmpgat(1),1,mpi_double_precision,mpi_sum, &
-mpi_comm_world,mpi_ierror)
-sumflu=tmpgat(1)
-CALL mpi_allreduce(delesum,tmpgat(1),1,mpi_double_precision,mpi_sum, &
-mpi_comm_world,mpi_ierror)
-delesum=tmpgat(1)
-CALL mpi_barrier (mpi_comm_world,mpi_ierror)
-END SUBROUTINE collect_energies
-!***********************************************************************
-SUBROUTINE mpi_wf_1d2x(psi,psi_x,iq)
-USE Trivial
-COMPLEX(db),INTENT(IN) :: psi(:,:,:,:,:)
-COMPLEX(db),INTENT(OUT) :: psi_x(:,:)
-INTEGER, INTENT(IN) :: iq
-INTEGER :: nst,ierr,is
-IF(.NOT.ALLOCATED(recvcounts)) THEN
-ALLOCATE(recvcounts(0:mpi_size_y-1,2),displs(0:mpi_size_y-1,2))
-recvcounts=0
-displs=0
-first(1)=1
-DO is=1,2
-DO nst=1,nstloc_x(is)
-recvcounts(node_1dto2d_y(node(globalindex_x(nst,is))),is)=&
-recvcounts(node_1dto2d_y(node(globalindex_x(nst,is))),is)+1
-END DO
-END DO
-first(2)=recvcounts(mpi_rank_y,1)+1
-recvcounts=recvcounts*size(psi(:,:,:,:,1))
-DO is=1,2
-DO nst=1,mpi_size_y-1
-displs(nst,is)=SUM(recvcounts(0:nst-1,is))
-END DO
-END DO
-END IF
-psi_x=0.0d0
-CALL mpi_allgatherv(psi(:,:,:,:,first(iq)),recvcounts(mpi_rank_y,iq),mpi_double_complex,&
-psi_x,recvcounts(:,iq),displs(:,iq),mpi_double_complex,comm2d_y,ierr)
-END SUBROUTINE mpi_wf_1d2x
-!***********************************************************************
-SUBROUTINE mpi_wf_x2y(psi_x,psi_y,iq)
-COMPLEX(db),INTENT(IN) :: psi_x(:,:)
-COMPLEX(db),INTENT(OUT) :: psi_y(:,:)
-INTEGER, INTENT(IN) :: iq
-INTEGER :: nst,ierr,is,lastnode,ip,first,last
-INTEGER,ALLOCATABLE,SAVE :: rootnode(:,:),firstwf(:,:),nwf(:,:)
-IF(.NOT.ALLOCATED(rootnode)) THEN
-nst=MAX(nstloc_y(1)+1,nstloc_y(2)+1)
-ALLOCATE(rootnode(nst,2),firstwf(nst,2),nwf(nst,2))
-rootnode=0
-firstwf=0
-nwf=0
-DO is=1,2
-lastnode=-1
-ip=0
-DO nst=1,nstloc_y(is)
-IF(lastnode==node_x(globalindex_y(nst,is))) THEN
-nwf(ip,is)=nwf(ip,is)+1
-ELSE
-ip=ip+1
-firstwf(ip,is)=nst
-nwf(ip,is)=1
-lastnode=node_x(globalindex_y(nst,is))
-rootnode(ip,is)=lastnode
-END IF
-END DO
-END DO
-END IF
-psi_y=0.0d0
-ip=1
-DO WHILE (nwf(ip,iq)>0)
-first=firstwf(ip,iq)
-last=firstwf(ip,iq)+nwf(ip,iq)-1
-IF(rootnode(ip,iq)==mpi_rank_x) &
-psi_y(:,first:last)=&
-psi_x(:,localindex_x(globalindex_y(first,iq)):localindex_x(globalindex_y(last,iq)))
-CALL mpi_bcast(psi_y(:,first:last),size(psi_y(:,first:last)),mpi_double_complex,&
-rootnode(ip,iq),comm2d_x,ierr)
-ip=ip+1
-END DO
-END SUBROUTINE mpi_wf_x2y
-!***********************************************************************
-SUBROUTINE collect_wf_1d_x(psi,psi_x,iq)
-!*********************************************************************************
-! adds all |psi> together and copies them to 1d distribution over nodes.
-! Copies each wave function at a time
-!*********************************************************************************
-USE Trivial
-INTEGER,INTENT(IN) :: iq
-COMPLEX(db),INTENT(INOUT) :: psi_x(:,:)
-COMPLEX(db),INTENT(OUT) :: psi(:,:,:,:,:)
-INTEGER :: ierr
-CALL mpi_reduce_scatter(psi_x,psi(:,:,:,:,first(iq)),recvcounts(:,iq),mpi_double_complex,&
-mpi_sum,comm2d_y,ierr)
-END SUBROUTINE collect_wf_1d_x
 !***********************************************************************
 SUBROUTINE finish_mpi
 INTEGER :: ierr
 CALL mpi_finalize(ierr)
 END SUBROUTINE finish_mpi
-!***********************************************************************
-SUBROUTINE mpi_start_timer(index)
-INTEGER,INTENT(IN) :: index
-INTEGER :: ierr
-CALL mpi_barrier (mpi_comm_world,ierr)
-timer(index)=mpi_wtime()
-END SUBROUTINE mpi_start_timer
-!***********************************************************************
-SUBROUTINE mpi_stop_timer(index,textline)
-INTEGER,INTENT(IN) :: index
-CHARACTER(*),INTENT(IN) :: textline
-INTEGER :: ierr
-CALL mpi_barrier (mpi_comm_world,ierr)
-IF(wflag)WRITE(*,'(A20,F10.4)')textline,mpi_wtime()-timer(index)
-END SUBROUTINE mpi_stop_timer
 END MODULE Parallel
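For illustration, the associate_nodes body restored above amounts to a plain round-robin assignment of states to ranks; a self-contained sketch of that indexing logic with made-up sizes (hypothetical nstmax=10 states on nprocs=4 ranks, no MPI required):

PROGRAM roundrobin_demo
  IMPLICIT NONE
  INTEGER, PARAMETER :: nstmax = 10, nprocs = 4
  INTEGER :: node(nstmax), localindex(nstmax)
  INTEGER :: ncount, nst, ip, iloc
  ncount = 0
  DO nst = 1, nstmax
     ncount = MOD(ncount, nprocs)
     node(nst) = ncount          ! state nst lives on rank node(nst)
     ncount = ncount + 1
  END DO
  DO ip = 0, nprocs-1            ! position of each state within its rank
     iloc = 0
     DO nst = 1, nstmax
        IF(node(nst) == ip) THEN
           iloc = iloc + 1
           localindex(nst) = iloc
        END IF
     END DO
  END DO
  WRITE(*,'(A,10I3)') ' node:       ', node
  WRITE(*,'(A,10I3)') ' localindex: ', localindex
END PROGRAM roundrobin_demo

Rank 0 thus owns states 1, 5, 9, rank 1 owns 2, 6, 10, and so on, matching the MOD-based assignment in the diff.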