coupler.F90
!~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
!    Math and Computer Science Division, Argonne National Laboratory    !
!-----------------------------------------------------------------------
! CVS coupler.F90,v 1.8 2004-04-23 20:57:10 jacob Exp
! CVS MCT_2_8_0
!BOP -------------------------------------------------------------------
!
! !ROUTINE: coupler -- coupler for unit tester
!
! !DESCRIPTION:
! A coupler subroutine to test functionality of MCT.
!
! !INTERFACE:
!
subroutine coupler (comm,ncomps,compid)
!
! !USES:
!
! Get the things needed from MCT by "Use,only" with renaming:
!
! ---------- first group is identical to what model.F90 uses ----------
!
!---Component Model Registry
  use m_MCTWorld,only: MCTWorld_init => init
  use m_MCTWorld,only: MCTWorld_clean => clean
!---Domain Decomposition Descriptor DataType and associated methods
  use m_GlobalSegMap,only: GlobalSegMap
  use m_GlobalSegMap,only: GlobalSegMap_init => init
  use m_GlobalSegMap,only: GlobalSegMap_lsize => lsize
  use m_GlobalSegMap,only: GlobalSegMap_clean => clean
  use m_GlobalSegMap,only: GlobalSegMap_Ordpnts => OrderedPoints
!---Field Storage DataType and associated methods
  use m_AttrVect,only : AttrVect
  use m_AttrVect,only : AttrVect_init => init
  use m_AttrVect,only : AttrVect_clean => clean
  use m_AttrVect,only : AttrVect_importRAttr => importRAttr
!---Intercomponent communications scheduler
  use m_Router,only: Router
  use m_Router,only: Router_init => init
  use m_Router,only: Router_clean => clean
!---Intercomponent transfer
  use m_Transfer,only : MCT_Send => send
  use m_Transfer,only : MCT_Recv => recv
! ---------- because the coupler will do the interpolation, ----------
! ---------- it needs these additional methods ----------
!
!---Sparse Matrix DataType and associated methods
  use m_SparseMatrix, only : SparseMatrix
  use m_SparseMatrix, only : SparseMatrix_init => init
  use m_SparseMatrix, only : SparseMatrix_importGRowInd => &
                                          importGlobalRowIndices
  use m_SparseMatrix, only : SparseMatrix_importGColInd => &
                                          importGlobalColumnIndices
  use m_SparseMatrix, only : SparseMatrix_importMatrixElts => &
                                          importMatrixElements
  use m_SparseMatrixPlus, only : SparseMatrixPlus
  use m_SparseMatrixPlus, only : SparseMatrixPlus_init => init
  use m_SparseMatrixPlus, only : SparseMatrixPlus_clean => clean
  use m_SparseMatrixPlus, only : Xonly ! Decompose matrix by row
!---Matrix-Vector multiply methods
  use m_MatAttrVectMul, only: MCT_MatVecMul => sMatAvMult
!---MPEU I/O utilities
  use m_stdio
  use m_ioutil
  implicit none
  include "mpif.h"
! !INPUT PARAMETERS:
  integer,intent(in) :: comm
  integer,intent(in) :: ncomps
  integer,intent(in) :: compid
!
!EOP ___________________________________________________________________
! Local variables
  character(len=*), parameter :: cplname='coupler.F90'
  integer :: nxa  ! number of points in x-direction, atmos
  integer :: nya  ! number of points in y-direction, atmos
  integer :: nxo  ! number of points in x-direction, ocean
  integer :: nyo  ! number of points in y-direction, ocean
  character(len=100),parameter :: &
        RemapMatrixFile='../../data/t42_to_popx1_c_mat.asc'
! Loop indices
  integer :: i,j,k,n
  logical :: match
! MPI variables
  integer :: rank, nprocs, root, ierr
! MCTWorld variables
  integer :: AtmID
! Grid variables
  integer :: localsize
! GlobalSegMap variables
  type(GlobalSegMap) :: AtmGSMap, OcnGSMap
  integer,dimension(1) :: start,length
  integer, dimension(:), pointer :: points
  integer :: latsize, lonsize
  integer :: rowindex, colindex, boxvertex
! AttrVect variables
  type(AttrVect) :: AtmAV, OcnAV
  integer :: aavsize,oavsize
! Router variables
  type(Router) :: Rout
! SparseMatrix variables
  integer :: mdev
  integer :: num_elements, nRows, nColumns
  integer, dimension(2) :: src_dims, dst_dims
  integer, dimension(:), pointer :: rows, columns
  real, dimension(:), pointer :: weights
! A2O SparseMatrix elements on root
  type(SparseMatrix) :: sMat
! A2O distributed SparseMatrixPlus variables
  type(SparseMatrixPlus) :: A2OMatPlus
! _____________________________________________________________________
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! INITIALIZATION PHASE
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! LOCAL RANK AND SIZE
  call MPI_COMM_RANK(comm,rank,ierr)
  call MPI_COMM_SIZE(comm,nprocs,ierr)
  root = 0
  if(rank==0) write(6,*) cplname,' MyID ', compid
  if(rank==0) write(6,*) cplname,' Num procs ', nprocs
! Initialize MCTWorld
  call MCTWorld_init(ncomps,MPI_COMM_WORLD,comm,compid)
! Set the atm component id.  It must be known to this
! component; MCT doesn't provide it.
  AtmID=1
! Set grid dimensions for the atmosphere and ocean grids.
! MCT could be used for this (by defining a GeneralGrid in
! each component and sending them to the coupler), but for this
! simple example we'll assume they're known to the coupler.
  nxa = 128
  nya = 64
  nxo = 320
  nyo = 384
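! Illustrative sanity check (an addition, not part of the original
! tester; it uses only names declared above): the 1-d decompositions
! below assume nprocs evenly divides each grid size, otherwise the
! trailing points would be silently left out of the segment maps.
  if (mod(nxa*nya, nprocs) /= 0 .or. mod(nxo*nyo, nprocs) /= 0) then
     if (rank == root) write(stderr,*) cplname, &
          ': nprocs must evenly divide nxa*nya and nxo*nyo'
     call MPI_ABORT(MPI_COMM_WORLD, 1, ierr)
  endif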
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! Read matrix weights for interpolation from a file.
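! The file layout implied by the reads below is plain ASCII:
! one line with the number of nonzero matrix elements, one line with
! the source grid dimensions, one line with the destination grid
! dimensions, then one "row column weight" triplet per line.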
  if (rank == root) then
     mdev = luavail()
     open(mdev, file=trim(RemapMatrixFile), status="old")
     read(mdev,*) num_elements
     read(mdev,*) src_dims(1), src_dims(2)
     read(mdev,*) dst_dims(1), dst_dims(2)
     allocate(rows(num_elements), columns(num_elements), &
              weights(num_elements), stat=ierr)
     do n=1, num_elements
        read(mdev,*) rows(n), columns(n), weights(n)
     end do
     close(mdev)
! Initialize a SparseMatrix
     nRows = dst_dims(1) * dst_dims(2)
     nColumns = src_dims(1) * src_dims(2)
     call SparseMatrix_init(sMat,nRows,nColumns,num_elements)
     call SparseMatrix_importGRowInd(sMat, rows, size(rows))
     call SparseMatrix_importGColInd(sMat, columns, size(columns))
     call SparseMatrix_importMatrixElts(sMat, weights, size(weights))
     deallocate(rows, columns, weights, stat=ierr)
  endif
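! Note that sMat is filled only on the root process; the
! SparseMatrixPlus_init call further below takes this root-resident
! matrix and builds the distributed version every process will use.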
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! Initialize a Global Segment Map for the Ocean.
! Set up a 1-d decomposition.
! There is just 1 segment per processor.
  localsize = nxo*nyo / nprocs
! we'll use the distributed init of GSMap so
! initialize start and length arrays for this processor
  start(1) = (rank*localsize) + 1
  length(1) = localsize
! initialize the GSMap
  call GlobalSegMap_init(OcnGSMap,start,length,root,comm,compid)
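! Worked example of the arithmetic above: the global ocean vector has
! nxo*nyo = 320*384 = 122880 points, so on, say, 4 processors each
! rank owns localsize = 30720 points, and rank 2 holds the single
! segment 61441..92160. (The processor count here is illustrative;
! any count that divides the grid sizes evenly behaves the same way.)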
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! Initialize a Global Segment Map for the Atmosphere.
! Set up a 1-d decomposition.
! There is just 1 segment per processor.
  localsize = nxa*nya / nprocs
! we'll use the distributed init of GSMap so
! initialize start and length arrays for this processor
  start(1) = (rank*localsize) + 1
  length(1) = localsize
! initialize the GSMap
  call GlobalSegMap_init(AtmGSMap,start,length,root,comm,compid)
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! Use a GSMap function:
! return the points local to this processor
! in their assumed order.
  call GlobalSegMap_Ordpnts(AtmGSMap,rank,points)
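! points now holds this processor's global point numbers in their
! local storage order, e.g. 2049..4096 on rank 1 of the illustrative
! 4-processor run (nxa*nya = 8192, so 2048 points per rank).  The
! atmosphere writes these same numbers into its second attribute,
! which is what the run-phase check below relies on.  Ordpnts
! allocates points internally, so the finalize phase deallocates it.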
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! Build a SparseMatrixPlus for doing the interpolation.
! Specify the matrix decomposition to be by row,
! following the atmosphere's decomposition.
  call SparseMatrixPlus_init(A2OMatPlus, sMat, AtmGSMap, OcnGSMap, &
                             Xonly, root, comm, compid)
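! A2OMatPlus now bundles the distributed matrix elements with the
! communication schedule MCT needs at multiply time.  As we
! understand the Xonly strategy, only the input (x, here atmosphere)
! vector data is communicated during the multiply.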
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! Initialize an Attribute Vector on the atmosphere grid
  aavsize = GlobalSegMap_lsize(AtmGSMap,comm)
  if(rank==0) write(6,*) cplname, ' localsize: Atm ', aavsize
  call AttrVect_init(AtmAV,rList="field1:field2",lsize=aavsize)
! Initialize an Attribute Vector on the ocean grid
  oavsize = GlobalSegMap_lsize(OcnGSMap,comm)
  if(rank==0) write(6,*) cplname, ' localsize: Ocn ', oavsize
  call AttrVect_init(OcnAV,rList="field1:field2",lsize=oavsize)
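! rList="field1:field2" declares two REAL attributes per grid point;
! they are stored in the 2 x lsize array AV%rAttr, which is how the
! run loop below reads the second attribute as AtmAV%rAttr(2,i).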
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! Initialize a Router
  call Router_init(AtmID,AtmGSMap,comm,Rout)
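! The Router records which atmosphere (component AtmID) processes own
! which pieces of AtmGSMap's decomposition, so MCT_Recv in the run
! loop can exchange data directly with the right remote processes.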
!!! END OF INIT !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! RUN PHASE
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
  do j=1,10   ! "timestep" loop
! coupler calculations would go here
     match=.TRUE.
! Receive the data
     call MCT_Recv(AtmAV,Rout)
! The 2nd attribute holds the value of each grid point in
! the index numbering scheme.  Check the received values
! against the points on this processor.  They should
! match exactly.
     do i=1,aavsize
        if( int(AtmAV%rAttr(2,i)) .ne. points(i)) then
           write(6,*) cplname,rank, " Data doesn't match ",i
           match=.FALSE.
        endif
     enddo
     if(match .and. j==10) &
          write(6,*) cplname," Last step, all points match on ",rank
     if(rank==0) write(6,*) cplname, " Received data step ",j
! Interpolate by doing a parallel SparseMatrix-AttrVect multiply.
! Note: it doesn't make much sense to interpolate "field2", which
! holds the grid point indices, but MatVecMul will interpolate all
! real attributes.
     call MCT_MatVecMul(AtmAV, A2OMatPlus, OcnAV)
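! OcnAV now holds both attributes interpolated onto the ocean
! decomposition: a y = M*x multiply was performed in parallel, with
! y distributed by OcnGSMap and x by AtmGSMap.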
     if(rank==0) write(6,*) cplname," Data transformed step ",j
! pass the interpolated data on to an ocean model and/or
! do more calculations here
  enddo
!!! END OF RUN !!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! FINALIZE PHASE
!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!
! deallocate memory
  call Router_clean(Rout)
  call AttrVect_clean(AtmAV)
  call AttrVect_clean(OcnAV)
  call GlobalSegMap_clean(AtmGSMap)
  call GlobalSegMap_clean(OcnGSMap)
! clean the distributed matrix (SparseMatrixPlus_clean is imported above)
  call SparseMatrixPlus_clean(A2OMatPlus)
! free the points array allocated inside GlobalSegMap_Ordpnts
  deallocate(points, stat=ierr)
  call MCTWorld_clean()
  if(rank==0) write(6,*) cplname, " done"

end subroutine coupler