nemogcm.F90

MODULE nemogcm
   !!======================================================================
   !!                       ***  MODULE nemogcm  ***
   !! Ocean system : NEMO GCM (ocean dynamics, on-line tracers, biochemistry and sea-ice)
   !!======================================================================
   !! History :  OPA  ! 1990-10  (C. Levy, G. Madec)  Original code
   !!            7.0  ! 1991-11  (M. Imbard, C. Levy, G. Madec)
   !!            7.1  ! 1993-03  (M. Imbard, C. Levy, G. Madec, O. Marti, M. Guyon, A. Lazar,
   !!                             P. Delecluse, C. Perigaud, G. Caniaux, B. Colot, C. Maes) release 7.1
   !!             -   ! 1992-06  (L. Terray)  coupling implementation
   !!             -   ! 1993-11  (M.A. Filiberti)  IGLOO sea-ice
   !!            8.0  ! 1996-03  (M. Imbard, C. Levy, G. Madec, O. Marti, M. Guyon, A. Lazar,
   !!                             P. Delecluse, L. Terray, M.A. Filiberti, J. Vialar, A.M. Treguier, M. Levy) release 8.0
   !!            8.1  ! 1997-06  (M. Imbard, G. Madec)
   !!            8.2  ! 1999-11  (M. Imbard, H. Goosse)  LIM sea-ice model
   !!                 ! 1999-12  (V. Thierry, A-M. Treguier, M. Imbard, M-A. Foujols)  OPEN-MP
   !!                 ! 2000-07  (J-M Molines, M. Imbard)  Open Boundary Conditions (CLIPPER)
   !!   NEMO     1.0  ! 2002-08  (G. Madec)  F90: Free form and modules
   !!             -   ! 2004-06  (R. Redler, NEC CCRLE, Germany)  add OASIS[3/4] coupled interfaces
   !!             -   ! 2004-08  (C. Talandier)  New trends organization
   !!             -   ! 2005-06  (C. Ethe)  Add the 1D configuration possibility
   !!             -   ! 2005-11  (V. Garnier)  Surface pressure gradient organization
   !!             -   ! 2006-03  (L. Debreu, C. Mazauric)  Agrif implementation
   !!             -   ! 2006-04  (G. Madec, R. Benshila)  Step reorganization
   !!             -   ! 2007-07  (J. Chanut, A. Sellar)  Unstructured open boundaries (BDY)
   !!            3.2  ! 2009-08  (S. Masson)  open/write in the listing file in mpp
   !!            3.3  ! 2010-05  (K. Mogensen, A. Weaver, M. Martin, D. Lea)  Assimilation interface
   !!             -   ! 2010-10  (C. Ethe, G. Madec)  reorganisation of initialisation phase
   !!            3.3.1! 2011-01  (A. R. Porter, STFC Daresbury)  dynamical allocation
   !!            3.4  ! 2011-11  (C. Harris)  decomposition changes for running with CICE
   !!----------------------------------------------------------------------
   !!----------------------------------------------------------------------
   !!   nemo_gcm       : solve ocean dynamics, tracer, biogeochemistry and/or sea-ice
   !!   nemo_init      : initialization of the NEMO system
   !!   nemo_ctl       : initialisation of the control print
   !!   nemo_closefile : close remaining open files
   !!   nemo_alloc     : dynamical allocation
   !!   nemo_partition : calculate MPP domain decomposition
   !!   factorise      : calculate the factors of the no. of MPI processes
   !!----------------------------------------------------------------------
   USE step_oce       ! module used in the ocean time stepping module
   USE domcfg         ! domain configuration              (dom_cfg routine)
   USE mppini         ! shared/distributed memory setting (mpp_init routine)
   USE domain         ! domain initialization             (dom_init routine)
#if defined key_nemocice_decomp
   USE ice_domain_size, only: nx_global, ny_global
#endif
   USE istate         ! initial state setting          (istate_init routine)
   USE phycst         ! physical constant                  (par_cst routine)
   USE diaobs         ! Observation diagnostics       (dia_obs_init routine)
   USE lib_fortran    ! Fortran utilities (allows no signed zero when 'key_nosignedzero' defined)
   USE step           ! NEMO time-stepping                     (stp routine)
   USE icbini         ! handle bergs, initialisation
   USE icbstp         ! handle bergs, calving, thermodynamics and transport
   USE cpl_oasis3     ! OASIS3 coupling
   USE lib_mpp        ! distributed memory computing
#if defined key_iomput
   USE xios
#endif
   USE ooo_data       ! Offline obs_oper data
   USE ooo_read       ! Offline obs_oper read routines
   USE ooo_intp       ! Offline obs_oper interpolation

   IMPLICIT NONE
   PRIVATE

   PUBLIC   nemo_gcm     ! called by nemo.f90
   PUBLIC   nemo_init    ! needed by AGRIF
   PUBLIC   nemo_alloc   ! needed by TAM

   CHARACTER(lc) ::   cform_aaa = "( /, 'AAAAAAAA', / ) "   ! flag for output listing

   !!----------------------------------------------------------------------
   !! NEMO/OPA 4.0 , NEMO Consortium (2011)
   !! $Id: nemogcm.F90 4990 2014-12-15 16:42:49Z timgraham $
   !! Software governed by the CeCILL licence     (NEMOGCM/NEMO_CeCILL.txt)
   !!----------------------------------------------------------------------
CONTAINS

   SUBROUTINE nemo_gcm
      !!----------------------------------------------------------------------
      !!                     ***  SUBROUTINE nemo_gcm  ***
      !!
      !! ** Purpose : To use NEMO components to interpolate model fields
      !!              to observation space.
      !!
      !! ** Method  : 1. Initialise NEMO
      !!              2. Initialise offline obs_oper
      !!              3. Cycle through match ups
      !!              4. Write results to file
      !!
      !!----------------------------------------------------------------------
      !! Class 4 output stream switch
      USE obs_fbm, ONLY: ln_cl4
      !! Initialise NEMO
      CALL nemo_init
      !! Initialise Offline obs_oper data
      CALL ooo_data_init( ln_cl4 )
      !! Loop over various model counterparts
      DO jimatch = 1, cl4_match_len
         IF (jimatch .GT. 1) THEN
            !! Initialise obs_oper
            CALL dia_obs_init
         END IF
         !! Interpolate to observation space
         CALL ooo_interp
         !! Pipe to output files
         CALL dia_obs_wri
         !! Reset the obs_oper between
         CALL dia_obs_dealloc
      END DO
      !! Safely stop MPI
      IF(lk_mpp) CALL mppstop  ! end mpp communications
   END SUBROUTINE nemo_gcm

   SUBROUTINE nemo_init
      !!----------------------------------------------------------------------
      !!                     ***  ROUTINE nemo_init  ***
      !!
      !! ** Purpose :   initialization of the NEMO GCM
      !!----------------------------------------------------------------------
      INTEGER ::   ji            ! dummy loop indices
      INTEGER ::   ilocal_comm   ! local integer
      CHARACTER(len=80), DIMENSION(16) ::   cltxt
      !!
      NAMELIST/namctl/ ln_ctl  , nn_print, nn_ictls, nn_ictle,   &
         &             nn_isplt, nn_jsplt, nn_jctls, nn_jctle,   &
         &             nn_bench, nn_timing
      NAMELIST/namcfg/ cp_cfg, cp_cfz, jp_cfg, jpidta, jpjdta, jpkdta, jpiglo, jpjglo,   &
         &             jpizoom, jpjzoom, jperio, ln_use_jattr
      !!----------------------------------------------------------------------
      !
      cltxt = ''
      cxios_context = 'nemo'
      !
      !                             ! Open reference namelist and configuration namelist files
      CALL ctl_opn( numnam_ref, 'namelist_ref', 'OLD', 'FORMATTED', 'SEQUENTIAL', -1, 6, .FALSE. )
      CALL ctl_opn( numnam_cfg, 'namelist_cfg', 'OLD', 'FORMATTED', 'SEQUENTIAL', -1, 6, .FALSE. )
      !
      REWIND( numnam_ref )            ! Namelist namctl in reference namelist : Control prints & Benchmark
      READ  ( numnam_ref, namctl, IOSTAT = ios, ERR = 901 )
901   IF( ios /= 0 ) CALL ctl_nam ( ios , 'namctl in reference namelist', .TRUE. )
      REWIND( numnam_cfg )            ! Namelist namctl in configuration namelist : Control prints & Benchmark
      READ  ( numnam_cfg, namctl, IOSTAT = ios, ERR = 902 )
902   IF( ios /= 0 ) CALL ctl_nam ( ios , 'namctl in configuration namelist', .TRUE. )
      !
      REWIND( numnam_ref )            ! Namelist namcfg in reference namelist : Control prints & Benchmark
      READ  ( numnam_ref, namcfg, IOSTAT = ios, ERR = 903 )
903   IF( ios /= 0 ) CALL ctl_nam ( ios , 'namcfg in reference namelist', .TRUE. )
      REWIND( numnam_cfg )            ! Namelist namcfg in configuration namelist : Control prints & Benchmark
      READ  ( numnam_cfg, namcfg, IOSTAT = ios, ERR = 904 )
904   IF( ios /= 0 ) CALL ctl_nam ( ios , 'namcfg in configuration namelist', .TRUE. )
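      !
      ! Note: each namelist block is read first from the reference file and then from the
      ! configuration file, so any value present in namelist_cfg overrides the namelist_ref
      ! default, while entries absent from namelist_cfg keep their reference values.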
      !                             !--------------------------------------------!
      !                             !  set communicator & select the local node  !
      !                             !  NB: mynode also opens output.namelist.dyn !
      !                             !      on unit number numond on first proc   !
      !                             !--------------------------------------------!
#if defined key_iomput
      IF( Agrif_Root() ) THEN
         IF( lk_oasis ) THEN
            CALL cpl_init( ilocal_comm )                                          ! nemo local communicator given by oasis
            CALL xios_initialize( "oceanx", local_comm=ilocal_comm )              ! send nemo communicator to xios
         ELSE
            CALL xios_initialize( "for_xios_mpi_id", return_comm=ilocal_comm )    ! nemo local communicator given by xios
         ENDIF
      ENDIF
      narea = mynode( cltxt, 'output.namelist.dyn', numnam_ref, numnam_cfg, numond , nstop, ilocal_comm )   ! Nodes selection
#else
      IF( lk_oasis ) THEN
         IF( Agrif_Root() ) THEN
            CALL cpl_init( ilocal_comm )                                          ! nemo local communicator given by oasis
         ENDIF
         narea = mynode( cltxt, 'output.namelist.dyn', numnam_ref, numnam_cfg, numond , nstop, ilocal_comm )   ! Nodes selection (control print return in cltxt)
      ELSE
         ilocal_comm = 0
         narea = mynode( cltxt, 'output.namelist.dyn', numnam_ref, numnam_cfg, numond , nstop )                ! Nodes selection (control print return in cltxt)
      ENDIF
#endif
      narea = narea + 1                        ! mynode return the rank of proc (0 --> jpnij -1 )

      lwm = (narea == 1)                       ! control of output namelists
      lwp = (narea == 1) .OR. ln_ctl           ! control of all listing output print

      IF(lwm) THEN
         ! write merged namelists from earlier to output namelist now that the
         ! file has been opened in call to mynode. nammpp has already been
         ! written in mynode (if lk_mpp_mpi)
         WRITE( numond, namctl )
         WRITE( numond, namcfg )
      ENDIF

      ! If dimensions of processor grid weren't specified in the namelist file
      ! then we calculate them here now that we have our communicator size
      IF( (jpni < 1) .OR. (jpnj < 1) ) THEN
#if defined key_mpp_mpi
         IF( Agrif_Root() )   CALL nemo_partition( mppsize )
#else
         jpni  = 1
         jpnj  = 1
         jpnij = jpni*jpnj
#endif
      END IF

      ! Calculate domain dimensions given calculated jpni and jpnj
      ! This used to be done in par_oce.F90 when they were parameters rather
      ! than variables
      IF( Agrif_Root() ) THEN
#if defined key_nemocice_decomp
         jpi = ( nx_global+2-2*jpreci + (jpni-1) ) / jpni + 2*jpreci   ! first  dim.
         jpj = ( ny_global+2-2*jprecj + (jpnj-1) ) / jpnj + 2*jprecj   ! second dim.
#else
         jpi = ( jpiglo     -2*jpreci + (jpni-1) ) / jpni + 2*jpreci   ! first  dim.
         jpj = ( jpjglo     -2*jprecj + (jpnj-1) ) / jpnj + 2*jprecj   ! second dim.
#endif
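         ! Illustrative example (values assumed, not taken from this file): with jpiglo = 182,
         ! jpreci = 1 and jpni = 4 the formula gives jpi = ( 180 + 3 ) / 4 + 2 = 45 + 2 = 47
         ! (integer division), i.e. the inner global i-dimension is split over jpni processors,
         ! rounded up, and the 2*jpreci halo points are added back to each sub-domain.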
         jpk   = jpkdta                        ! third dim
         jpim1 = jpi-1                         ! inner domain indices
         jpjm1 = jpj-1                         !   "       "     "
         jpkm1 = jpk-1                         !   "       "     "
         jpij  = jpi*jpj                       ! jpi x jpj
      ENDIF
      IF(lwp) THEN                             ! open listing units
         !
         CALL ctl_opn( numout, 'ocean.output', 'REPLACE', 'FORMATTED', 'SEQUENTIAL', -1, 6, .FALSE., narea )
         !
         WRITE(numout,*)
         WRITE(numout,*) ' CNRS - NERC - Met OFFICE - MERCATOR-ocean - INGV - CMCC'
         WRITE(numout,*) ' NEMO team'
         WRITE(numout,*) ' Ocean General Circulation Model'
         WRITE(numout,*) ' version 3.6 (2015) '
         WRITE(numout,*)
         WRITE(numout,*)
         DO ji = 1, SIZE(cltxt)
            IF( TRIM(cltxt(ji)) /= '' )   WRITE(numout,*) cltxt(ji)   ! control print of mynode
         END DO
         WRITE(numout,cform_aaa)               ! Flag AAAAAAA
         !
      ENDIF

      ! Now we know the dimensions of the grid and numout has been set we can
      ! allocate arrays
      CALL nemo_alloc()

      !                             !-------------------------------!
      !                             !  NEMO general initialization  !
      !                             !-------------------------------!
      CALL nemo_ctl                            ! Control prints & Benchmark

      !                                        ! Domain decomposition
      IF( jpni*jpnj == jpnij ) THEN   ;   CALL mpp_init    ! standard cutting out
      ELSE                            ;   CALL mpp_init2   ! eliminate land processors
      ENDIF
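      ! mpp_init2 is taken when jpnij (typically set in the nammpp namelist) is smaller than
      ! jpni*jpnj, i.e. when sub-domains containing only land points are to be removed from
      ! the MPI decomposition.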
      !
      IF( nn_timing == 1 )   CALL timing_init
      !
      !                                        ! General initialization
      CALL phy_cst                             ! Physical constants
      CALL eos_init                            ! Equation of state
      CALL dom_cfg                             ! Domain configuration
      CALL dom_init                            ! Domain
      IF( ln_nnogather )   CALL nemo_northcomms   ! Initialise the northfold neighbour lists (must be done after the masks are defined)
      IF( ln_ctl       )   CALL prt_ctl_init      ! Print control
      CALL istate_init                         ! ocean initial state (Dynamics and tracers)
      IF( lk_diaobs ) THEN                     ! Observation & model comparison
         CALL dia_obs_init                     ! Initialize observational data
         CALL dia_obs( nit000 - 1 )            ! Observation operator for restart
      ENDIF
   END SUBROUTINE nemo_init

   SUBROUTINE nemo_ctl
      !!----------------------------------------------------------------------
      !!                     ***  ROUTINE nemo_ctl  ***
      !!
      !! ** Purpose :   control print setting
      !!
      !! ** Method  : - print namctl information and check some consistencies
      !!----------------------------------------------------------------------
      !
      IF(lwp) THEN                  ! control print
         WRITE(numout,*)
         WRITE(numout,*) 'nemo_ctl: Control prints & Benchmark'
         WRITE(numout,*) '~~~~~~~ '
         WRITE(numout,*) '   Namelist namctl'
         WRITE(numout,*) '      run control (for debugging)     ln_ctl    = ', ln_ctl
         WRITE(numout,*) '      level of print                  nn_print  = ', nn_print
         WRITE(numout,*) '      Start i indice for SUM control  nn_ictls  = ', nn_ictls
         WRITE(numout,*) '      End i indice for SUM control    nn_ictle  = ', nn_ictle
         WRITE(numout,*) '      Start j indice for SUM control  nn_jctls  = ', nn_jctls
         WRITE(numout,*) '      End j indice for SUM control    nn_jctle  = ', nn_jctle
         WRITE(numout,*) '      number of proc. following i     nn_isplt  = ', nn_isplt
         WRITE(numout,*) '      number of proc. following j     nn_jsplt  = ', nn_jsplt
         WRITE(numout,*) '      benchmark parameter (0/1)       nn_bench  = ', nn_bench
         WRITE(numout,*) '      timing activated    (0/1)       nn_timing = ', nn_timing
      ENDIF
      !
      nprint = nn_print             ! convert DOCTOR namelist names into OLD names
      nictls = nn_ictls
      nictle = nn_ictle
      njctls = nn_jctls
      njctle = nn_jctle
      isplt  = nn_isplt
      jsplt  = nn_jsplt
      nbench = nn_bench
      !                             ! Parameter control
      !
      IF( ln_ctl ) THEN                 ! sub-domain area indices for the control prints
         IF( lk_mpp .AND. jpnij > 1 ) THEN
            isplt = jpni   ;   jsplt = jpnj   ;   ijsplt = jpni*jpnj   ! the domain is forced to the real split domain
         ELSE
            IF( isplt == 1 .AND. jsplt == 1 ) THEN
               CALL ctl_warn( ' - isplt & jsplt are equal to 1',   &
                  &           ' - the print control will be done over the whole domain' )
            ENDIF
            ijsplt = isplt * jsplt        ! total number of processors ijsplt
         ENDIF
         IF(lwp) WRITE(numout,*)'          - The total number of processors over which the'
         IF(lwp) WRITE(numout,*)'            print control will be done is ijsplt : ', ijsplt
         !
         !                             ! indices used for the SUM control
         IF( nictls+nictle+njctls+njctle == 0 ) THEN      ! print control done over the default area
            lsp_area = .FALSE.
         ELSE                                             ! print control done over a specific area
            lsp_area = .TRUE.
            IF( nictls < 1 .OR. nictls > jpiglo ) THEN
               CALL ctl_warn( ' - nictls must satisfy 1 <= nictls <= jpiglo, it is forced to 1' )
               nictls = 1
            ENDIF
            IF( nictle < 1 .OR. nictle > jpiglo ) THEN
               CALL ctl_warn( ' - nictle must satisfy 1 <= nictle <= jpiglo, it is forced to jpiglo' )
               nictle = jpiglo
            ENDIF
            IF( njctls < 1 .OR. njctls > jpjglo ) THEN
               CALL ctl_warn( ' - njctls must satisfy 1 <= njctls <= jpjglo, it is forced to 1' )
               njctls = 1
            ENDIF
            IF( njctle < 1 .OR. njctle > jpjglo ) THEN
               CALL ctl_warn( ' - njctle must satisfy 1 <= njctle <= jpjglo, it is forced to jpjglo' )
               njctle = jpjglo
            ENDIF
         ENDIF
      ENDIF
      !
      IF( nbench == 1 ) THEN            ! Benchmark
         SELECT CASE ( cp_cfg )
         CASE ( 'gyre' )   ;   CALL ctl_warn( ' The Benchmark is activated ' )
         CASE DEFAULT      ;   CALL ctl_stop( ' The Benchmark is based on the GYRE configuration:',   &
            &                                 ' key_gyre must be used or set nbench = 0' )
         END SELECT
      ENDIF
      !
      IF( lk_c1d .AND. .NOT.lk_iomput )   CALL ctl_stop( 'nemo_ctl: The 1D configuration must be used ',   &
         &                                               'with the IOM Input/Output manager. '         ,   &
         &                                               'Compile with key_iomput enabled' )
      !
      IF( 1._wp /= SIGN(1._wp,-0._wp) )   CALL ctl_stop( 'nemo_ctl: The intrinsic SIGN function follows ',   &
         &                                               'the F2003 standard.                           ',   &
         &                                               'Compile with key_nosignedzero enabled' )
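      ! Under Fortran 2003 semantics SIGN(1._wp,-0._wp) returns -1, so this test detects
      ! compilers that honour the sign of zero; in that case NEMO asks for key_nosignedzero
      ! so that lib_fortran supplies a SIGN that ignores the sign of a zero argument.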
      !
   END SUBROUTINE nemo_ctl

   SUBROUTINE nemo_closefile
      !!----------------------------------------------------------------------
      !!                     ***  ROUTINE nemo_closefile  ***
      !!
      !! ** Purpose :   Close the files
      !!----------------------------------------------------------------------
      !
      IF( lk_mpp )   CALL mppsync
      !
      CALL iom_close                                     ! close all input/output files managed by iom_*
      !
      IF( numstp      /= -1 )   CLOSE( numstp      )     ! time-step file
      IF( numsol      /= -1 )   CLOSE( numsol      )     ! solver file
      IF( numnam      /= -1 )   CLOSE( numnam      )     ! oce namelist
      IF( numnam_ice  /= -1 )   CLOSE( numnam_ice  )     ! ice namelist
      IF( numevo_ice  /= -1 )   CLOSE( numevo_ice  )     ! ice variables (temp. evolution)
      IF( numout      /=  6 )   CLOSE( numout      )     ! standard model output file
      IF( numdct_vol  /= -1 )   CLOSE( numdct_vol  )     ! volume transports
      IF( numdct_heat /= -1 )   CLOSE( numdct_heat )     ! heat transports
      IF( numdct_salt /= -1 )   CLOSE( numdct_salt )     ! salt transports
      !
      numout = 6                                         ! redefine numout in case it is used after this point...
      !
   END SUBROUTINE nemo_closefile

   SUBROUTINE nemo_alloc
      !!----------------------------------------------------------------------
      !!                     ***  ROUTINE nemo_alloc  ***
      !!
      !! ** Purpose :   Allocate all the dynamic arrays of the OPA modules
      !!
      !! ** Method  :
      !!----------------------------------------------------------------------
      USE diawri  , ONLY: dia_wri_alloc
      USE dom_oce , ONLY: dom_oce_alloc
      !
      INTEGER :: ierr
      !!----------------------------------------------------------------------
      !
      ierr =        oce_alloc     ()            ! ocean
      ierr = ierr + dia_wri_alloc ()
      ierr = ierr + dom_oce_alloc ()            ! ocean domain
      !
      ierr = ierr + lib_mpp_alloc (numout)      ! mpp exchanges
      !
      IF( lk_mpp    )   CALL mpp_sum( ierr )
      IF( ierr /= 0 )   CALL ctl_stop( 'STOP', 'nemo_alloc : unable to allocate standard ocean arrays' )
      !
   END SUBROUTINE nemo_alloc

   SUBROUTINE nemo_partition( num_pes )
      !!----------------------------------------------------------------------
      !!                     ***  ROUTINE nemo_partition  ***
      !!
      !! ** Purpose :
      !!
      !! ** Method  :
      !!----------------------------------------------------------------------
      INTEGER, INTENT(in) ::   num_pes          ! The number of MPI processes we have
      !
      INTEGER, PARAMETER :: nfactmax = 20
      INTEGER :: nfact                          ! The no. of factors returned
      INTEGER :: ierr                           ! Error flag
      INTEGER :: ji
      INTEGER :: idiff, mindiff, imin           ! For choosing pair of factors that are closest in value
      INTEGER, DIMENSION(nfactmax) :: ifact     ! Array of factors
      !!----------------------------------------------------------------------
      ierr = 0
      CALL factorise( ifact, nfactmax, nfact, num_pes, ierr )
      IF( nfact <= 1 ) THEN
         WRITE (numout, *) 'WARNING: factorisation of number of PEs failed'
         WRITE (numout, *) '       : using grid of ', num_pes, ' x 1'
         jpnj = 1
         jpni = num_pes
      ELSE
         ! Search through factors for the pair that are closest in value
         mindiff = 1000000
         imin    = 1
         DO ji = 1, nfact-1, 2
            idiff = ABS( ifact(ji) - ifact(ji+1) )
            IF( idiff < mindiff ) THEN
               mindiff = idiff
               imin    = ji
            ENDIF
         END DO
         jpnj = ifact(imin)
         jpni = ifact(imin + 1)
      ENDIF
      !
      jpnij = jpni*jpnj
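      ! Worked example (input value assumed for illustration): for num_pes = 12 factorise
      ! returns the pairs (4,3) and (2,6); the pair with the smallest difference, (4,3),
      ! is kept, giving jpnj = 4, jpni = 3 and jpnij = 12.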
      !
   END SUBROUTINE nemo_partition

   SUBROUTINE factorise( kfax, kmaxfax, knfax, kn, kerr )
      !!----------------------------------------------------------------------
      !!                     ***  ROUTINE factorise  ***
      !!
      !! ** Purpose :   return factor pairs of kn, drawn from an allowed list of
      !!                (power-of-two) test factors. The knfax factors found are
      !!                returned in array kfax, which is of maximum dimension kmaxfax.
      !! ** Method  :
      !!----------------------------------------------------------------------
      INTEGER                    , INTENT(in   ) ::   kn, kmaxfax
      INTEGER                    , INTENT(  out) ::   kerr, knfax
      INTEGER, DIMENSION(kmaxfax), INTENT(  out) ::   kfax
      !
      INTEGER :: ifac, jl, inu
      INTEGER, PARAMETER :: ntest = 14
      INTEGER :: ilfax(ntest)
      ! ilfax contains the set of allowed factors.
      data (ilfax(jl),jl=1,ntest) / 16384, 8192, 4096, 2048, 1024, 512, 256,   &
         &                            128,   64,   32,   16,    8,   4,   2   /
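      ! Only these power-of-two test factors are tried: each one that divides kn yields a
      ! pair ( ifac, kn/ifac ) stored consecutively in kfax, so the values returned are
      ! candidate (power of two) x (cofactor) splittings of kn rather than prime factors.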
      !!----------------------------------------------------------------------

      ! Clear the error flag and initialise output vars
      kerr  = 0
      kfax  = 1
      knfax = 0

      ! Find the factors of n.
      IF( kn == 1 )   GOTO 20

      ! nu holds the unfactorised part of the number.
      ! knfax holds the number of factors found.
      ! l points to the allowed factor list.
      ! ifac holds the current factor.
      inu   = kn
      knfax = 0

      DO jl = ntest, 1, -1
         !
         ifac = ilfax(jl)
         IF( ifac > inu )   CYCLE

         ! Test whether the factor will divide.
         IF( MOD(inu,ifac) == 0 ) THEN
            !
            knfax = knfax + 1            ! Add the factor to the list
            IF( knfax > kmaxfax ) THEN
               kerr = 6
               write (*,*) 'FACTOR: insufficient space in factor array ', knfax
               return
            ENDIF
            kfax(knfax) = ifac
            ! Store the other factor that goes with this one
            knfax = knfax + 1
            kfax(knfax) = inu / ifac
            !WRITE (*,*) 'ARPDBG, factors ',knfax-1,' & ',knfax,' are ', kfax(knfax-1),' and ',kfax(knfax)
         ENDIF
         !
      END DO

   20 CONTINUE      ! Label 20 is the exit point from the factor search loop.
      !
   END SUBROUTINE factorise
#if defined key_mpp_mpi

   SUBROUTINE nemo_northcomms
      !!======================================================================
      !!                     ***  ROUTINE nemo_northcomms  ***
      !! nemo_northcomms   :   Setup for north fold exchanges with explicit peer to peer messaging
      !!=====================================================================
      !!----------------------------------------------------------------------
      !!
      !! ** Purpose :   Initialization of the northern neighbours lists.
      !!----------------------------------------------------------------------
      !!   1.0  !  2011-10  (A. C. Coward, NOCS & J. Donners, PRACE)
      !!----------------------------------------------------------------------
      INTEGER  ::   ji, jj, jk, ij, jtyp   ! dummy loop indices
      INTEGER  ::   ijpj                   ! number of rows involved in north-fold exchange
      INTEGER  ::   northcomms_alloc       ! allocate return status
      REAL(wp), ALLOCATABLE, DIMENSION ( :,: ) ::   znnbrs     ! workspace
      LOGICAL , ALLOCATABLE, DIMENSION ( : )   ::   lrankset   ! workspace

      IF(lwp) WRITE(numout,*)
      IF(lwp) WRITE(numout,*) 'nemo_northcomms : Initialization of the northern neighbours lists'
      IF(lwp) WRITE(numout,*) '~~~~~~~~~~'
      !!----------------------------------------------------------------------

      ALLOCATE( znnbrs(jpi,jpj), stat = northcomms_alloc )
      ALLOCATE( lrankset(jpnij), stat = northcomms_alloc )
      IF( northcomms_alloc /= 0 ) THEN
         WRITE(numout,cform_war)
         WRITE(numout,*) 'northcomms_alloc : failed to allocate arrays'
         CALL ctl_stop( 'STOP', 'nemo_northcomms : unable to allocate temporary arrays' )
      ENDIF
      nsndto  = 0
      isendto = -1
      ijpj    = 4
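      ! Only the last ijpj (= 4) rows of each northern-row sub-domain (jj = nlcj-ijpj+1, nlcj)
      ! take part in the north-fold exchange, so only those rows are scanned below when the
      ! neighbour lists are built.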
      !
      ! This routine has been called because ln_nnogather has been set true ( nammpp )
      ! However, these first few exchanges have to use the mpi_allgather method to
      ! establish the neighbour lists to use in subsequent peer to peer exchanges.
      ! Consequently, set l_north_nogather to be false here and set it true only after
      ! the lists have been established.
      !
      l_north_nogather = .FALSE.
      !
      ! Exchange and store ranks on northern rows
      DO jtyp = 1,4
         lrankset = .FALSE.
         znnbrs = narea
         SELECT CASE (jtyp)
         CASE(1)
            CALL lbc_lnk( znnbrs, 'T', 1. )      ! Type 1: T,W-points
         CASE(2)
            CALL lbc_lnk( znnbrs, 'U', 1. )      ! Type 2: U-point
         CASE(3)
            CALL lbc_lnk( znnbrs, 'V', 1. )      ! Type 3: V-point
         CASE(4)
            CALL lbc_lnk( znnbrs, 'F', 1. )      ! Type 4: F-point
         END SELECT

         IF ( njmppt(narea) .EQ. MAXVAL( njmppt ) ) THEN
            DO jj = nlcj-ijpj+1, nlcj
               ij = jj - nlcj + ijpj
               DO ji = 1,jpi
                  IF ( INT(znnbrs(ji,jj)) .NE. 0 .AND. INT(znnbrs(ji,jj)) .NE. narea ) &
                     &   lrankset(INT(znnbrs(ji,jj))) = .true.
               END DO
            END DO

            DO jj = 1,jpnij
               IF ( lrankset(jj) ) THEN
                  nsndto(jtyp) = nsndto(jtyp) + 1
                  IF ( nsndto(jtyp) .GT. jpmaxngh ) THEN
                     CALL ctl_stop( ' Too many neighbours in nemo_northcomms ', &
                        &           ' jpmaxngh will need to be increased ')
                  ENDIF
                  isendto(nsndto(jtyp),jtyp) = jj-1   ! narea converted to MPI rank
               ENDIF
            END DO
         ENDIF
      END DO

      !
      ! Type 5: I-point
      !
      ! ICE point exchanges may involve some averaging. The neighbours list is
      ! built up using two exchanges to ensure that the whole stencil is covered.
      ! lrankset should not be reset between these 'J' and 'K' point exchanges
      jtyp = 5
      lrankset = .FALSE.
      znnbrs = narea
      CALL lbc_lnk( znnbrs, 'J', 1. )            ! first ice U-V point

      IF ( njmppt(narea) .EQ. MAXVAL( njmppt ) ) THEN
         DO jj = nlcj-ijpj+1, nlcj
            ij = jj - nlcj + ijpj
            DO ji = 1,jpi
               IF ( INT(znnbrs(ji,jj)) .NE. 0 .AND. INT(znnbrs(ji,jj)) .NE. narea ) &
                  &   lrankset(INT(znnbrs(ji,jj))) = .true.
            END DO
         END DO
      ENDIF

      znnbrs = narea
      CALL lbc_lnk( znnbrs, 'K', 1. )            ! second ice U-V point

      IF ( njmppt(narea) .EQ. MAXVAL( njmppt )) THEN
         DO jj = nlcj-ijpj+1, nlcj
            ij = jj - nlcj + ijpj
            DO ji = 1,jpi
               IF ( INT(znnbrs(ji,jj)) .NE. 0 .AND. INT(znnbrs(ji,jj)) .NE. narea ) &
                  &   lrankset( INT(znnbrs(ji,jj))) = .true.
            END DO
         END DO

         DO jj = 1,jpnij
            IF ( lrankset(jj) ) THEN
               nsndto(jtyp) = nsndto(jtyp) + 1
               IF ( nsndto(jtyp) .GT. jpmaxngh ) THEN
                  CALL ctl_stop( ' Too many neighbours in nemo_northcomms ', &
                     &           ' jpmaxngh will need to be increased ')
               ENDIF
               isendto(nsndto(jtyp),jtyp) = jj-1   ! narea converted to MPI rank
            ENDIF
         END DO
         !
         ! For northern row areas, set l_north_nogather so that all subsequent exchanges
         ! can use peer to peer communications at the north fold
         !
         l_north_nogather = .TRUE.
         !
      ENDIF

      DEALLOCATE( znnbrs )
      DEALLOCATE( lrankset )

   END SUBROUTINE nemo_northcomms

#else
   SUBROUTINE nemo_northcomms      ! Dummy routine
      WRITE(*,*) 'nemo_northcomms: You should not have seen this print! error?'
   END SUBROUTINE nemo_northcomms
#endif

   !!======================================================================
END MODULE nemogcm