domain_decomp.F90

  1. !
  2. #define TRACEBACK write (gol,'("in ",a," (",a,", line",i5,")")') rname, __FILE__, __LINE__; call goErr
  3. #define IF_NOTOK_RETURN(action) if (status/=0) then; TRACEBACK; action; return; end if
  4. #define IF_ERROR_RETURN(action) if (status> 0) then; TRACEBACK; action; return; end if
  5. !
  6. #define IF_NOTOK_MPI(action) if (ierr/=MPI_SUCCESS) then; TRACEBACK; action; return; end if
  7. !
  8. #include "tm5.inc"
  9. !
  10. !----------------------------------------------------------------------------
  11. ! TM5 !
  12. !----------------------------------------------------------------------------
  13. !BOP
  14. !
  15. ! !MODULE: DOMAIN_DECOMP
  16. !
  17. ! !DESCRIPTION: Define a distributed grid object and its methods.
  18. ! Horizontal grid is decomposed across longitudes and latitudes.
  19. !\\
  20. !\\
  21. ! !INTERFACE:
  22. !
  23. MODULE DOMAIN_DECOMP
  24. !
  25. ! !USES:
  26. !
  27. use grid, only : TllGridInfo ! Type with Lon/Lat Grid Info
  28. use Go, only : goErr, goPr, gol ! go = general objects
  29. use dims, only : okdebug
  30. use partools ! to include mpif.h, ierr, localComm,...
  31. IMPLICIT NONE
  32. PRIVATE
  33. !
  34. ! !PUBLIC MEMBER FUNCTIONS:
  35. !
  36. public :: Init_DistGrid, Done_DistGrid ! life cycle routines
  37. public :: Get_DistGrid ! get bounds & grids
  38. public :: Print_DistGrid ! print utility (debug)
  39. public :: GATHER, SCATTER, UPDATE_HALO ! communication routines
  40. public :: GATHER_I_BAND, GATHER_J_BAND ! communication routines
  41. public :: SCATTER_I_BAND, SCATTER_J_BAND! for distributed slices
  42. public :: UPDATE_HALO_JBAND !
  43. public :: UPDATE_HALO_IBAND !
  44. public :: REDUCE ! mimic MPI_REDUCE / MPI_ALLREDUCE
  45. public :: DIST_ARR_STAT ! basic statistics of a distributed array
  46. public :: testcomm ! test communication (debug)
  47. !
  48. ! !PUBLIC TYPES:
  49. !
  50. TYPE, PUBLIC :: DIST_GRID
  51. PRIVATE
  52. ! parameters for global domain
  53. integer :: im_region ! number of longitudes
  54. integer :: jm_region ! number of latitudes
  55. ! parameters for local domain
  56. integer :: i_strt ! begin local domain longitude index
  57. integer :: i_stop ! end local domain longitude index
  58. integer :: i_strt_halo ! begin halo longitude index
  59. integer :: i_stop_halo ! end halo longitude index
  60. integer :: j_strt ! begin local domain latitude index
  61. integer :: j_stop ! end local domain latitude index
  62. integer :: j_strt_halo ! begin halo latitude index
  63. integer :: j_stop_halo ! end halo latitude index
  64. type(TllGridInfo) :: lli ! local Lat-Lon grid info
  65. type(TllGridInfo) :: glbl_lli ! global Lat-Lon grid info
  66. type(TllGridInfo) :: lli_z ! global meridional grid info
  67. integer :: neighbors(4) ! rank of (west, north, east, south) processes
  68. logical :: has_south_pole ! south pole is in local domain
  69. logical :: has_north_pole ! north pole is in local domain
  70. logical :: has_south_boundary ! south boundary is in local domain
  71. logical :: has_north_boundary ! north boundary is in local domain
  72. logical :: has_west_boundary ! west boundary is in local domain
  73. logical :: has_east_boundary ! east boundary is in local domain
  74. logical :: is_periodic ! covers [-180, 180]
  75. ! i_start, i_stop, j_start, j_stop of all PEs: (4,npes)
  76. integer, pointer :: bounds(:,:)
  77. END TYPE DIST_GRID
  78. !
  79. ! !PRIVATE DATA MEMBERS:
  80. !
  81. character(len=*), parameter :: mname='Domain_Decomp_MOD_'
  82. !
  83. ! !INTERFACE:
  84. !
  85. INTERFACE Init_DistGrid
  86. MODULE PROCEDURE INIT_DISTGRID_FROM_REGION
  87. MODULE PROCEDURE INIT_DISTGRID_FROM_LLI
  88. END INTERFACE
  89. INTERFACE Update_halo ! for arrays distributed across both I and J (1st and 2nd dim)
  90. MODULE PROCEDURE Update_halo_2d_r
  91. MODULE PROCEDURE Update_halo_3d_r
  92. MODULE PROCEDURE Update_halo_4d_r
  93. MODULE PROCEDURE Update_halo_2d_i
  94. END INTERFACE
  95. INTERFACE Update_halo_jband ! for arrays distributed across I (1st dim)
  96. MODULE PROCEDURE Update_halo_jband_1d_r
  97. MODULE PROCEDURE Update_halo_jband_2d_r
  98. MODULE PROCEDURE Update_halo_jband_3d_r
  99. MODULE PROCEDURE Update_halo_jband_4d_r
  100. END INTERFACE
  101. INTERFACE Update_halo_iband ! for arrays distributed across J (1st dim)
  102. MODULE PROCEDURE Update_halo_iband_1d_r
  103. END INTERFACE
  104. INTERFACE Gather
  105. MODULE PROCEDURE gather_2d_i
  106. MODULE PROCEDURE gather_2d_r
  107. MODULE PROCEDURE gather_3d_r
  108. MODULE PROCEDURE gather_4d_r
  109. MODULE PROCEDURE gather_5d_r
  110. END INTERFACE
  111. INTERFACE Scatter
  112. MODULE PROCEDURE scatter_2d_r
  113. MODULE PROCEDURE scatter_3d_r
  114. MODULE PROCEDURE scatter_4d_r
  115. END INTERFACE
  116. INTERFACE Gather_I_Band
  117. MODULE PROCEDURE gather_iband_1d_r
  118. ! MODULE PROCEDURE gather_iband_2d_r
  119. MODULE PROCEDURE gather_iband_3d_r
  120. END INTERFACE
  121. INTERFACE Gather_J_Band
  122. ! MODULE PROCEDURE gather_jband_1d_r
  123. MODULE PROCEDURE gather_jband_2d_r
  124. MODULE PROCEDURE gather_jband_3d_r
  125. MODULE PROCEDURE gather_jband_4d_r
  126. END INTERFACE
  127. INTERFACE Scatter_I_Band
  128. MODULE PROCEDURE scatter_iband_1d_r
  129. MODULE PROCEDURE scatter_iband_2d_r
  130. END INTERFACE
  131. INTERFACE Scatter_J_Band
  132. MODULE PROCEDURE scatter_jband_1d_r
  133. MODULE PROCEDURE scatter_jband_2d_r
  134. MODULE PROCEDURE scatter_jband_3d_r
  135. MODULE PROCEDURE scatter_jband_4d_r
  136. END INTERFACE
  137. INTERFACE Reduce
  138. MODULE PROCEDURE reduce_2d_r
  139. MODULE PROCEDURE reduce_3d_r
  140. MODULE PROCEDURE reduce_4d_r
  141. END INTERFACE
  142. INTERFACE Dist_Arr_Stat
  143. MODULE PROCEDURE dist_arr_stat_2d_r
  144. END INTERFACE
  145. !
  146. ! !REVISION HISTORY:
  147. ! 01 Nov 2011 - P. Le Sager - v0
  148. ! 13 Feb 2013 - P. Le Sager - Remove deprecated MPI_GET_EXTENT and
  149. ! MPI_TYPE_HVECTOR.
  150. ! - Extra garbage clean.
  151. ! 21 Jun 2013 - P. Le Sager - Replace all MPI_SEND with MPI_SSEND in all
  152. ! scatter routines to avoid buffering.
  153. !
  154. ! !REMARKS:
  155. ! (1) There are two categories of subroutines here: one to define the
  156. ! distributed grid objects, the other for communication between processes
  157. ! (update_halo for one-to-one comm, and scatter/gather/reduce for
  158. ! collective comm).
  159. ! Communication routines are for data DECOMPOSED over the domain. For
  160. ! more general comm, see the partools module.
  161. !
  162. ! (2) GATHER & SCATTER :
  163. ! - global arrays need to be truly global on the root process only; they
  164. ! can be (1,1,..) on the other processes (see sketch below, after (4)).
  165. ! - global arrays are without halo.
  166. ! - if not using MPI, gather and scatter just copy arrays, so it may be
  167. ! better not to call them in that case, to save memory.
  168. !
  169. ! (3) Be careful when passing a slice (or a pointer to a slice) as argument:
  170. !
  171. ! Passing a subarray will *NOT* work most of the time, unless it is a
  172. ! slice on the last dimension(s). This will give erroneous results:
  173. !
  174. ! allocate( gbl_arr(-3:imr, 1:jmr ))
  175. ! call gather( dgrid, local_arr, gbl_arr(1:imr,:), halo, status)
  176. !
  177. ! but passing full size array will work:
  178. !
  179. ! allocate( gbl_arr(-3:imr, 1:jmr ))
  180. ! allocate( temp(1:imr,1:jmr) )
  181. ! call gather( dgrid, local_arr, temp, halo, status)
  182. ! gbl_arr(1:imr,:) = temp
  183. !
  184. ! (4) Scatter[Gather]_I[J]_band are communications between a row or column
  185. ! of processors and the root processor (optionally the row_root in a few
  186. ! cases).
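!
! Usage sketch for remark (2), illustrative only: the array names, sizes
! and the 'isRoot' test are assumptions about the calling code, not
! requirements of this module.
!
!    real, allocatable :: gbl_arr(:,:), local_arr(:,:)
!    if ( isRoot ) then
!       allocate( gbl_arr(imr,jmr) )   ! truly global, and without halo
!    else
!       allocate( gbl_arr(1,1) )       ! dummy array on non-root processes
!    end if
!    call gather( dgrid, local_arr, gbl_arr, halo, status )
!    IF_NOTOK_RETURN(status=1)
!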
  187. !EOP
  188. !------------------------------------------------------------------------
  189. CONTAINS
  190. !--------------------------------------------------------------------------
  191. ! TM5 !
  192. !--------------------------------------------------------------------------
  193. !BOP
  194. !
  195. ! !IROUTINE: DISTGRID_RANGE
  196. !
  197. ! !DESCRIPTION: Give range of indices covered by rank when using nprocs.
  198. ! This is used for one dimension, and thus called twice.
  199. ! Distribution is as even as possible. E.g., 11 boxes are
  200. ! distributed across 3 processes as 4, 4 and 3 boxes per PE.
  201. !\\
  202. !\\
  203. ! !INTERFACE:
  204. !
  205. SUBROUTINE DISTGRID_RANGE(ij, rank, nprocs, istart, iend)
  206. !
  207. ! !INPUT PARAMETERS:
  208. !
  209. integer, intent(in) :: ij ! defines range (1,..,ij) to be distributed
  210. integer, intent(in) :: rank ! current process, in (0,.., nprocs-1)
  211. integer, intent(in) :: nprocs ! total # of processes
  212. !
  213. ! !OUTPUT PARAMETERS:
  214. !
  215. integer, intent(out):: istart, iend ! index range covered by rank
  216. !
  217. ! !REVISION HISTORY:
  218. ! 01 Nov 2011 - P. Le Sager - v0
  219. !
  220. !EOP
  221. !------------------------------------------------------------------------
  222. !BOC
  223. integer :: iwork1, iwork2
  224. iwork1 = ij/nprocs
  225. iwork2 = mod(ij,nprocs)
  226. istart = rank * iwork1 + 1 + min(rank, iwork2)
  227. iend = istart + iwork1 - 1
  228. if (iwork2 > rank) iend = iend + 1
  229. END SUBROUTINE DISTGRID_RANGE
  230. !EOC
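!
! Worked example of the arithmetic above (comment only): for ij=11 and
! nprocs=3 we get iwork1=3 and iwork2=2, hence
!    rank 0 -> istart=1, iend=4   (4 boxes)
!    rank 1 -> istart=5, iend=8   (4 boxes)
!    rank 2 -> istart=9, iend=11  (3 boxes)
!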
  231. !--------------------------------------------------------------------------
  232. ! TM5 !
  233. !--------------------------------------------------------------------------
  234. !BOP
  235. !
  236. ! !IROUTINE: INIT_DISTGRID_FROM_REGION
  237. !
  238. ! !DESCRIPTION: initialize a distributed grid object for a TM5 region
  239. !\\
  240. !\\
  241. ! !INTERFACE:
  242. !
  243. SUBROUTINE INIT_DISTGRID_FROM_REGION( DistGrid, region, rank, nplon, nplat, halo, status)
  244. !
  245. ! !USES:
  246. !
  247. use grid, only : init
  248. use dims, only : xbeg, xend, dx, im, xcyc
  249. use dims, only : ybeg, yend, dy, jm, touch_np, touch_sp
  250. !
  251. ! !RETURN VALUE:
  252. !
  253. type(dist_grid), intent(inout) :: DistGrid
  254. !
  255. ! !INPUT PARAMETERS:
  256. !
  257. integer, intent(in) :: region ! TM5 region number
  258. integer, intent(in) :: rank ! current process in (0,.., nprocs-1)
  259. integer, intent(in) :: nplon ! number of processes along longitude
  260. integer, intent(in) :: nplat ! number of processes along latitude
  261. integer, intent(in), optional :: halo ! halo size (default=0)
  262. !
  263. ! !OUTPUT PARAMETERS:
  264. !
  265. integer, intent(out) :: status
  266. !
  267. ! !REVISION HISTORY:
  268. ! 01 Nov 2011 - P. Le Sager - v0
  269. !
  270. ! !REMARKS:
  271. !
  272. ! (1) Ideally: call with WIDTH=<halo used in the code>... but the same
  273. ! halo is not used for all data sets. Then, in many subroutines, halo has
  274. ! to be independent of the halo carried by the distributed grid. We could
  275. ! simplify things by using a halo of 2 for all distributed data, or
  276. ! disregard halo in the dist_grid, but we just keep it general (as is).
  277. !
  278. ! (2) npe_lat/lon are also available through partools, but may not have been
  279. ! initialized yet, so we keep the nplon/nplat inputs here. Could do
  280. ! without though...
  281. !
  282. !EOP
  283. !------------------------------------------------------------------------
  284. !BOC
  285. character(len=*), parameter :: rname = mname//'init_distgrid'
  286. integer :: start, end, iwidth, i, iml, jml, iwork(2), lshape(4)
  287. real :: yb, xb, dlon, dlat
  288. character(len=39) :: name
  289. ! for pretty print
  290. integer, parameter :: maxrow=5
  291. integer, parameter :: maxcol=5
  292. integer :: i1, j1, j2, k, work
  293. character(len=3) :: str1
  294. ! for virtual topology
  295. integer, allocatable :: dist_map(:,:) ! 2D process topology (np_lon, np_lat)
  296. integer, allocatable :: dist_mapw(:,:) ! 2D process topology with neighbors (np_lon+1, np_lat+1)
  297. integer :: rank_lon ! index of current process in azimuthal set (1,..,np_lon)
  298. integer :: rank_lat ! index of current process in meridional set (1,..,np_lat)
  299. !---------------------------------------------
  300. ! 2D process topology, and mapping 1D <-> 2D
  301. !---------------------------------------------
  302. ! besides the periodicity, topology is independent of the region
  303. allocate(dist_map(nplon, nplat))
  304. npes = nplat*nplon
  305. dist_map = reshape( (/ (i,i=0,npes-1) /), shape=(/ nplon, nplat /))
  306. ! store
  307. iwork = maxloc(dist_map, mask=dist_map.eq.rank)
  308. rank_lon = iwork(1) ! in [1..nplon]
  309. rank_lat = iwork(2) ! in [1..nplat]
  310. ! Neighbors = 2d process map with extra neighbors
  311. allocate(dist_mapw(0:nplon+1, 0:nplat+1))
  312. dist_mapw = MPI_PROC_NULL
  313. dist_mapw(1:nplon,1:nplat) = dist_map
  314. ! East-West periodicity
  315. DistGrid%is_periodic =.false.
  316. if (xcyc(region)==1) then
  317. dist_mapw( 0,1:nplat) = dist_map(nplon,:)
  318. dist_mapw(nplon+1,1:nplat) = dist_map(1,:)
  319. DistGrid%is_periodic =.true.
  320. end if
  321. DistGrid%neighbors = (/ dist_mapw(rank_lon-1, rank_lat ), & ! west
  322. dist_mapw(rank_lon, rank_lat+1), & ! north
  323. dist_mapw(rank_lon+1, rank_lat ), & ! east
  324. dist_mapw(rank_lon, rank_lat-1) /) ! south
  325. !---------------------------------------------
  326. ! fill in distributed grid info
  327. !---------------------------------------------
  328. ! region boundaries
  329. DistGrid%has_south_boundary = (rank_lat == 1 )
  330. DistGrid%has_north_boundary = (rank_lat == nplat)
  331. DistGrid%has_west_boundary = (rank_lon == 1 )
  332. DistGrid%has_east_boundary = (rank_lon == nplon)
  333. ! poles
  334. DistGrid%has_south_pole = DistGrid%has_south_boundary .and. (touch_sp(region) == 1)
  335. DistGrid%has_north_pole = DistGrid%has_north_boundary .and. (touch_np(region) == 1)
  336. ! max halo that will be used in the code
  337. iwidth=0
  338. if (present(halo)) iwidth=halo
  339. ! index ranges covered by current process
  340. call DistGrid_range(im(region), rank_lon-1, nplon, start, end)
  341. ! check we are within the limit, i.e. we must have #boxes >= halo and at least 1.
  342. if ( (end-start+1) < max(1,iwidth) ) then
  343. write(gol,*)" Too much processors in X (longitude) direction! ", nplon, iwidth, start, end
  344. call goErr
  345. status=1; TRACEBACK; return
  346. end if
  347. DistGrid%im_region = im(region)
  348. DistGrid%i_strt = start
  349. DistGrid%i_stop = end
  350. DistGrid%i_strt_halo = start-iwidth
  351. DistGrid%i_stop_halo = end+iwidth
  352. ! To think about it when/if needed:
  353. ! if (DistGrid%has_north_pole) DistGrid%i_stop_halo = end
  354. ! if (DistGrid%has_south_pole) DistGrid%i_strt_halo = start
  355. call DistGrid_range(jm(region), rank_lat-1, nplat, start, end)
  356. if ( (end-start+1) < max(1,iwidth) ) then
  357. write(gol,*)" Too much processors in Y (latitude) direction! ", nplat, iwidth, start, end
  358. call goErr
  359. status=1; TRACEBACK; return
  360. end if
  361. DistGrid%jm_region = jm(region)
  362. DistGrid%j_strt = start
  363. DistGrid%j_stop = end
  364. DistGrid%j_strt_halo = start-iwidth
  365. DistGrid%j_stop_halo = end+iwidth
  366. #ifdef parallel_cplng
  367. if ( modulo(im(region),nplon) /= 0) then
  368. write(gol,'("number of X processors ",i3," does not divide evenly the number of grid boxes",i3)') &
  369. nplon, im(region) ; call goErr
  370. status=1; TRACEBACK; return
  371. endif
  372. if ( modulo(jm(region),nplat) /= 0) then
  373. write(gol,'("number of Y processors ",i3," does not divide evenly the number of grid boxes ",i3)') &
  374. nplat, jm(region) ; call goErr
  375. status=1; TRACEBACK; return
  376. endif
  377. #endif
  378. !---------------------------------------------
  379. ! geophysical grids
  380. !---------------------------------------------
  381. ! grid spacing:
  382. dlon = real(xend(region)-xbeg(region))/im(region)
  383. dlat = real(yend(region)-ybeg(region))/jm(region)
  384. !------ local
  385. iml = DistGrid%i_stop - DistGrid%i_strt + 1
  386. jml = DistGrid%j_stop - DistGrid%j_strt + 1
  387. xb = xbeg(region) + ( DistGrid%i_strt - 0.5 ) * dlon
  388. yb = ybeg(region) + ( DistGrid%j_strt - 0.5 ) * dlat
  389. write(name, '("distributed grid on process ", i5.5)') rank
  390. call INIT( DistGrid%lli, xb, dlon, iml, yb, dlat, jml, status, name=name )
  391. IF_NOTOK_RETURN(status=1)
  392. !------ global
  393. xb = xbeg(region) + 0.5 * dlon
  394. yb = ybeg(region) + 0.5 * dlat
  395. write(name, '("global grid on process ", i5.5)') rank
  396. call INIT( DistGrid%glbl_lli, xb, dlon, im(region), yb, dlat, jm(region), status, name=name )
  397. IF_NOTOK_RETURN(status=1)
  398. !------ global meridional grid
  399. name = "merid " // trim(name)
  400. call INIT( DistGrid%lli_z, 0.0, 360., 1, yb, dlat, jm(region), status, name=name )
  401. IF_NOTOK_RETURN(status=1)
  402. !---------------------------------------------
  403. ! store shape info of all PEs on all PEs (used only on root so far, but may
  404. ! become useful if we gather/scatter to/from non-root in the future)
  405. !---------------------------------------------
  406. #ifdef MPI
  407. allocate(DistGrid%bounds(4,0:npes-1))
  408. lshape = (/ DistGrid%i_strt, DistGrid%i_stop, DistGrid%j_strt, DistGrid%j_stop /)
  409. call MPI_ALLGATHER(lshape, 4, MPI_INTEGER, DistGrid%bounds, 4, MPI_INTEGER, localComm, ierr)
  410. #endif
  411. !---------------------------------------------
  412. ! PRINT (Debug) 2D process topology
  413. !---------------------------------------------
  414. if(okdebug)then
  415. write(gol,*) '------------- world view ----------------------' ; call goPr
  416. write(gol, '("process #", i5, " out of ", i5)') myid, npes ; call goPr
  417. write(gol, '("2D rank = [",i4,", ",i4,"]")') rank_lon, rank_lat ; call goPr
  418. i1=min(maxcol,nplon)
  419. str1='...'
  420. if (i1==nplon) str1=''
  421. work=nplat/2
  422. j1=min(work, maxrow)
  423. j2=max(nplat-maxrow+1, work+1)
  424. do k=nplat,j2,-1
  425. write(gol,*) dist_map(1:i1,k),str1 ; call goPr
  426. enddo
  427. if ((j2-1) /= j1) write(gol,*)" ..." ; call goPr
  428. do k=j1,1,-1
  429. write(gol,*) dist_map(1:i1,k),str1 ; call goPr
  430. enddo
  431. write(gol,*) ''
  432. write(gol,*) '-----------------------------------------------' ; call goPr
  433. end if
  434. ! Done
  435. deallocate(dist_map)
  436. deallocate(dist_mapw)
  437. status = 0
  438. END SUBROUTINE INIT_DISTGRID_FROM_REGION
  439. !EOC
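!
! Usage sketch (illustrative only; the caller, the variable names and the
! halo width of 2 are assumptions, not prescribed by this routine):
!
!    type(dist_grid) :: dgrid
!    integer         :: status
!    call Init_DistGrid( dgrid, region, myid, npe_lon, npe_lat, halo=2, status=status )
!    IF_NOTOK_RETURN(status=1)
!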
  440. !--------------------------------------------------------------------------
  441. ! TM5 !
  442. !--------------------------------------------------------------------------
  443. !BOP
  444. !
  445. ! !IROUTINE: INIT_DISTGRID_FROM_LLI
  446. !
  447. ! !DESCRIPTION: initialize a distributed grid object from a lli object
  448. !\\
  449. !\\
  450. ! !INTERFACE:
  451. !
  452. SUBROUTINE INIT_DISTGRID_FROM_LLI( DistGrid, lli, rank, halo, status)
  453. !
  454. ! !USES:
  455. !
  456. use grid, only : init, assignment(=)
  457. !
  458. ! !INPUT/OUTPUT PARAMETERS:
  459. !
  460. type(dist_grid), intent(inout) :: DistGrid
  461. !
  462. ! !INPUT PARAMETERS:
  463. !
  464. type(TllGridInfo), intent(in) :: lli
  465. integer, intent(in) :: rank ! current process in (0,.., nprocs-1)
  466. integer, optional, intent(in) :: halo ! halo size (default=0)
  467. !
  468. ! !OUTPUT PARAMETERS:
  469. !
  470. integer, intent(out) :: status
  471. !
  472. ! !REVISION HISTORY:
  473. ! 18 Nov 2013 - Ph. Le Sager - v0, adapted from init_distgrid_from_region
  474. !
  475. ! !REMARKS:
  476. !
  477. ! (1) See INIT_DISTGRID_FROM_REGION for comments about the halo input.
  478. ! (2) Uses npe_lat/lon from the partools module : TM5_MPI_Init2 must have
  479. ! been called beforehand.
  480. !EOP
  481. !------------------------------------------------------------------------
  482. !BOC
  483. character(len=*), parameter :: rname = mname//'init_distgrid_from_lli'
  484. integer :: start, end, iwidth, i, iml, jml, iwork(2), lshape(4)
  485. real :: yb, xb, dlon, dlat
  486. character(len=39) :: name
  487. integer :: nplon ! number of processes along longitude
  488. integer :: nplat ! number of processes along latitude
  489. ! for pretty print
  490. integer, parameter :: maxrow=5
  491. integer, parameter :: maxcol=5
  492. integer :: i1, j1, j2, k, work
  493. character(len=3) :: str1
  494. ! for virtual topology
  495. integer, allocatable :: dist_map(:,:) ! 2D process topology (np_lon, np_lat)
  496. integer, allocatable :: dist_mapw(:,:) ! 2D process topology with neighbors (np_lon+1, np_lat+1)
  497. integer :: rank_lon ! index of current process in azimuthal set (1,..,np_lon)
  498. integer :: rank_lat ! index of current process in meridional set (1,..,np_lat)
  499. !---------------------------------------------
  500. ! test for valid input
  501. !---------------------------------------------
  502. if (.not.(associated( lli%lon_deg ))) then
  503. write(gol,*)" The LLI object has not been initialized. " ; call goErr
  504. write(gol,*)" A distributed grid for MPI cannot be build." ; call goErr
  505. status=1; TRACEBACK; return
  506. end if
  507. nplat = npe_lat
  508. nplon = npe_lon
  509. !---------------------------------------------
  510. ! 2D process topology, and mapping 1D <-> 2D
  511. !---------------------------------------------
  512. ! besides the periodicity, topology is independent of the region
  513. allocate(dist_map(nplon, nplat))
  514. npes = nplat*nplon
  515. dist_map = reshape( (/ (i,i=0,npes-1) /), shape=(/ nplon, nplat /))
  516. ! store
  517. iwork = maxloc(dist_map, mask=dist_map.eq.rank)
  518. rank_lon = iwork(1) ! in [1..nplon]
  519. rank_lat = iwork(2) ! in [1..nplat]
  520. ! Neighbors = 2d process map with extra neighbors
  521. allocate(dist_mapw(0:nplon+1, 0:nplat+1))
  522. dist_mapw = MPI_PROC_NULL
  523. dist_mapw(1:nplon,1:nplat) = dist_map
  524. ! East-West periodicity
  525. DistGrid%is_periodic =.false.
  526. if ( lli%dlon_deg*lli%im == 360. ) then
  527. dist_mapw( 0,1:nplat) = dist_map(nplon,:)
  528. dist_mapw(nplon+1,1:nplat) = dist_map(1,:)
  529. DistGrid%is_periodic =.true.
  530. end if
  531. DistGrid%neighbors = (/ dist_mapw(rank_lon-1, rank_lat ), & ! west
  532. dist_mapw(rank_lon, rank_lat+1), & ! north
  533. dist_mapw(rank_lon+1, rank_lat ), & ! east
  534. dist_mapw(rank_lon, rank_lat-1) /) ! south
  535. !---------------------------------------------
  536. ! fill in distributed grid info
  537. !---------------------------------------------
  538. ! region boundaries
  539. DistGrid%has_south_boundary = (rank_lat == 1 )
  540. DistGrid%has_north_boundary = (rank_lat == nplat)
  541. DistGrid%has_west_boundary = (rank_lon == 1 )
  542. DistGrid%has_east_boundary = (rank_lon == nplon)
  543. ! poles
  544. DistGrid%has_south_pole = DistGrid%has_south_boundary .and. (lli%blat_deg(0) == -90.0)
  545. DistGrid%has_north_pole = DistGrid%has_north_boundary .and. (lli%blat_deg(lli%jm) == 90.0)
  546. ! max halo that will be used in the code
  547. iwidth=0
  548. if (present(halo)) iwidth=halo
  549. ! index ranges covered by current process
  550. call DistGrid_range(lli%im, rank_lon-1, nplon, start, end)
  551. ! check we are within the limit, i.e. we must have #boxes >= halo and at least 1.
  552. if ( (end-start+1) < max(1,iwidth) ) then
  553. write(gol,*)" Too much processors in X (longitude) direction:", nplon, iwidth, start, end
  554. call goErr
  555. status=1; TRACEBACK; return
  556. end if
  557. DistGrid%im_region = lli%im
  558. DistGrid%i_strt = start
  559. DistGrid%i_stop = end
  560. DistGrid%i_strt_halo = start-iwidth
  561. DistGrid%i_stop_halo = end+iwidth
  562. ! To think about it when/if needed:
  563. ! if (DistGrid%has_north_pole) DistGrid%i_stop_halo = end
  564. ! if (DistGrid%has_south_pole) DistGrid%i_strt_halo = start
  565. call DistGrid_range(lli%jm, rank_lat-1, nplat, start, end)
  566. if ( (end-start+1) < max(1,iwidth) ) then
  567. write(gol,*)" Too much processors in Y (latitude) direction:", nplat, iwidth, start, end
  568. call goErr
  569. status=1; TRACEBACK; return
  570. end if
  571. DistGrid%jm_region = lli%jm
  572. DistGrid%j_strt = start
  573. DistGrid%j_stop = end
  574. DistGrid%j_strt_halo = start-iwidth
  575. DistGrid%j_stop_halo = end+iwidth
  576. !---------------------------------------------
  577. ! geophysical grids
  578. !---------------------------------------------
  579. !------ tile
  580. iml = DistGrid%i_stop - DistGrid%i_strt + 1
  581. jml = DistGrid%j_stop - DistGrid%j_strt + 1
  582. xb = lli%lon_deg(1) + ( DistGrid%i_strt - 1 ) * lli%dlon_deg
  583. yb = lli%lat_deg(1) + ( DistGrid%j_strt - 1 ) * lli%dlat_deg
  584. write(name, '("distributed grid on process ", i5.5)') rank
  585. call INIT( DistGrid%lli, xb, lli%dlon_deg, iml, yb, lli%dlat_deg, jml, status, name=name )
  586. IF_NOTOK_RETURN(status=1)
  587. !------ world
  588. DistGrid%glbl_lli = lli
  589. !------ world meridional grid
  590. name = "merid "
  591. call INIT( DistGrid%lli_z, 0.0, 360., 1, yb, lli%dlat_deg, lli%jm, status, name=name )
  592. IF_NOTOK_RETURN(status=1)
  593. !---------------------------------------------
  594. ! store shape info of all PEs on all PEs (used only on root so far, but may
  595. ! become useful if we gather/scatter to/from non-root in the future)
  596. !---------------------------------------------
  597. #ifdef MPI
  598. allocate(DistGrid%bounds(4,0:npes-1))
  599. lshape = (/ DistGrid%i_strt, DistGrid%i_stop, DistGrid%j_strt, DistGrid%j_stop /)
  600. call MPI_ALLGATHER(lshape, 4, MPI_INTEGER, DistGrid%bounds, 4, MPI_INTEGER, localComm, ierr)
  601. #endif
  602. !---------------------------------------------
  603. ! PRINT (Debug) 2D process topology
  604. !---------------------------------------------
  605. if(okdebug)then
  606. write(gol,*) '------------- world view ----------------------' ; call goPr
  607. write(gol, '("process #", i5, " out of ", i5)') myid, npes ; call goPr
  608. write(gol, '("2D rank = [",i4,", ",i4,"]")') rank_lon, rank_lat ; call goPr
  609. i1=min(maxcol,nplon)
  610. str1='...'
  611. if (i1==nplon) str1=''
  612. work=nplat/2
  613. j1=min(work, maxrow)
  614. j2=max(nplat-maxrow+1, work+1)
  615. do k=nplat,j2,-1
  616. write(gol,*) dist_map(1:i1,k),str1 ; call goPr
  617. enddo
  618. if ((j2-1) /= j1) write(gol,*)" ..." ; call goPr
  619. do k=j1,1,-1
  620. write(gol,*) dist_map(1:i1,k),str1 ; call goPr
  621. enddo
  622. write(gol,*) ''
  623. write(gol,*) '-----------------------------------------------' ; call goPr
  624. end if
  625. ! Done
  626. deallocate(dist_map)
  627. deallocate(dist_mapw)
  628. status = 0
  629. END SUBROUTINE INIT_DISTGRID_FROM_LLI
  630. !EOC
  631. !--------------------------------------------------------------------------
  632. ! TM5 !
  633. !--------------------------------------------------------------------------
  634. !BOP
  635. !
  636. ! !IROUTINE: DONE_DISTGRID
  637. !
  638. ! !DESCRIPTION: deallocate distributed grid object elements
  639. !\\
  640. !\\
  641. ! !INTERFACE:
  642. !
  643. SUBROUTINE DONE_DISTGRID( DistGrid, STATUS )
  644. !
  645. ! !USES:
  646. !
  647. use grid, only : done
  648. !
  649. ! !INPUT PARAMETERS:
  650. !
  651. type(dist_grid), intent(inout) :: DistGrid
  652. !
  653. ! !OUTPUT PARAMETERS:
  654. !
  655. integer, intent(out) :: status
  656. !
  657. ! !REVISION HISTORY:
  658. ! 01 Nov 2011 - P. Le Sager - v0
  659. !
  660. !EOP
  661. !------------------------------------------------------------------------
  662. !BOC
  663. character(len=*), parameter :: rname = mname//'Done_Distgrid'
  664. call done(DistGrid%lli, status)
  665. IF_NOTOK_RETURN(status=1)
  666. call done(DistGrid%lli_z, status)
  667. IF_NOTOK_RETURN(status=1)
  668. call done(DistGrid%glbl_lli, status)
  669. IF_NOTOK_RETURN(status=1)
  670. if (associated(DistGrid%bounds)) deallocate(DistGrid%bounds)
  671. status=0
  672. END SUBROUTINE DONE_DISTGRID
  673. !EOC
  674. !--------------------------------------------------------------------------
  675. ! TM5 !
  676. !--------------------------------------------------------------------------
  677. !BOP
  678. !
  679. ! !IROUTINE: GET_DISTGRID
  680. !
  681. ! !DESCRIPTION: provide quick access to object elements (bounds and grids),
  682. ! while keeping them private.
  683. !\\
  684. !\\
  685. ! !INTERFACE:
  686. !
  687. SUBROUTINE GET_DISTGRID(DistGrid, &
  688. i_strt, i_stop, i_strt_halo, i_stop_halo, &
  689. j_strt, j_stop, j_strt_halo, j_stop_halo, &
  690. i_wrld, j_wrld, &
  691. hasSouthPole, hasNorthPole, &
  692. hasSouthBorder, hasNorthBorder, &
  693. hasEastBorder, hasWestBorder, &
  694. lli, lli_z, global_lli )
  695. !
  696. ! !USES:
  697. !
  698. use grid, only : assignment(=) ! to copy lli
  699. !
  700. ! !INPUT PARAMETERS:
  701. !
  702. type(dist_grid), intent(in) :: DistGrid
  703. integer, optional :: i_strt, i_stop, i_strt_halo, i_stop_halo
  704. integer, optional :: j_strt, j_stop, j_strt_halo, j_stop_halo
  705. integer, optional :: i_wrld, j_wrld
  706. logical, optional :: hasSouthPole, hasNorthPole
  707. logical, optional :: hasSouthBorder, hasNorthBorder
  708. logical, optional :: hasWestBorder, hasEastBorder
  709. type(TllGridInfo), optional :: lli ! local Lat-Lon grid info
  710. type(TllGridInfo), optional :: global_lli ! global Lat-Lon grid info
  711. type(TllGridInfo), optional :: lli_z ! global zonal grid info
  712. !
  713. ! !REVISION HISTORY:
  714. ! 01 Nov 2011 - P. Le Sager - v0
  715. !
  716. !EOP
  717. !------------------------------------------------------------------------
  718. !BOC
  719. if (present(i_strt)) i_strt = DistGrid%i_strt
  720. if (present(i_stop)) i_stop = DistGrid%i_stop
  721. if (present(i_strt_halo)) i_strt_halo = DistGrid%i_strt_halo
  722. if (present(i_stop_halo)) i_stop_halo = DistGrid%i_stop_halo
  723. if (present(j_strt)) j_strt = DistGrid%j_strt
  724. if (present(j_stop)) j_stop = DistGrid%j_stop
  725. if (present(j_strt_halo)) j_strt_halo = DistGrid%j_strt_halo
  726. if (present(j_stop_halo)) j_stop_halo = DistGrid%j_stop_halo
  727. if (present(i_wrld)) i_wrld = DistGrid%im_region
  728. if (present(j_wrld)) j_wrld = DistGrid%jm_region
  729. if (present(hasSouthPole)) hasSouthPole = DistGrid%has_south_pole
  730. if (present(hasNorthPole)) hasNorthPole = DistGrid%has_north_pole
  731. if (present(hasSouthBorder)) hasSouthBorder = DistGrid%has_south_boundary
  732. if (present(hasNorthBorder)) hasNorthBorder = DistGrid%has_north_boundary
  733. if (present(hasEastBorder )) hasEastBorder = DistGrid%has_east_boundary
  734. if (present(hasWestBorder )) hasWestBorder = DistGrid%has_west_boundary
  735. if (present(lli)) lli = DistGrid%lli
  736. if (present(global_lli)) global_lli = DistGrid%glbl_lli
  737. if (present(lli_z)) lli_z = DistGrid%lli_z
  738. END SUBROUTINE GET_DISTGRID
  739. !EOC
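!
! Usage sketch (illustrative only; the local variable names are assumptions):
!
!    integer :: i1, i2, j1, j2
!    call Get_DistGrid( dgrid, i_strt=i1, i_stop=i2, j_strt=j1, j_stop=j2 )
!    ! then loop over the local tile only:
!    !   do j = j1, j2
!    !     do i = i1, i2
!    !       ...
!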
  740. #ifdef MPI /* MPI TYPES */
  741. !--------------------------------------------------------------------------
  742. ! TM5 !
  743. !--------------------------------------------------------------------------
  744. !BOP
  745. !
  746. ! !IROUTINE: GET_HALO_TYPE
  747. !
  748. ! !DESCRIPTION: Returns derived MPI types needed for halo communications,
  749. ! and the start indices for the send & receive communications.
  750. ! Four of each are returned, one for each side of the domain,
  751. ! in the following order: West, North, East, South.
  752. !\\
  753. !\\
  754. ! !INTERFACE:
  755. !
  756. SUBROUTINE GET_HALO_TYPE( DistGrid, arr_shape, halo, datatype, srtype, ijsr, status )
  757. !
  758. ! !INPUT PARAMETERS:
  759. !
  760. type(dist_grid), intent(in) :: DistGrid
  761. integer, intent(in) :: arr_shape(:) ! shape of local array
  762. integer, intent(in) :: halo ! halo size
  763. integer, intent(in) :: datatype ! basic MPI datatype
  764. !
  765. ! !OUTPUT PARAMETERS:
  766. !
  767. integer, intent(out) :: srtype(4) ! derived MPI datatypes for send/recv
  768. integer, intent(out) :: ijsr(4,4) ! start indices for send/recv
  769. integer, intent(out) :: status
  770. !
  771. ! !REVISION HISTORY:
  772. ! 01 Nov 2011 - P. Le Sager - v0
  773. !
  774. ! !REMARKS:
  775. ! (1) Not tested on all imaginable cases, but should work with any size
  776. ! GE 2, and any of the basic numerical MPI_xxx datatypes
  777. !
  778. !EOP
  779. !------------------------------------------------------------------------
  780. !BOC
  781. character(len=*), parameter :: rname = mname//'get_halo_type'
  782. integer :: nsslice, weslice, nstype, wetype ! MPI derived datatypes
  783. integer :: n, m, hstride ! sizes to build datatypes
  784. integer :: ndims, sz, i0, j0, i1, j1
  785. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, vstride
  786. sz = size(arr_shape)
  787. ! collapse third and above dimensions
  788. ndims = 1
  789. if (sz > 2) ndims = product(arr_shape(3:))
  790. ! strides
  791. CALL MPI_TYPE_GET_EXTENT( datatype, lb, sizeoftype, ierr)
  792. IF_NOTOK_MPI(status=1)
  793. hstride = arr_shape(1)
  794. vstride = arr_shape(1)*arr_shape(2)*sizeoftype
  795. ! size of data slice to carry
  796. n = DistGrid%I_STOP - DistGrid%I_STRT + 1
  797. m = DistGrid%J_STOP - DistGrid%J_STRT + 1
  798. ! Type for North and South halo regions
  799. ! --------------------------------------
  800. ! halo lines of length N, separated by hstride
  801. call MPI_TYPE_VECTOR (halo, n, hstride, datatype, nsslice, ierr)
  802. IF_NOTOK_MPI(status=1)
  803. ! stack 3rd (and above) dimension if any
  804. if (ndims == 1) then
  805. nstype = nsslice
  806. else
  807. ! note : also works with ndims=1
  808. call MPI_TYPE_CREATE_HVECTOR (ndims, 1, vstride, nsslice, nstype, ierr)
  809. IF_NOTOK_MPI(status=1)
  810. call MPI_TYPE_FREE(nsslice, ierr)
  811. IF_NOTOK_MPI(status=1)
  812. end if
  813. call MPI_TYPE_COMMIT (nstype, ierr)
  814. IF_NOTOK_MPI(status=1)
  815. ! Type for West and East halo regions
  816. ! ------------------------------------
  817. ! M lines of length 'halo', separated by hstride
  818. call MPI_TYPE_VECTOR (m, halo, hstride, datatype, weslice, ierr)
  819. IF_NOTOK_MPI(status=1)
  820. ! stack 3rd (and above) dimension
  821. if (ndims == 1) then
  822. wetype = weslice
  823. else
  824. ! note : also works with ndims=1
  825. call MPI_TYPE_CREATE_HVECTOR (ndims, 1, vstride, weslice, wetype, ierr)
  826. IF_NOTOK_MPI(status=1)
  827. call MPI_TYPE_FREE(weslice, ierr)
  828. IF_NOTOK_MPI(status=1)
  829. end if
  830. call MPI_TYPE_COMMIT (wetype, ierr)
  831. IF_NOTOK_MPI(status=1)
  832. ! Buffer anchors
  833. ! ----------------
  834. ! Assuming that neighbors are stored in (West, North, East, South) order
  835. i0 = DistGrid%i_strt
  836. j0 = DistGrid%j_strt
  837. i1 = DistGrid%i_stop
  838. j1 = DistGrid%j_stop
  839. srtype = (/ wetype, nstype, wetype, nstype /)
  840. ijsr(:,1) = (/ i0, i0, i1+1-halo, i0 /) ! i-start location of buffer to send
  841. ijsr(:,2) = (/ j0, j1-halo+1, j0, j0 /) ! j-start location of buffer to send
  842. ijsr(:,3) = (/ i0-halo, i0, i1+1, i0 /) ! i-start location of buffer to receive
  843. ijsr(:,4) = (/ j0, j1+1, j0, j0-halo /) ! j-start location of buffer to receive
  844. status=0
  845. END SUBROUTINE GET_HALO_TYPE
  846. !EOC
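!
! Intended use, sketched under assumptions (an update_halo routine would
! post one send and one receive per neighbor; 'arr', 'tag' and 'req' are
! placeholders, and the actual routines below may differ in detail):
!
!    do k = 1, 4   ! west, north, east, south
!       call MPI_ISEND( arr(ijsr(k,1),ijsr(k,2),1), 1, srtype(k), &
!                       DistGrid%neighbors(k), tag, localComm, req(k), ierr )
!       call MPI_IRECV( arr(ijsr(k,3),ijsr(k,4),1), 1, srtype(k), &
!                       DistGrid%neighbors(k), tag, localComm, req(k+4), ierr )
!    end do
!    call MPI_WAITALL( 8, req, MPI_STATUSES_IGNORE, ierr )
!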
  847. !--------------------------------------------------------------------------
  848. ! TM5 !
  849. !--------------------------------------------------------------------------
  850. !BOP
  851. !
  852. ! !IROUTINE: GET_INTERIOR_TYPE
  853. !
  854. ! !DESCRIPTION: Returns derived MPI types that describe the interior domains
  855. ! needed for collective communications.
  856. !\\
  857. !\\
  858. ! !INTERFACE:
  859. !
  860. SUBROUTINE GET_INTERIOR_TYPE( DistGrid, shp, datatype, linterior, ginterior, status )
  861. !
  862. ! !INPUT PARAMETERS:
  863. !
  864. type(dist_grid), intent(in) :: DistGrid
  865. integer, intent(in) :: shp(:) ! shape of local array
  866. integer, intent(in) :: datatype ! basic MPI datatype
  867. !
  868. ! !OUTPUT PARAMETERS:
  869. !
  870. ! derived MPI datatypes describing distributed interior domains:
  871. integer, intent(out) :: ginterior(npes-1) ! within global array (used by root, as many as NPES-1)
  872. integer, intent(out) :: linterior ! within local array (used by non-root)
  873. integer, intent(out) :: status
  874. !
  875. ! !REVISION HISTORY:
  876. ! 01 Nov 2011 - P. Le Sager - v0
  877. !
  878. ! !REMARKS:
  879. ! (1) input must be checked beforehand by calling CHECK_DIST_ARR
  880. !
  881. !EOP
  882. !------------------------------------------------------------------------
  883. !BOC
  884. character(len=*), parameter :: rname = mname//'get_interior_type'
  885. integer :: gslice, lslice ! intermediate datatypes
  886. integer :: n, m ! sizes to build datatypes
  887. integer :: hlstride, hgstride ! strides to build datatypes
  888. integer :: stack, sz, klm
  889. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, vlstride, vgstride
  890. ! init : number of dimensions, default value
  891. sz = size(shp)
  892. ginterior = MPI_DATATYPE_NULL
  893. linterior = MPI_DATATYPE_NULL
  894. ! collapse third and above dimensions
  895. stack = 1
  896. if (sz > 2) stack = product(shp(3:))
  897. ! size of data slice to carry
  898. n = DistGrid%I_STOP - DistGrid%I_STRT + 1
  899. m = DistGrid%J_STOP - DistGrid%J_STRT + 1
  900. CALL MPI_TYPE_GET_EXTENT( datatype, lb, sizeoftype, ierr)
  901. IF_NOTOK_MPI(status=1)
  902. ! horizontal global stride (in data)
  903. hgstride = DistGrid%im_region
  904. ! vertical global stride (in byte)
  905. vgstride = DistGrid%im_region * DistGrid%jm_region * sizeoftype
  906. ! local strides (may be different than n and n*m because of halo)
  907. hlstride = shp(1) ! in data
  908. vlstride = shp(1)*shp(2)*sizeoftype ! in byte
  909. if ( isRoot ) then
  910. do klm=1,npes-1
911. ! horizontal chunk is M lines of length N, separated by a global
  912. ! stride
  913. n = DistGrid%bounds(2,klm) - DistGrid%bounds(1,klm) + 1
  914. m = DistGrid%bounds(4,klm) - DistGrid%bounds(3,klm) + 1
  915. call MPI_TYPE_VECTOR (m, n, hgstride, datatype, gslice, ierr)
  916. IF_NOTOK_MPI(status=1)
  917. ! stack 3rd and above dimensions
  918. if (stack == 1) then
  919. ginterior(klm) = gslice
  920. else
  921. ! note : also works with stack=1
  922. call MPI_TYPE_CREATE_HVECTOR(stack, 1, vgstride, gslice, ginterior(klm), ierr)
  923. IF_NOTOK_MPI(status=1)
  924. call MPI_TYPE_FREE(gslice, ierr)
  925. IF_NOTOK_MPI(status=1)
  926. end if
  927. call MPI_TYPE_COMMIT (ginterior(klm), ierr)
  928. IF_NOTOK_MPI(status=1)
  929. end do
  930. else
931. ! local interior is basically M lines of length N, separated by a local
  932. ! stride
  933. call MPI_TYPE_VECTOR (m, n, hlstride, datatype, lslice, ierr)
  934. IF_NOTOK_MPI(status=1)
  935. ! stack 3rd and above dimensions
  936. if (stack == 1) then
  937. linterior = lslice
  938. else
  939. ! note : also works with stack=1
  940. call MPI_TYPE_CREATE_HVECTOR (stack, 1, vlstride, lslice, linterior, ierr)
  941. IF_NOTOK_MPI(status=1)
  942. call MPI_TYPE_FREE(lslice, ierr)
  943. IF_NOTOK_MPI(status=1)
  944. end if
  945. call MPI_TYPE_COMMIT (linterior, ierr)
  946. IF_NOTOK_MPI(status=1)
  947. end if
  948. status=0
  949. END SUBROUTINE GET_INTERIOR_TYPE
  950. !EOC
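!
! Usage sketch (illustrative, mirroring GATHER_2D_R below): root receives one
! message per non-root task directly into the global array using the matching
! 'ginterior' type, while each non-root task sends its interior with
! 'linterior'. Variable names follow the gather/scatter routines of this module:
!
!   call GET_INTERIOR_TYPE( DistGrid, shape(dist_array), my_real, linterior, ginterior, status )
!   if ( isRoot ) then
!      do klm=1,npes-1
!         call MPI_RECV( glbl_array(DistGrid%bounds(1,klm),DistGrid%bounds(3,klm)), 1, &
!                        ginterior(klm), klm, 1, localComm, stat, ierr )
!      end do
!      call FREE_DERIVED_TYPE( ginterior )
!   else
!      call MPI_SEND( dist_array(DistGrid%i_strt,DistGrid%j_strt), 1, linterior, root, 1, localComm, ierr )
!      call MPI_TYPE_FREE( linterior, ierr )
!   end if
!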
  951. !--------------------------------------------------------------------------
  952. ! TM5 !
  953. !--------------------------------------------------------------------------
  954. !BOP
  955. !
  956. ! !IROUTINE: FREE_DERIVED_TYPE
  957. !
  958. ! !DESCRIPTION: free all MPI derived datatypes in a vector
  959. !\\
  960. !\\
  961. ! !INTERFACE:
  962. !
  963. SUBROUTINE FREE_DERIVED_TYPE( datatype )
  964. !
  965. ! !INPUT/OUTPUT PARAMETERS:
  966. !
  967. integer, intent(inout) :: datatype(:) ! set of derived MPI datatypes
  968. !
  969. ! !REVISION HISTORY:
  970. ! 01 Nov 2011 - P. Le Sager - v0
  971. !
  972. !EOP
  973. !------------------------------------------------------------------------
  974. !BOC
  975. integer :: i, j
  976. integer :: res(size(datatype)) ! hold unique elements
  977. integer :: k ! number of unique elements
  978. ! Get list of unique handle(s)
  979. ! ----------------------------
  980. k = 1
  981. res(1) = 1
  982. outer: do i=2,size(datatype)
  983. ! look for a match
  984. do j=1,k
  985. if (datatype(res(j)) == datatype(i)) cycle outer ! match
  986. end do
  987. ! no match : add it to the list
  988. k = k + 1
  989. res(k) = i
  990. end do outer
  991. ! Free handle(s)
  992. ! ---------------------------
  993. do i=1,k
  994. if (datatype(res(i)) /= MPI_DATATYPE_NULL) &
  995. call MPI_TYPE_FREE(datatype(res(i)), ierr)
  996. end do
  997. END SUBROUTINE FREE_DERIVED_TYPE
  998. !EOC
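!
! Note (added, hedged): the routine above first reduces the input to its unique
! handles because callers may pass vectors with repeated entries, e.g. the halo
! types where srtype = (/ wetype, nstype, wetype, nstype /). Freeing such a
! vector element by element would call MPI_TYPE_FREE twice on the same handle.
! A minimal illustration:
!
!   integer :: srtype(4)
!   srtype = (/ wetype, nstype, wetype, nstype /)
!   call FREE_DERIVED_TYPE( srtype )   ! frees wetype and nstype exactly once
!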
  999. #endif /* MPI TYPES */
  1000. !--------------------------------------------------------------------------
  1001. ! TM5 !
  1002. !--------------------------------------------------------------------------
  1003. !BOP
  1004. !
  1005. ! !IROUTINE: CHECK_DIST_ARR
  1006. !
1007. ! !DESCRIPTION: Check that the shape of a local array corresponds to an array
1008. ! distributed on the current process. This check is done on the
  1009. ! first 2 dimensions only, along which the domain
  1010. ! decomposition is done.
  1011. !
  1012. ! Optionally: check shape of a global array. If arrays are 3D
  1013. ! or more, the 3rd and above dimensions of local and global
  1014. ! arrays are also compared. (This becomes "2D or more" for I-
  1015. ! and J-Slices.)
  1016. !\\
  1017. !\\
  1018. ! !INTERFACE:
  1019. !
  1020. SUBROUTINE CHECK_DIST_ARR( DistGrid, shp, halo, status, glbl_shp, jband, iband, has_global )
  1021. !
  1022. ! !INPUT PARAMETERS:
  1023. !
  1024. type(dist_grid), intent(in) :: DistGrid
  1025. integer, intent(in) :: shp(:) ! shape of local array
  1026. integer, intent(in) :: halo ! halo size
  1027. !
  1028. integer, intent(in), optional :: glbl_shp(:) ! shape of global array
  1029. logical, intent(in), optional :: jband, iband ! is it a zonal or meridional slice?
  1030. logical, intent(in), optional :: has_global ! current proc hold global array (default is root)
  1031. !
  1032. ! !OUTPUT PARAMETERS:
  1033. !
  1034. integer, intent(out) :: status
  1035. !
  1036. ! !REVISION HISTORY:
  1037. ! 01 Nov 2011 - P. Le Sager - v0
  1038. !
  1039. ! !REMARKS: i-band refers to a unique i value, i.e. a meridional dataset.
  1040. ! j-band refers to a unique j value, i.e. a zonal dataset.
  1041. !
  1042. !EOP
  1043. !------------------------------------------------------------------------
  1044. !BOC
  1045. character(len=*), parameter :: rname = mname//'check_dist_arr '
  1046. integer :: n, m, sz
  1047. integer, allocatable :: glbsz(:)
  1048. logical :: x_jband, x_iband, hold_global
  1049. status = 0
  1050. ! check inputs
  1051. x_jband=.false.
  1052. x_iband=.false.
  1053. if(present(iband))x_iband=iband
  1054. if(present(jband))x_jband=jband
  1055. if(x_iband.and.x_jband)then
  1056. write (gol,*) "CHECK_DIST_ARR: cannot have **both** I- and J-Slices" ; call goErr
  1057. TRACEBACK; status=1; return
  1058. end if
  1059. ! by default global array is expected on root
  1060. hold_global = isRoot
  1061. if (present(has_global)) hold_global=has_global
  1062. ! required size w/o halo
  1063. n = DistGrid%I_STOP - DistGrid%I_STRT + 1
  1064. m = DistGrid%J_STOP - DistGrid%J_STRT + 1
  1065. ! check shape of array
  1066. if (x_iband) then
  1067. if (shp(1) /= m+2*halo) then
  1068. write (gol,*) "CHECK_DIST_ARR: local array shape does not conform" ; call goErr
  1069. write (gol,'(" local array : ", i4)') shp(1) ; call goErr
  1070. write (gol,'(" should be : ", i4)') m+2*halo ; call goErr
  1071. write (gol,'(" with J-stop : ", i4)') DistGrid%J_STOP ; call goErr
  1072. write (gol,'(" J-start: ", i4)') DistGrid%J_STRT ; call goErr
  1073. TRACEBACK; status=1; return
  1074. end if
  1075. else if (x_jband) then
  1076. if (shp(1) /= n+2*halo) then
  1077. write (gol,*) "CHECK_DIST_ARR: local array shape does not conform" ; call goErr
1078. write (gol,'(" local array : ", i4)') shp(1) ; call goErr
1079. write (gol,'(" should be : ", i4)') n+2*halo ; call goErr
1080. write (gol,'(" with I-stop : ", i4)') DistGrid%I_STOP ; call goErr
1081. write (gol,'(" I-start: ", i4)') DistGrid%I_STRT ; call goErr
  1082. TRACEBACK; status=1; return
  1083. end if
  1084. else
  1085. if ((shp(1) /= n+2*halo).or.(shp(2) /= m+2*halo)) then
  1086. write (gol,*) "CHECK_DIST_ARR: local array shape does not conform" ; call goErr
  1087. write (gol,'(" local array : ",2i4)') shp(1:2) ; call goErr
  1088. write (gol,'(" should be : ",2i4)') n+2*halo, m+2*halo ; call goErr
1089. write (gol,'(" w/ I/J-stop : ",2i4)') DistGrid%I_STOP, DistGrid%J_STOP ; call goErr
1090. write (gol,'(" I/J-start: ",2i4)') DistGrid%I_STRT, DistGrid%J_STRT ; call goErr
  1091. TRACEBACK; status=1; return
  1092. end if
  1093. end if
  1094. ! check shape of global array on root
  1095. if (present(glbl_shp) .and. hold_global) then
  1096. sz = size(shp)
  1097. if (sz /= size(glbl_shp)) then
  1098. write (gol,'("CHECK_DIST_ARR : global and local arrays have different rank:")') ; call goErr
  1099. write (gol,'(" local rank : ", i4)') sz ; call goErr
  1100. write (gol,'(" global rank : ", i4)') size(glbl_shp) ; call goErr
  1101. TRACEBACK; status=1; return
  1102. end if
  1103. allocate(glbsz(sz))
  1104. glbsz = shp
  1105. if (x_iband) then
  1106. glbsz(1) = DistGrid%jm_region
  1107. else if (x_jband) then
  1108. glbsz(1) = DistGrid%im_region
  1109. else
  1110. glbsz(1:2) = (/ DistGrid%im_region, DistGrid%jm_region /)
  1111. end if
  1112. if ( any (glbl_shp /= glbsz) ) then
  1113. write (gol,'("CHECK_DIST_ARR : global array has wrong shape:")') ; call goErr
  1114. write (gol,*) " shape(global) : ", glbl_shp ; call goErr
  1115. write (gol,*) " im [and/or] jm_region/-extra dims- : ", glbsz ; call goErr
  1116. TRACEBACK; status=1; return
  1117. end if
  1118. deallocate(glbsz)
  1119. end if
  1120. END SUBROUTINE CHECK_DIST_ARR
  1121. !EOC
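!
! Example (illustrative): for the default 2D case the first two local
! dimensions must equal the tile size plus twice the halo width, i.e.
!
!   shp(1) = DistGrid%I_STOP - DistGrid%I_STRT + 1 + 2*halo
!   shp(2) = DistGrid%J_STOP - DistGrid%J_STRT + 1 + 2*halo
!
! and, when a global shape is passed, its first two dimensions must equal
! (/ DistGrid%im_region, DistGrid%jm_region /) on the process that holds the
! global array (root by default).
!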
  1122. !--------------------------------------------------------------------------
  1123. ! TM5 !
  1124. !--------------------------------------------------------------------------
  1125. !BOP
  1126. !
  1127. ! !IROUTINE: UPDATE_HALO_IBAND_1D_R
  1128. !
  1129. ! !DESCRIPTION: update halo cells of a vector distributed along latitudes
  1130. !\\
  1131. !\\
  1132. ! !INTERFACE:
  1133. !
  1134. SUBROUTINE UPDATE_HALO_IBAND_1D_R( DistGrid, dist_array, halo, status )
  1135. !
  1136. ! !INPUT PARAMETERS:
  1137. !
  1138. type(dist_grid), intent(in) :: DistGrid
  1139. integer, intent(in) :: halo
  1140. real, intent(inout) :: dist_array(DistGrid%j_strt-halo:)
  1141. !
  1142. ! !OUTPUT PARAMETERS:
  1143. !
  1144. integer, intent(out) :: status
  1145. !
  1146. ! !REVISION HISTORY:
  1147. ! 7 Jan 2016 - Ph. Le Sager - v0
  1148. !
  1149. !EOP
  1150. !------------------------------------------------------------------------
  1151. !BOC
  1152. character(len=*), parameter :: rname = mname//'update_halo_iband_1d_r'
  1153. integer :: j0, j1
  1154. #ifdef MPI
  1155. integer :: stat(MPI_STATUS_SIZE,4), req(4)
  1156. integer :: k, sz(1), tag_snd(2), tag_rcv(2)
  1157. integer :: ijsr(2,2), nghbr(2)
  1158. ! check input
1159. status = 0; if ( halo == 0 ) return
  1160. sz = shape(dist_array)
  1161. j0 = DistGrid%j_strt
  1162. j1 = DistGrid%j_stop
  1163. ! degenerate case
  1164. if (npe_lat==1) return
  1165. if(okdebug)then
  1166. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, iband=.true. )
  1167. IF_NOTOK_RETURN(status=1)
  1168. end if
  1169. ! Buffers anchors
  1170. ! ----------------
  1171. ijsr(:,1) = (/ j1-halo+1, j0 /) ! j-start location of north and south buffers to send
  1172. ijsr(:,2) = (/ j1+1, j0-halo /) ! j-start location of north and south buffers to receive
  1173. ! Communicate
  1174. ! ---------------- ! only south and north
  1175. tag_snd = (/2,4/)
  1176. tag_rcv = (/4,2/)
  1177. nghbr = (/2,4/)
  1178. neigh : do k=1,2
  1179. call MPI_ISEND( dist_array( ijsr(k,1)), halo, my_real, DistGrid%neighbors(nghbr(k)), tag_snd(k), localComm, req(k), ierr)
  1180. call MPI_IRECV( dist_array( ijsr(k,2)), halo, my_real, DistGrid%neighbors(nghbr(k)), tag_rcv(k), localComm, req(k+2), ierr)
  1181. end do neigh
  1182. call MPI_WAITALL(4, req, stat, ierr)
  1183. IF_NOTOK_MPI(status=1)
  1184. #endif
  1185. status = 0
  1186. END SUBROUTINE UPDATE_HALO_IBAND_1D_R
  1187. !EOC
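!
! Usage sketch (illustrative; 'col' is an assumed name, not from the original
! code): for a meridional vector allocated with a one-cell halo,
!
!   real :: col(DistGrid%j_strt-1:DistGrid%j_stop+1)
!   call UPDATE_HALO_IBAND_1D_R( DistGrid, col, 1, status )
!
! fills col(j_strt-1) and col(j_stop+1) with the values owned by the South and
! North neighbors (a no-op when halo==0 or npe_lat==1).
!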
  1188. !--------------------------------------------------------------------------
  1189. ! TM5 !
  1190. !--------------------------------------------------------------------------
  1191. !BOP
  1192. !
1193. ! !IROUTINE: UPDATE_HALO_JBAND_1D_R
  1194. !
  1195. ! !DESCRIPTION: update halo cells of a decomposed zonal vector
  1196. !\\
  1197. !\\
  1198. ! !INTERFACE:
  1199. !
  1200. SUBROUTINE UPDATE_HALO_JBAND_1D_R( DistGrid, dist_array, halo, status )
  1201. !
  1202. ! !INPUT PARAMETERS:
  1203. !
  1204. type(dist_grid), intent(in) :: DistGrid
  1205. integer, intent(in) :: halo
  1206. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:)
  1207. !
  1208. ! !OUTPUT PARAMETERS:
  1209. !
  1210. integer, intent(out) :: status
  1211. !
  1212. ! !REVISION HISTORY:
  1213. ! 20 Feb 2012 - P. Le Sager - v0
  1214. !
  1215. !EOP
  1216. !------------------------------------------------------------------------
  1217. !BOC
  1218. character(len=*), parameter :: rname = mname//'update_halo_jband_1d_r'
  1219. integer :: i0, i1
  1220. #ifdef MPI
  1221. integer :: stat(MPI_STATUS_SIZE,4), req(4)
  1222. integer :: k, sz(1), tag_snd(2), tag_rcv(2)
  1223. integer :: ijsr(2,2), nghbr(2)
  1224. ! check input
1225. status = 0; if ( halo == 0 ) return
  1226. sz = shape(dist_array)
  1227. i0 = DistGrid%i_strt
  1228. i1 = DistGrid%i_stop
  1229. ! degenerate case
  1230. if (npe_lon==1) then
  1231. if (DistGrid%is_periodic) then
  1232. dist_array(i0-halo:i0-1) = dist_array(i1-halo+1:i1)
  1233. dist_array(i1+1:i1+halo) = dist_array(i0:i0+halo-1)
  1234. end if
  1235. return
  1236. end if
  1237. if(okdebug)then
  1238. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, jband=.true. )
  1239. IF_NOTOK_RETURN(status=1)
  1240. end if
  1241. ! Buffers anchors
  1242. ! ----------------
  1243. ijsr(:,1) = (/ i0, i1+1-halo /) ! i-start location of buffer to send
  1244. ijsr(:,2) = (/ i0-halo, i1+1 /) ! i-start location of buffer to receive
  1245. ! Communicate
  1246. ! ---------------- ! only east and west
  1247. tag_snd = (/1,3/)
  1248. tag_rcv = (/3,1/)
  1249. nghbr = (/1,3/)
  1250. neigh : do k=1,2
  1251. call MPI_ISEND( dist_array( ijsr(k,1)), halo, my_real, DistGrid%neighbors(nghbr(k)), tag_snd(k), localComm, req(k), ierr)
  1252. call MPI_IRECV( dist_array( ijsr(k,2)), halo, my_real, DistGrid%neighbors(nghbr(k)), tag_rcv(k), localComm, req(k+2), ierr)
  1253. end do neigh
  1254. call MPI_WAITALL(4, req, stat, ierr)
  1255. IF_NOTOK_MPI(status=1)
  1256. #else
  1257. if ((halo/=0).and.DistGrid%is_periodic) then
  1258. i0 = DistGrid%i_strt
  1259. i1 = DistGrid%i_stop
  1260. dist_array(i0-halo:i0-1) = dist_array(i1-halo+1:i1)
  1261. dist_array(i1+1:i1+halo) = dist_array(i0:i0+halo-1)
  1262. end if
  1263. #endif
  1264. status = 0
  1265. END SUBROUTINE UPDATE_HALO_JBAND_1D_R
  1266. !EOC
  1267. !--------------------------------------------------------------------------
  1268. ! TM5 !
  1269. !--------------------------------------------------------------------------
  1270. !BOP
  1271. !
  1272. ! !IROUTINE: UPDATE_HALO_JBAND_2D_R
  1273. !
  1274. ! !DESCRIPTION: update halo cells of a decomposed zonal 2d slice
  1275. !\\
  1276. !\\
  1277. ! !INTERFACE:
  1278. !
  1279. SUBROUTINE UPDATE_HALO_JBAND_2D_R( DistGrid, dist_array, halo, status )
  1280. !
  1281. ! !INPUT PARAMETERS:
  1282. !
  1283. type(dist_grid), intent(in) :: DistGrid
  1284. integer, intent(in) :: halo
  1285. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:,:)
  1286. !
  1287. ! !OUTPUT PARAMETERS:
  1288. !
  1289. integer, intent(out) :: status
  1290. !
  1291. ! !REVISION HISTORY:
  1292. ! 20 Feb 2012 - P. Le Sager - v0
  1293. !
  1294. !EOP
  1295. !------------------------------------------------------------------------
  1296. !BOC
  1297. character(len=*), parameter :: rname = mname//'update_halo_jband_2d_r'
  1298. integer :: i0, i1
  1299. #ifdef MPI
  1300. integer :: stat(MPI_STATUS_SIZE,4), req(4), wetype
  1301. integer :: k, sz(2), tag_snd(2), tag_rcv(2)
  1302. integer :: ijsr(2,2), nghbr(2)
  1303. status = 0
  1304. ! check input
  1305. if ( halo == 0 ) return
  1306. sz = shape(dist_array)
  1307. i0 = DistGrid%i_strt
  1308. i1 = DistGrid%i_stop
  1309. ! degenerate case
  1310. if (npe_lon==1) then
  1311. if (DistGrid%is_periodic) then
  1312. dist_array(i0-halo:i0-1,:) = dist_array(i1-halo+1:i1,:)
  1313. dist_array(i1+1:i1+halo,:) = dist_array(i0:i0+halo-1,:)
  1314. end if
  1315. return
  1316. end if
  1317. if(okdebug)then
  1318. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, jband=.true. )
  1319. IF_NOTOK_RETURN(status=1)
  1320. end if
  1321. ! Buffers anchors
  1322. ! ----------------
  1323. ijsr(:,1) = (/ i0, i1+1-halo /) ! i-start location of buffer to send
  1324. ijsr(:,2) = (/ i0-halo, i1+1 /) ! i-start location of buffer to receive
  1325. ! pack data
  1326. !----------
  1327. call MPI_TYPE_VECTOR (sz(2), halo, sz(1), my_real, wetype, ierr)
  1328. call MPI_TYPE_COMMIT (wetype, ierr)
  1329. ! Communicate
  1330. ! ----------------
  1331. tag_snd = (/1,3/)
  1332. tag_rcv = (/3,1/)
  1333. nghbr = (/1,3/)
  1334. neigh : do k=1,2 ! only east and west
  1335. call MPI_ISEND( dist_array(ijsr(k,1),1), 1, wetype, DistGrid%neighbors(nghbr(k)), tag_snd(k), localComm, req(k), ierr)
  1336. call MPI_IRECV( dist_array(ijsr(k,2),1), 1, wetype, DistGrid%neighbors(nghbr(k)), tag_rcv(k), localComm, req(k+2), ierr)
  1337. end do neigh
  1338. call MPI_WAITALL(4, req, stat, ierr)
  1339. IF_NOTOK_MPI(status=1)
  1340. call MPI_TYPE_FREE(wetype, ierr)
  1341. #else
  1342. if ((halo/=0).and.DistGrid%is_periodic) then
  1343. i0 = DistGrid%i_strt
  1344. i1 = DistGrid%i_stop
  1345. dist_array(i0-halo:i0-1,:) = dist_array(i1-halo+1:i1,:)
  1346. dist_array(i1+1:i1+halo,:) = dist_array(i0:i0+halo-1,:)
  1347. end if
  1348. #endif
  1349. status = 0
  1350. END SUBROUTINE UPDATE_HALO_JBAND_2D_R
  1351. !EOC
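!
! Note (added, hedged): the 'wetype' built above,
!
!   call MPI_TYPE_VECTOR( sz(2), halo, sz(1), my_real, wetype, ierr )
!
! describes sz(2) blocks of 'halo' contiguous reals, one block per j-row of the
! local slice, each separated by the full leading dimension sz(1). Anchored at
! dist_array(i0,1) it thus covers the 'halo' westernmost interior columns (the
! other anchors in ijsr work analogously for the east side and the receive
! buffers). The 3D/4D variants below simply multiply the block count by the
! trailing dimensions, e.g. sz(2)*sz(3) and sz(2)*sz(3)*sz(4).
!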
  1352. !--------------------------------------------------------------------------
  1353. ! TM5 !
  1354. !--------------------------------------------------------------------------
  1355. !BOP
  1356. !
  1357. ! !IROUTINE: UPDATE_HALO_JBAND_3D_R
  1358. !
  1359. ! !DESCRIPTION: update halo cells of a decomposed zonal 3d slice
  1360. !\\
  1361. !\\
  1362. ! !INTERFACE:
  1363. !
  1364. SUBROUTINE UPDATE_HALO_JBAND_3D_R( DistGrid, dist_array, halo, status )
  1365. !
  1366. ! !INPUT PARAMETERS:
  1367. !
  1368. type(dist_grid), intent(in) :: DistGrid
  1369. integer, intent(in) :: halo
  1370. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:,:,:)
  1371. !
  1372. ! !OUTPUT PARAMETERS:
  1373. !
  1374. integer, intent(out) :: status
  1375. !
  1376. ! !REVISION HISTORY:
  1377. ! 20 Feb 2012 - P. Le Sager - v0
  1378. !
  1379. !EOP
  1380. !------------------------------------------------------------------------
  1381. !BOC
  1382. character(len=*), parameter :: rname = mname//'update_halo_jband_3d_r'
  1383. integer :: i0, i1
  1384. #ifdef MPI
  1385. integer :: stat(MPI_STATUS_SIZE,4), req(4), wetype
  1386. integer :: k, sz(3), tag_snd(2), tag_rcv(2)
  1387. integer :: ijsr(2,2), nghbr(2)
  1388. status = 0
  1389. ! check input
  1390. if ( halo == 0 ) return
  1391. sz = shape(dist_array)
  1392. i0 = DistGrid%i_strt
  1393. i1 = DistGrid%i_stop
  1394. ! degenerate case
  1395. if (npe_lon==1) then
  1396. if (DistGrid%is_periodic) then
  1397. dist_array(i0-halo:i0-1,:,:) = dist_array(i1-halo+1:i1,:,:)
  1398. dist_array(i1+1:i1+halo,:,:) = dist_array(i0:i0+halo-1,:,:)
  1399. end if
  1400. return
  1401. end if
  1402. if(okdebug)then
  1403. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, jband=.true. )
  1404. IF_NOTOK_RETURN(status=1)
  1405. end if
  1406. ! Buffers anchors
  1407. ! ----------------
  1408. ijsr(:,1) = (/ i0, i1+1-halo /) ! i-start location of buffer to send
  1409. ijsr(:,2) = (/ i0-halo, i1+1 /) ! i-start location of buffer to receive
  1410. ! pack data
  1411. !----------
  1412. call MPI_TYPE_VECTOR (sz(2)*sz(3), halo, sz(1), my_real, wetype, ierr)
  1413. call MPI_TYPE_COMMIT (wetype, ierr)
  1414. ! Communicate
  1415. ! ----------------
  1416. tag_snd = (/1,3/)
  1417. tag_rcv = (/3,1/)
  1418. nghbr = (/1,3/)
  1419. neigh : do k=1,2 ! only east and west
  1420. call MPI_ISEND( dist_array(ijsr(k,1),1,1), 1, wetype, DistGrid%neighbors(nghbr(k)), tag_snd(k), localComm, req(k), ierr)
  1421. call MPI_IRECV( dist_array(ijsr(k,2),1,1), 1, wetype, DistGrid%neighbors(nghbr(k)), tag_rcv(k), localComm, req(k+2), ierr)
  1422. end do neigh
  1423. call MPI_WAITALL(4, req, stat, ierr)
  1424. IF_NOTOK_MPI(status=1)
  1425. call MPI_TYPE_FREE(wetype, ierr)
  1426. #else
  1427. if ((halo/=0).and.DistGrid%is_periodic) then
  1428. i0 = DistGrid%i_strt
  1429. i1 = DistGrid%i_stop
  1430. dist_array(i0-halo:i0-1,:,:) = dist_array(i1-halo+1:i1,:,:)
  1431. dist_array(i1+1:i1+halo,:,:) = dist_array(i0:i0+halo-1,:,:)
  1432. end if
  1433. #endif
  1434. status = 0
  1435. END SUBROUTINE UPDATE_HALO_JBAND_3D_R
  1436. !EOC
  1437. !--------------------------------------------------------------------------
  1438. ! TM5 !
  1439. !--------------------------------------------------------------------------
  1440. !BOP
  1441. !
  1442. ! !IROUTINE: UPDATE_HALO_JBAND_4D_R
  1443. !
  1444. ! !DESCRIPTION: update halo cells of a decomposed zonal 4d slice
  1445. !\\
  1446. !\\
  1447. ! !INTERFACE:
  1448. !
  1449. SUBROUTINE UPDATE_HALO_JBAND_4D_R( DistGrid, dist_array, halo, status )
  1450. !
  1451. ! !INPUT PARAMETERS:
  1452. !
  1453. type(dist_grid), intent(in) :: DistGrid
  1454. integer, intent(in) :: halo
  1455. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:,:,:,:)
  1456. !
  1457. ! !OUTPUT PARAMETERS:
  1458. !
  1459. integer, intent(out) :: status
  1460. !
  1461. ! !REVISION HISTORY:
  1462. ! 20 Feb 2012 - P. Le Sager - v0
  1463. !
  1464. !EOP
  1465. !------------------------------------------------------------------------
  1466. !BOC
  1467. character(len=*), parameter :: rname = mname//'update_halo_jband_4d_r'
  1468. integer :: i0, i1
  1469. #ifdef MPI
  1470. integer :: stat(MPI_STATUS_SIZE,4), req(4), wetype
  1471. integer :: k, sz(4), tag_snd(2), tag_rcv(2)
  1472. integer :: ijsr(2,2), nghbr(2)
  1473. status = 0
  1474. ! check input
  1475. if ( halo == 0 ) return
  1476. sz = shape(dist_array)
  1477. i0 = DistGrid%i_strt
  1478. i1 = DistGrid%i_stop
  1479. ! degenerate case
  1480. if (npe_lon==1) then
  1481. if (DistGrid%is_periodic) then
  1482. dist_array(i0-halo:i0-1,:,:,:) = dist_array(i1-halo+1:i1,:,:,:)
  1483. dist_array(i1+1:i1+halo,:,:,:) = dist_array(i0:i0+halo-1,:,:,:)
  1484. end if
  1485. return
  1486. end if
  1487. if(okdebug)then
  1488. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, jband=.true. )
  1489. IF_NOTOK_RETURN(status=1)
  1490. end if
  1491. ! Buffers anchors
  1492. ! ----------------
  1493. ijsr(:,1) = (/ i0, i1+1-halo /) ! i-start location of buffer to send
  1494. ijsr(:,2) = (/ i0-halo, i1+1 /) ! i-start location of buffer to receive
  1495. ! pack data
  1496. !----------
  1497. call MPI_TYPE_VECTOR (sz(2)*sz(3)*sz(4), halo, sz(1), my_real, wetype, ierr)
  1498. call MPI_TYPE_COMMIT (wetype, ierr)
  1499. ! Communicate
  1500. ! ----------------
  1501. tag_snd = (/1,3/)
  1502. tag_rcv = (/3,1/)
  1503. nghbr = (/1,3/)
  1504. neigh : do k=1,2 ! only east and west
  1505. call MPI_ISEND( dist_array(ijsr(k,1),1,1,1), 1, wetype, DistGrid%neighbors(nghbr(k)), tag_snd(k), localComm, req(k), ierr)
  1506. call MPI_IRECV( dist_array(ijsr(k,2),1,1,1), 1, wetype, DistGrid%neighbors(nghbr(k)), tag_rcv(k), localComm, req(k+2), ierr)
  1507. end do neigh
  1508. call MPI_WAITALL(4, req, stat, ierr)
  1509. IF_NOTOK_MPI(status=1)
  1510. call MPI_TYPE_FREE(wetype, ierr)
  1511. #else
  1512. if ((halo/=0).and.DistGrid%is_periodic) then
  1513. i0 = DistGrid%i_strt
  1514. i1 = DistGrid%i_stop
  1515. dist_array(i0-halo:i0-1,:,:,:) = dist_array(i1-halo+1:i1,:,:,:)
  1516. dist_array(i1+1:i1+halo,:,:,:) = dist_array(i0:i0+halo-1,:,:,:)
  1517. end if
  1518. #endif
  1519. status = 0
  1520. END SUBROUTINE UPDATE_HALO_JBAND_4D_R
  1521. !EOC
  1522. !--------------------------------------------------------------------------
  1523. ! TM5 !
  1524. !--------------------------------------------------------------------------
  1525. !BOP
  1526. !
  1527. ! !IROUTINE: UPDATE_HALO_2D_R
  1528. !
  1529. ! !DESCRIPTION: update halo cells of a distributed 2D real array
  1530. !\\
  1531. !\\
  1532. ! !INTERFACE:
  1533. !
  1534. SUBROUTINE UPDATE_HALO_2D_R( DistGrid, dist_array, halo, status, i_only, j_only )
  1535. !
  1536. ! !INPUT PARAMETERS:
  1537. !
  1538. type(dist_grid), intent(in) :: DistGrid
  1539. integer, intent(in) :: halo
  1540. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  1541. logical, optional, intent(in) :: i_only ! update East & West halo only
  1542. logical, optional, intent(in) :: j_only ! update North & South halo only
  1543. !
  1544. ! !OUTPUT PARAMETERS:
  1545. !
  1546. integer, intent(out) :: status
  1547. !
  1548. ! !REVISION HISTORY:
  1549. ! 01 Nov 2011 - P. Le Sager - v0
  1550. !
  1551. !EOP
  1552. !------------------------------------------------------------------------
  1553. !BOC
  1554. character(len=*), parameter :: rname = mname//'update_halo_2d_r'
  1555. integer :: i1, i2, j1, j2
  1556. #ifdef MPI
  1557. integer :: stat(MPI_STATUS_SIZE,8), req(8)
  1558. integer :: k, sz(2), tag_snd(4), tag_rcv(4)
  1559. integer :: srtype(4), ijsr(4,4)
  1560. ! check input
1561. status = 0; if ( halo == 0 ) return
  1562. sz = shape(dist_array)
  1563. if(okdebug)then
  1564. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status )
  1565. IF_NOTOK_RETURN(status=1)
  1566. end if
  1567. ! get types and I/J start
  1568. CALL GET_HALO_TYPE( DistGrid, sz, halo, my_real, srtype, ijsr, status )
  1569. IF_NOTOK_RETURN(status=1)
  1570. ! Communicate
  1571. tag_snd = (/1,2,3,4/)
  1572. tag_rcv = (/3,4,1,2/)
  1573. neigh : do k=1,4
  1574. call MPI_ISEND( dist_array( ijsr(k,1), ijsr(k,2)), 1, srtype(k), DistGrid%neighbors(k), tag_snd(k), localComm, req(k), ierr)
  1575. call MPI_IRECV( dist_array( ijsr(k,3), ijsr(k,4)), 1, srtype(k), DistGrid%neighbors(k), &
  1576. tag_rcv(k), localComm, req(k+4), ierr)
  1577. end do neigh
  1578. call MPI_WAITALL(8, req, stat, ierr)
  1579. IF_NOTOK_MPI(status=1)
  1580. call FREE_DERIVED_TYPE( srtype )
  1581. #else
  1582. if ((halo/=0).and.DistGrid%is_periodic) then
  1583. i1 = DistGrid%i_strt
  1584. i2 = DistGrid%i_stop
  1585. j1 = DistGrid%j_strt
  1586. j2 = DistGrid%j_stop
  1587. dist_array(i1-halo:i1-1,j1:j2) = dist_array(i2-halo+1:i2,j1:j2)
  1588. dist_array(i2+1:i2+halo,j1:j2) = dist_array(i1:i1+halo-1,j1:j2)
  1589. end if
  1590. #endif
  1591. status = 0
  1592. END SUBROUTINE UPDATE_HALO_2D_R
  1593. !EOC
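!
! Usage sketch (illustrative; 'field' and the halo width of 2 are assumptions,
! not taken from the original code): a typical caller allocates the local tile
! with halo cells and refreshes them after updating the interior:
!
!   real, allocatable :: field(:,:)
!   allocate( field(DistGrid%i_strt-2:DistGrid%i_stop+2, &
!                   DistGrid%j_strt-2:DistGrid%j_stop+2) )
!   ! ... compute interior values ...
!   call UPDATE_HALO_2D_R( DistGrid, field, 2, status )
!   IF_NOTOK_RETURN(status=1)
!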
  1594. !--------------------------------------------------------------------------
  1595. ! TM5 !
  1596. !--------------------------------------------------------------------------
  1597. !BOP
  1598. !
  1599. ! !IROUTINE: UPDATE_HALO_2D_I
  1600. !
  1601. ! !DESCRIPTION: update halo cells of a distributed 2D integer array
  1602. !\\
  1603. !\\
  1604. ! !INTERFACE:
  1605. !
  1606. SUBROUTINE UPDATE_HALO_2D_I( DistGrid, dist_array, halo, status )
  1607. !
  1608. ! !INPUT PARAMETERS:
  1609. !
  1610. type(dist_grid), intent(in) :: DistGrid
  1611. integer, intent(in) :: halo
  1612. integer, intent(inout) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  1613. !
  1614. ! !OUTPUT PARAMETERS:
  1615. !
  1616. integer, intent(out) :: status
  1617. !
  1618. ! !REVISION HISTORY:
  1619. ! 01 Nov 2011 - P. Le Sager - v0
  1620. !
  1621. ! !REMARKS:
  1622. ! (1) not tested yet, but the version for 'real' has been...
  1623. !
  1624. !EOP
  1625. !------------------------------------------------------------------------
  1626. !BOC
  1627. character(len=*), parameter :: rname = mname//'update_halo_2D_i'
  1628. integer :: i1, i2, j1, j2
  1629. #ifdef MPI
  1630. integer :: stat(MPI_STATUS_SIZE,8), req(8), k, sz(2)
  1631. integer :: srtype(4), ijsr(4,4), tag_snd(4), tag_rcv(4)
  1632. ! check input
1633. status = 0; if ( halo == 0 ) return
  1634. sz = shape(dist_array)
  1635. if(okdebug)then
  1636. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status )
  1637. IF_NOTOK_RETURN(status=1)
  1638. end if
  1639. ! get types and I/J start
  1640. CALL GET_HALO_TYPE( distGrid, sz, halo, MPI_INTEGER, srtype, ijsr, status )
  1641. IF_NOTOK_RETURN(status=1)
  1642. ! Communicate
  1643. tag_snd = (/1,2,3,4/)
  1644. tag_rcv = (/3,4,1,2/)
  1645. neigh : do k=1,4
  1646. call MPI_ISEND( dist_array( ijsr(k,1), ijsr(k,2)), 1, srtype(k), DistGrid%neighbors(k), tag_snd(k), localComm, req(k), ierr)
  1647. call MPI_IRECV( dist_array( ijsr(k,3), ijsr(k,4)), 1, srtype(k), DistGrid%neighbors(k), &
  1648. tag_rcv(k), localComm, req(k+4), ierr)
  1649. end do neigh
  1650. call MPI_WAITALL(8, req, stat, ierr)
  1651. call FREE_DERIVED_TYPE( srtype )
  1652. #else
  1653. if ((halo/=0).and.DistGrid%is_periodic) then
  1654. i1 = DistGrid%i_strt
  1655. i2 = DistGrid%i_stop
  1656. j1 = DistGrid%j_strt
  1657. j2 = DistGrid%j_stop
  1658. dist_array(i1-halo:i1-1,j1:j2) = dist_array(i2-halo+1:i2,j1:j2)
  1659. dist_array(i2+1:i2+halo,j1:j2) = dist_array(i1:i1+halo-1,j1:j2)
  1660. end if
  1661. #endif
  1662. status = 0
  1663. END SUBROUTINE UPDATE_HALO_2D_I
  1664. !EOC
  1665. !--------------------------------------------------------------------------
  1666. ! TM5 !
  1667. !--------------------------------------------------------------------------
  1668. !BOP
  1669. !
  1670. ! !IROUTINE: UPDATE_HALO_3D_R
  1671. !
  1672. ! !DESCRIPTION: update halo cells of a distributed 3D real array
  1673. !\\
  1674. !\\
  1675. ! !INTERFACE:
  1676. !
  1677. SUBROUTINE UPDATE_HALO_3D_R( DistGrid, dist_array, halo, status )
  1678. !
  1679. ! !INPUT PARAMETERS:
  1680. !
  1681. type(dist_grid), intent(in) :: DistGrid
  1682. integer, intent(in) :: halo
  1683. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:)
  1684. !
  1685. ! !OUTPUT PARAMETERS:
  1686. !
  1687. integer, intent(out) :: status
  1688. !
  1689. ! !REVISION HISTORY:
  1690. ! 01 Nov 2011 - P. Le Sager - v0
  1691. !
  1692. !EOP
  1693. !------------------------------------------------------------------------
  1694. !BOC
  1695. character(len=*), parameter :: rname = mname//'update_halo_3d_r'
  1696. integer :: i1, i2, j1, j2
  1697. #ifdef MPI
  1698. integer :: stat(MPI_STATUS_SIZE,8), req(8), k, sz(3)
  1699. integer :: srtype(4), ijsr(4,4), tag_snd(4), tag_rcv(4)
  1700. ! check input
1701. status = 0; if ( halo == 0 ) return
  1702. sz = shape(dist_array)
  1703. if(okdebug)then
  1704. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status )
  1705. IF_NOTOK_RETURN(status=1)
  1706. end if
  1707. ! get types and I/J start
  1708. CALL GET_HALO_TYPE( DistGrid, sz, halo, my_real, srtype, ijsr, status )
  1709. IF_NOTOK_RETURN(status=1)
  1710. ! Communicate
  1711. tag_snd = (/1,2,3,4/)
  1712. tag_rcv = (/3,4,1,2/)
  1713. neigh : do k=1,4
  1714. call MPI_ISEND( dist_array( ijsr(k,1), ijsr(k,2), 1), 1, srtype(k), DistGrid%neighbors(k), tag_snd(k), localComm, &
  1715. req(k), ierr)
  1716. call MPI_IRECV( dist_array( ijsr(k,3), ijsr(k,4), 1), 1, srtype(k), DistGrid%neighbors(k), tag_rcv(k), localComm, &
  1717. req(k+4), ierr)
  1718. end do neigh
  1719. call MPI_WAITALL(8, req, stat, ierr)
  1720. IF_NOTOK_MPI(status=1)
  1721. call FREE_DERIVED_TYPE( srtype )
  1722. #else
  1723. if ((halo/=0).and.DistGrid%is_periodic) then
  1724. i1 = DistGrid%i_strt
  1725. i2 = DistGrid%i_stop
  1726. j1 = DistGrid%j_strt
  1727. j2 = DistGrid%j_stop
  1728. dist_array(i1-halo:i1-1,j1:j2,:) = dist_array(i2-halo+1:i2,j1:j2,:)
  1729. dist_array(i2+1:i2+halo,j1:j2,:) = dist_array(i1:i1+halo-1,j1:j2,:)
  1730. end if
  1731. #endif
  1732. status = 0
  1733. END SUBROUTINE UPDATE_HALO_3D_R
  1734. !EOC
  1735. !--------------------------------------------------------------------------
  1736. ! TM5 !
  1737. !--------------------------------------------------------------------------
  1738. !BOP
  1739. !
  1740. ! !IROUTINE: UPDATE_HALO_4D_R
  1741. !
  1742. ! !DESCRIPTION: update halo cells of a distributed 4D real array
  1743. !\\
  1744. !\\
  1745. ! !INTERFACE:
  1746. !
  1747. SUBROUTINE UPDATE_HALO_4D_R( DistGrid, dist_array, halo, status )
  1748. !
  1749. ! !INPUT PARAMETERS:
  1750. !
  1751. type(dist_grid), intent(in) :: DistGrid
  1752. integer, intent(in) :: halo
  1753. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:,:)
  1754. !
  1755. ! !OUTPUT PARAMETERS:
  1756. !
  1757. integer, intent(out) :: status
  1758. !
  1759. ! !REVISION HISTORY:
  1760. ! 01 Nov 2011 - P. Le Sager - v0
  1761. !
  1762. !EOP
  1763. !------------------------------------------------------------------------
  1764. !BOC
  1765. character(len=*), parameter :: rname = mname//'update_halo_4D_r'
  1766. integer :: i1, i2, j1, j2
  1767. #ifdef MPI
  1768. integer :: stat(MPI_STATUS_SIZE,8), req(8), k, sz(4)
  1769. integer :: srtype(4), ijsr(4,4), tag_snd(4), tag_rcv(4)
  1770. ! check input
1771. status = 0; if ( halo == 0 ) return
  1772. sz = shape(dist_array)
  1773. if(okdebug)then
  1774. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status )
  1775. IF_NOTOK_RETURN(status=1)
  1776. end if
  1777. ! get types and I/J start
  1778. CALL GET_HALO_TYPE( DistGrid, sz, halo, my_real, srtype, ijsr, status )
  1779. IF_NOTOK_RETURN(status=1)
  1780. ! Communicate
  1781. tag_snd = (/1,2,3,4/)
  1782. tag_rcv = (/3,4,1,2/)
  1783. neigh : do k=1,4
  1784. call MPI_ISEND( dist_array( ijsr(k,1), ijsr(k,2), 1, 1), 1, srtype(k), DistGrid%neighbors(k), tag_snd(k), &
  1785. localComm, req(k), ierr)
  1786. call MPI_IRECV( dist_array( ijsr(k,3), ijsr(k,4), 1, 1), 1, srtype(k), DistGrid%neighbors(k), tag_rcv(k), &
  1787. localComm, req(k+4), ierr)
  1788. end do neigh
  1789. call MPI_WAITALL(8, req, stat, ierr)
  1790. IF_NOTOK_MPI(status=1)
  1791. call FREE_DERIVED_TYPE( srtype )
  1792. #else
  1793. if ((halo/=0).and.DistGrid%is_periodic) then
  1794. i1 = DistGrid%i_strt
  1795. i2 = DistGrid%i_stop
  1796. j1 = DistGrid%j_strt
  1797. j2 = DistGrid%j_stop
  1798. dist_array(i1-halo:i1-1,j1:j2,:,:) = dist_array(i2-halo+1:i2,j1:j2,:,:)
  1799. dist_array(i2+1:i2+halo,j1:j2,:,:) = dist_array(i1:i1+halo-1,j1:j2,:,:)
  1800. end if
  1801. #endif
  1802. status = 0
  1803. END SUBROUTINE UPDATE_HALO_4D_R
  1804. !EOC
  1805. !--------------------------------------------------------------------------
  1806. ! TM5 !
  1807. !--------------------------------------------------------------------------
  1808. !BOP
  1809. !
  1810. ! !IROUTINE: GATHER_2D_R
  1811. !
  1812. ! !DESCRIPTION: gather local 2D arrays into a global 2D array
  1813. !\\
  1814. !\\
  1815. ! !INTERFACE:
  1816. !
  1817. SUBROUTINE GATHER_2D_R( DistGrid, dist_array, glbl_array, halo, status )
  1818. !
  1819. ! !INPUT PARAMETERS:
  1820. !
  1821. type(dist_grid), intent(in) :: DistGrid
  1822. integer, intent(in) :: halo
  1823. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  1824. !
  1825. ! !OUTPUT PARAMETERS:
  1826. !
  1827. real, intent(out) :: glbl_array(:,:)
  1828. integer, intent(out) :: status
  1829. !
  1830. ! !REVISION HISTORY:
  1831. ! 01 Nov 2011 - P. Le Sager - v0
  1832. !
  1833. ! !REMARKS:
1834. ! (1) I have not used mpi_gatherv because of the varying *receiving* sizes
  1835. !
  1836. !EOP
  1837. !------------------------------------------------------------------------
  1838. !BOC
  1839. character(len=*), parameter :: rname = mname//'gather_2d_r'
  1840. #ifndef MPI
  1841. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop )
  1842. status = 0
  1843. #else
  1844. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  1845. integer :: i, j, klm, i0, j0, i1, j1, sz(2)
  1846. status=0
  1847. ! check input, get derived types
  1848. !-------------------------------
  1849. sz = shape(dist_array)
  1850. if(okdebug)then
  1851. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  1852. IF_NOTOK_RETURN(status=1)
  1853. end if
  1854. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  1855. IF_NOTOK_RETURN(status=1)
  1856. i0 = DistGrid%i_strt
  1857. j0 = DistGrid%j_strt
  1858. ! ------- GATHER -------------
  1859. if ( isRoot ) then
  1860. ! get first chunk locally
  1861. i1 = DistGrid%i_stop
  1862. j1 = DistGrid%j_stop
  1863. glbl_array(i0:i1,j0:j1) = dist_array(i0:i1,j0:j1)
  1864. ! get remaining chunks from other pes
  1865. do klm=1,npes-1
  1866. i = DistGrid%bounds(1,klm)
  1867. j = DistGrid%bounds(3,klm)
  1868. call MPI_RECV( glbl_array(i,j), 1, ginterior(klm), klm, 1, &
  1869. localComm, stat, ierr)
  1870. end do
  1871. call FREE_DERIVED_TYPE( ginterior )
  1872. else
  1873. call MPI_SEND( dist_array(i0,j0), 1, linterior, root, 1, localComm, ierr)
  1874. call MPI_TYPE_FREE(linterior, ierr)
  1875. end if
  1876. #endif
  1877. END SUBROUTINE GATHER_2D_R
  1878. !EOC
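!
! Usage sketch (illustrative; 'field' and 'glbl' are assumed names): gather a
! distributed 2D field with a halo of 1 into a global array:
!
!   real :: glbl(DistGrid%im_region, DistGrid%jm_region)
!   call GATHER_2D_R( DistGrid, field, glbl, 1, status )
!   IF_NOTOK_RETURN(status=1)
!   ! in the MPI case only root receives the full field in 'glbl'
!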
  1879. !--------------------------------------------------------------------------
  1880. ! TM5 !
  1881. !--------------------------------------------------------------------------
  1882. !BOP
  1883. !
  1884. ! !IROUTINE: GATHER_2D_I
  1885. !
  1886. ! !DESCRIPTION: gather local 2D arrays into a global 2D array (integer version)
  1887. !\\
  1888. !\\
  1889. ! !INTERFACE:
  1890. !
  1891. SUBROUTINE GATHER_2D_I( DistGrid, dist_array, glbl_array, halo, status )
  1892. !
  1893. ! !INPUT PARAMETERS:
  1894. !
  1895. type(dist_grid), intent(in) :: DistGrid
  1896. integer, intent(in) :: halo
  1897. integer, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  1898. !
  1899. ! !OUTPUT PARAMETERS:
  1900. !
  1901. integer, intent(out) :: glbl_array(:,:)
  1902. integer, intent(out) :: status
  1903. !
  1904. ! !REVISION HISTORY:
  1905. ! 01 Nov 2011 - P. Le Sager - v0
  1906. !
  1907. !EOP
  1908. !------------------------------------------------------------------------
  1909. !BOC
  1910. character(len=*), parameter :: rname = mname//'gather_2d_i'
  1911. #ifndef MPI
  1912. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop )
  1913. status = 0
  1914. #else
  1915. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  1916. integer :: i, j, klm, i0, j0, i1, j1, sz(2)
  1917. status=0
  1918. ! check input, get derived types
  1919. !-------------------------------
  1920. sz = shape(dist_array)
  1921. if(okdebug)then
  1922. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  1923. IF_NOTOK_RETURN(status=1)
  1924. end if
  1925. CALL GET_INTERIOR_TYPE( DistGrid, sz, MPI_INTEGER, linterior, ginterior, status )
  1926. IF_NOTOK_RETURN(status=1)
  1927. i0 = DistGrid%i_strt
  1928. j0 = DistGrid%j_strt
  1929. ! ------- GATHER -------------
  1930. if ( isRoot ) then
  1931. ! get first chunk locally
  1932. i1 = DistGrid%i_stop
  1933. j1 = DistGrid%j_stop
  1934. glbl_array(i0:i1,j0:j1) = dist_array(i0:i1,j0:j1)
  1935. ! get remaining chunks from other pes
  1936. do klm=1,npes-1
  1937. i = DistGrid%bounds(1,klm)
  1938. j = DistGrid%bounds(3,klm)
  1939. call MPI_RECV( glbl_array(i,j), 1, ginterior(klm), klm, 1, &
  1940. localComm, stat, ierr)
  1941. end do
  1942. call FREE_DERIVED_TYPE( ginterior )
  1943. else
  1944. call MPI_SEND( dist_array(i0,j0), 1, linterior, root, 1, localComm, ierr)
  1945. call MPI_TYPE_FREE(linterior, ierr)
  1946. end if
  1947. #endif
  1948. END SUBROUTINE GATHER_2D_I
  1949. !EOC
  1950. !--------------------------------------------------------------------------
  1951. ! TM5 !
  1952. !--------------------------------------------------------------------------
  1953. !BOP
  1954. !
  1955. ! !IROUTINE: GATHER_3D_R
  1956. !
  1957. ! !DESCRIPTION: gather local 3D arrays into a global 3D array
  1958. !\\
  1959. !\\
  1960. ! !INTERFACE:
  1961. !
  1962. SUBROUTINE GATHER_3D_R( DistGrid, dist_array, glbl_array, halo, status )
  1963. !
  1964. ! !INPUT PARAMETERS:
  1965. !
  1966. type(dist_grid), intent(in) :: DistGrid
  1967. integer, intent(in) :: halo
  1968. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:)
  1969. !
  1970. ! !OUTPUT PARAMETERS:
  1971. !
  1972. real, intent(out) :: glbl_array(:,:,:)
  1973. integer, intent(out) :: status
  1974. !
  1975. ! !REVISION HISTORY:
  1976. ! 01 Nov 2011 - P. Le Sager - v0
  1977. !
  1978. !EOP
  1979. !------------------------------------------------------------------------
  1980. !BOC
  1981. character(len=*), parameter :: rname = mname//'gather_3d_r'
  1982. #ifndef MPI
  1983. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop, : )
  1984. status = 0
  1985. #else
  1986. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  1987. integer :: i, j, klm, i0, j0, i1, j1, sz(3)
  1988. status=0
  1989. ! ------- Check input & get derived types
  1990. sz = shape(dist_array)
  1991. if(okdebug)then
  1992. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  1993. IF_NOTOK_RETURN(status=1)
  1994. end if
  1995. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  1996. IF_NOTOK_RETURN(status=1)
  1997. i0 = DistGrid%i_strt
  1998. j0 = DistGrid%j_strt
  1999. ! ------- GATHER -------------
  2000. if ( isRoot ) then
  2001. ! get first chunk locally
  2002. i1 = DistGrid%i_stop
  2003. j1 = DistGrid%j_stop
  2004. glbl_array(i0:i1,j0:j1,:) = dist_array(i0:i1,j0:j1,:)
  2005. ! get remaining chunks from other pes
  2006. do klm=1,npes-1
  2007. i = DistGrid%bounds(1,klm)
  2008. j = DistGrid%bounds(3,klm)
  2009. call MPI_RECV( glbl_array(i,j,1), 1, ginterior(klm), klm, 1, &
  2010. localComm, stat, ierr)
  2011. end do
  2012. call FREE_DERIVED_TYPE( ginterior )
  2013. else
  2014. call MPI_SEND( dist_array(i0,j0,1), 1, linterior, root, 1, localComm, ierr)
  2015. call MPI_TYPE_FREE(linterior, ierr)
  2016. end if
  2017. #endif
  2018. END SUBROUTINE GATHER_3D_R
  2019. !EOC
  2020. !--------------------------------------------------------------------------
  2021. ! TM5 !
  2022. !--------------------------------------------------------------------------
  2023. !BOP
  2024. !
  2025. ! !IROUTINE: GATHER_4D_R
  2026. !
  2027. ! !DESCRIPTION: gather local 4D arrays into a global 4D array
  2028. !\\
  2029. !\\
  2030. ! !INTERFACE:
  2031. !
  2032. SUBROUTINE GATHER_4D_R( DistGrid, dist_array, glbl_array, halo, status )
  2033. !
  2034. ! !INPUT PARAMETERS:
  2035. !
  2036. type(dist_grid), intent(in) :: DistGrid
  2037. integer, intent(in) :: halo
  2038. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:,:)
  2039. !
  2040. ! !OUTPUT PARAMETERS:
  2041. !
  2042. real, intent(out) :: glbl_array(:,:,:,:)
  2043. integer, intent(out) :: status
  2044. !
  2045. ! !REVISION HISTORY:
  2046. ! 01 Nov 2011 - P. Le Sager - v0
  2047. !
  2048. ! !REMARKS:
2049. ! (1) the global array only needs to have its full (global) size on root
  2050. !
  2051. !EOP
  2052. !------------------------------------------------------------------------
  2053. !BOC
  2054. character(len=*), parameter :: rname = mname//'gather_4d_r'
  2055. #ifndef MPI
  2056. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop, :, :)
  2057. status = 0
  2058. #else
  2059. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  2060. integer :: i, j, klm, i0, j0, i1, j1, sz(4)
  2061. status=0
  2062. ! ------- Check input & get derived types
  2063. sz = shape(dist_array)
  2064. if(okdebug)then
  2065. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  2066. IF_NOTOK_RETURN(status=1)
  2067. end if
  2068. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  2069. IF_NOTOK_RETURN(status=1)
  2070. i0 = DistGrid%i_strt
  2071. j0 = DistGrid%j_strt
  2072. ! ------- GATHER -------------
  2073. if ( isRoot ) then ! RECV
  2074. ! get first chunk locally
  2075. i1 = DistGrid%i_stop
  2076. j1 = DistGrid%j_stop
  2077. glbl_array(i0:i1,j0:j1,:,:) = dist_array(i0:i1,j0:j1,:,:)
2078. ! get remaining chunks from other pes
  2079. do klm=1,npes-1
  2080. i = DistGrid%bounds(1,klm)
  2081. j = DistGrid%bounds(3,klm)
  2082. call MPI_RECV( glbl_array(i,j,1,1), 1, ginterior(klm), klm, 1, &
  2083. localComm, stat, ierr)
  2084. end do
  2085. call FREE_DERIVED_TYPE( ginterior )
  2086. else !SEND
  2087. call MPI_SEND( dist_array(i0,j0,1,1), 1, linterior, root, 1, localComm, ierr)
  2088. call MPI_TYPE_FREE(linterior, ierr)
  2089. end if
  2090. #endif
  2091. END SUBROUTINE GATHER_4D_R
  2092. !EOC
  2093. !--------------------------------------------------------------------------
  2094. ! TM5 !
  2095. !--------------------------------------------------------------------------
  2096. !BOP
  2097. !
  2098. ! !IROUTINE: GATHER_5D_R
  2099. !
  2100. ! !DESCRIPTION: gather local 5D arrays into a global 5D array
  2101. !\\
  2102. !\\
  2103. ! !INTERFACE:
  2104. !
  2105. SUBROUTINE GATHER_5D_R( DistGrid, dist_array, glbl_array, halo, status )
  2106. !
  2107. ! !INPUT PARAMETERS:
  2108. !
  2109. type(dist_grid), intent(in) :: DistGrid
  2110. integer, intent(in) :: halo
  2111. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:,:,:)
  2112. !
  2113. ! !OUTPUT PARAMETERS:
  2114. !
  2115. real, intent(out) :: glbl_array(:,:,:,:,:)
  2116. integer, intent(out) :: status
  2117. !
  2118. ! !REVISION HISTORY:
  2119. ! 01 Nov 2011 - P. Le Sager - v0
  2120. !
  2121. ! !REMARKS:
2122. ! (1) the global array only needs to have its full (global) size on root
  2123. !
  2124. !EOP
  2125. !------------------------------------------------------------------------
  2126. !BOC
  2127. character(len=*), parameter :: rname = mname//'gather_5d_r'
  2128. #ifndef MPI
  2129. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop, :,:,:)
  2130. status = 0
  2131. #else
  2132. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  2133. integer :: i, j, klm, i0, j0, i1, j1, sz(5)
  2134. status=0
  2135. ! ------- Check input & get derived types
  2136. sz = shape(dist_array)
  2137. if(okdebug)then
  2138. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  2139. IF_NOTOK_RETURN(status=1)
  2140. end if
  2141. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  2142. IF_NOTOK_RETURN(status=1)
  2143. i0 = DistGrid%i_strt
  2144. j0 = DistGrid%j_strt
  2145. ! ------- GATHER -------------
  2146. if ( isRoot ) then ! RECV
  2147. ! get first chunk locally
  2148. i1 = DistGrid%i_stop
  2149. j1 = DistGrid%j_stop
  2150. glbl_array(i0:i1,j0:j1,:,:,:) = dist_array(i0:i1,j0:j1,:,:,:)
2151. ! get remaining chunks from other pes
  2152. do klm=1,npes-1
  2153. i = DistGrid%bounds(1,klm)
  2154. j = DistGrid%bounds(3,klm)
  2155. call MPI_RECV( glbl_array(i,j,1,1,1), 1, ginterior(klm), klm, 1, &
  2156. localComm, stat, ierr)
  2157. end do
  2158. call FREE_DERIVED_TYPE( ginterior )
  2159. else !SEND
  2160. call MPI_SEND( dist_array(i0,j0,1,1,1), 1, linterior, root, 1, localComm, ierr)
  2161. call MPI_TYPE_FREE(linterior, ierr)
  2162. end if
  2163. #endif
  2164. END SUBROUTINE GATHER_5D_R
  2165. !EOC
  2166. !--------------------------------------------------------------------------
  2167. ! TM5 !
  2168. !--------------------------------------------------------------------------
  2169. !BOP
  2170. !
  2171. ! !IROUTINE: SCATTER_2D_R
  2172. !
  2173. ! !DESCRIPTION: scatter a 2D global real array
  2174. !\\
  2175. !\\
  2176. ! !INTERFACE:
  2177. !
  2178. SUBROUTINE SCATTER_2D_R( DistGrid, dist_array, glbl_array, halo, status )
  2179. !
  2180. ! !INPUT PARAMETERS:
  2181. !
  2182. type(dist_grid), intent(in) :: DistGrid
  2183. real, intent(in) :: glbl_array(:,:)
  2184. integer, intent(in) :: halo
  2185. !
  2186. ! !OUTPUT PARAMETERS:
  2187. !
  2188. real, intent(out) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  2189. integer, intent(out) :: status
  2190. !
  2191. ! !REVISION HISTORY:
  2192. ! 01 Nov 2011 - P. Le Sager - v0
  2193. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2194. !
  2195. ! !REMARKS: exactly the same as GATHER_2D_R, but inverting send/recv
  2196. !
  2197. !EOP
  2198. !------------------------------------------------------------------------
  2199. !BOC
  2200. character(len=*), parameter :: rname = mname//'scatter_2d_r'
  2201. #ifndef MPI
  2202. dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop ) = glbl_array
  2203. status = 0
  2204. #else
  2205. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  2206. integer :: i, j, klm, i0, j0, i1, j1, sz(2)
  2207. status=0
  2208. ! ------- Check input & get derived types
  2209. sz = shape(dist_array)
  2210. if(okdebug)then
  2211. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  2212. IF_NOTOK_RETURN(status=1)
  2213. end if
  2214. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  2215. IF_NOTOK_RETURN(status=1)
  2216. i0 = DistGrid%i_strt
  2217. j0 = DistGrid%j_strt
  2218. ! ------- SCATTER -------------
  2219. if ( isRoot ) then ! send
  2220. ! get first chunk locally
  2221. i1 = DistGrid%i_stop
  2222. j1 = DistGrid%j_stop
  2223. dist_array(i0:i1,j0:j1) = glbl_array(i0:i1,j0:j1)
  2224. ! send remaining chunks to other pes
  2225. do klm=1,npes-1
  2226. i = DistGrid%bounds(1,klm)
  2227. j = DistGrid%bounds(3,klm)
  2228. call MPI_SSEND( glbl_array(i,j), 1, ginterior(klm), klm, klm, localComm, ierr)
  2229. IF_NOTOK_MPI(status=1)
  2230. end do
  2231. call FREE_DERIVED_TYPE( ginterior )
  2232. else
  2233. call MPI_COMM_RANK(localComm, klm, ierr)
  2234. IF_NOTOK_MPI(status=1)
  2235. call MPI_RECV( dist_array(i0,j0), 1, linterior, root, klm, localComm, stat, ierr)
  2236. IF_NOTOK_MPI(status=1)
  2237. call MPI_TYPE_FREE(linterior, ierr)
  2238. IF_NOTOK_MPI(status=1)
  2239. end if
  2240. #endif
  2241. END SUBROUTINE SCATTER_2D_R
  2242. !EOC
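!
! Usage sketch (illustrative, inverse of the gather example above; 'field' and
! 'glbl' are assumed names): distribute a field held on root into the local
! tiles. The scatter fills the interior only, so the halo is refreshed
! afterwards:
!
!   call SCATTER_2D_R( DistGrid, field, glbl, 1, status )
!   IF_NOTOK_RETURN(status=1)
!   call UPDATE_HALO_2D_R( DistGrid, field, 1, status )
!   IF_NOTOK_RETURN(status=1)
!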
  2243. !--------------------------------------------------------------------------
  2244. ! TM5 !
  2245. !--------------------------------------------------------------------------
  2246. !BOP
  2247. !
  2248. ! !IROUTINE: SCATTER_3D_R
  2249. !
  2250. ! !DESCRIPTION: scatter 3D global real array
  2251. !\\
  2252. !\\
  2253. ! !INTERFACE:
  2254. !
  2255. SUBROUTINE SCATTER_3D_R( DistGrid, dist_array, glbl_array, halo, status )
  2256. !
  2257. ! !INPUT PARAMETERS:
  2258. !
  2259. type(dist_grid), intent(in) :: DistGrid
  2260. integer, intent(in) :: halo
  2261. real, intent(in) :: glbl_array(:,:,:)
  2262. !
  2263. ! !OUTPUT PARAMETERS:
  2264. !
  2265. real, intent(out) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:)
  2266. integer, intent(out) :: status
  2267. !
  2268. ! !REVISION HISTORY:
  2269. ! 01 Nov 2011 - P. Le Sager - v0
  2270. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2271. !
2272. ! !REMARKS: the global array only needs to have its full (global) size on root
  2273. !
  2274. !EOP
  2275. !------------------------------------------------------------------------
  2276. !BOC
  2277. character(len=*), parameter :: rname = mname//'scatter_3d_r'
  2278. #ifndef MPI
  2279. dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop, :) = glbl_array
  2280. status = 0
  2281. #else
  2282. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  2283. integer :: i, j, klm, i0, j0, i1, j1, sz(3)
  2284. status=0
  2285. ! ------- Check input & get derived types
  2286. sz = shape(dist_array)
  2287. if(okdebug)then
  2288. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  2289. IF_NOTOK_RETURN(status=1)
  2290. end if
  2291. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  2292. IF_NOTOK_RETURN(status=1)
  2293. i0 = DistGrid%i_strt
  2294. j0 = DistGrid%j_strt
  2295. ! ------- SCATTER -------------
  2296. if ( isRoot ) then ! send
  2297. ! get first chunk locally
  2298. i1 = DistGrid%i_stop
  2299. j1 = DistGrid%j_stop
  2300. dist_array(i0:i1,j0:j1,:) = glbl_array(i0:i1,j0:j1,:)
  2301. ! send remaining chunks to other pes
  2302. do klm=1,npes-1
  2303. i = DistGrid%bounds(1,klm)
  2304. j = DistGrid%bounds(3,klm)
  2305. call MPI_SSEND( glbl_array(i,j,1), 1, ginterior(klm), klm, 1, localComm, ierr)
  2306. end do
  2307. call FREE_DERIVED_TYPE( ginterior )
  2308. else
  2309. call MPI_RECV( dist_array(i0,j0,1), 1, linterior, root, 1, localComm, stat, ierr)
  2310. call MPI_TYPE_FREE(linterior, ierr)
  2311. end if
  2312. #endif
  2313. END SUBROUTINE SCATTER_3D_R
  2314. !EOC
  2315. !--------------------------------------------------------------------------
  2316. ! TM5 !
  2317. !--------------------------------------------------------------------------
  2318. !BOP
  2319. !
  2320. ! !IROUTINE: SCATTER_4D_R
  2321. !
  2322. ! !DESCRIPTION: scatter 4D real array
  2323. !\\
  2324. !\\
  2325. ! !INTERFACE:
  2326. !
  2327. SUBROUTINE SCATTER_4D_R( DistGrid, dist_array, glbl_array, halo, status )
  2328. !
  2329. ! !INPUT PARAMETERS:
  2330. !
  2331. type(dist_grid), intent(in) :: DistGrid
  2332. integer, intent(in) :: halo
  2333. real, intent(in) :: glbl_array(:,:,:,:)
  2334. !
  2335. ! !OUTPUT PARAMETERS:
  2336. !
  2337. real, intent(out) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:,:)
  2338. integer, intent(out) :: status
  2339. !
  2340. ! !REVISION HISTORY:
  2341. ! 01 Nov 2011 - P. Le Sager - v0
  2342. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2343. !
  2344. !EOP
  2345. !------------------------------------------------------------------------
  2346. !BOC
  2347. character(len=*), parameter :: rname = mname//'scatter_4d_r'
  2348. #ifndef MPI
  2349. dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop,:,:) = glbl_array
  2350. status = 0
  2351. #else
  2352. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  2353. integer :: i, j, klm, i0, j0, i1, j1, sz(4)
  2354. status=0
  2355. ! ------- Check input & get derived types
  2356. sz = shape(dist_array)
  2357. if(okdebug)then
  2358. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  2359. IF_NOTOK_RETURN(status=1)
  2360. end if
  2361. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  2362. IF_NOTOK_RETURN(status=1)
  2363. i0 = DistGrid%i_strt
  2364. j0 = DistGrid%j_strt
  2365. ! ------- SCATTER -------------
  2366. if ( isRoot ) then ! send
  2367. ! get first chunk locally
  2368. i1 = DistGrid%i_stop
  2369. j1 = DistGrid%j_stop
  2370. dist_array(i0:i1,j0:j1,:,:) = glbl_array(i0:i1,j0:j1,:,:)
  2371. ! send remaining chunks to other pes
  2372. do klm=1,npes-1
  2373. i = DistGrid%bounds(1,klm)
  2374. j = DistGrid%bounds(3,klm)
  2375. call MPI_SSEND( glbl_array(i,j,1,1), 1, ginterior(klm), klm, 1, localComm, ierr)
  2376. end do
  2377. call FREE_DERIVED_TYPE( ginterior )
  2378. else
  2379. call MPI_RECV( dist_array(i0,j0,1,1), 1, linterior, root, 1, localComm, stat, ierr)
  2380. call MPI_TYPE_FREE(linterior, ierr)
  2381. end if
  2382. #endif
  2383. END SUBROUTINE SCATTER_4D_R
  2384. !EOC
  2385. !--------------------------------------------------------------------------
  2386. ! TM5 !
  2387. !--------------------------------------------------------------------------
  2388. !BOP
  2389. !
  2390. ! !IROUTINE: SCATTER_IBAND_1D_R
  2391. !
  2392. ! !DESCRIPTION: scatter a meridional real vector (1D) from root
  2393. !\\
  2394. !\\
  2395. ! !INTERFACE:
  2396. !
  2397. SUBROUTINE SCATTER_IBAND_1D_R( DistGrid, dist_array, glbl_array, status, iref )
  2398. !
  2399. ! !INPUT PARAMETERS:
  2400. !
  2401. type(dist_grid), intent(in) :: DistGrid
  2402. real, intent(in) :: glbl_array(:)
  2403. integer, optional, intent(in) :: iref ! to find targets, default=1
  2404. !
  2405. ! !OUTPUT PARAMETERS:
  2406. !
  2407. real, intent(out) :: dist_array(DistGrid%j_strt:)
  2408. integer, intent(out) :: status
  2409. !
  2410. ! !REVISION HISTORY:
  2411. ! 01 Nov 2011 - P. Le Sager - v0
  2412. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2413. !
  2414. ! !REMARKS: 1D version, along J index, of scatter_2d_r
  2415. !
  2416. !EOP
  2417. !------------------------------------------------------------------------
  2418. !BOC
  2419. character(len=*), parameter :: rname = mname//'scatter_iband_1d_r'
  2420. #ifndef MPI
  2421. dist_array( DistGrid%j_strt:DistGrid%j_stop ) = glbl_array
  2422. status = 0
  2423. #else
  2424. integer :: stat(MPI_STATUS_SIZE)
  2425. integer :: x_iref, n, klm, i0, j0, i1, j1, sz(1), i0t, j0t, i1t, j1t
  2426. status=0
  2427. ! ------- Check inputs
  2428. x_iref=1
  2429. if(present(iref)) x_iref=iref
  2430. sz = shape(dist_array)
  2431. i0 = DistGrid%i_strt
  2432. i1 = DistGrid%i_stop
  2433. j0 = DistGrid%j_strt
  2434. j1 = DistGrid%j_stop
  2435. ! ------- SEND/RECV -------------
  2436. if(okdebug)then
  2437. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, shape(glbl_array), iband=.true.)
  2438. IF_NOTOK_RETURN(status=1)
  2439. end if
  2440. if ( isRoot ) then
  2441. ! local case
  2442. if((x_iref>=i0).and.(x_iref<=i1)) dist_array(j0:j1) = glbl_array(j0:j1)
  2443. ! send remaining chunks to other pes
  2444. do klm=1,npes-1
  2445. i0t = DistGrid%bounds(1,klm)
  2446. i1t = DistGrid%bounds(2,klm)
  2447. j0t = DistGrid%bounds(3,klm)
  2448. j1t = DistGrid%bounds(4,klm)
  2449. ! is klm a target processor?
  2450. if((x_iref<i0t).or.(x_iref>i1t))cycle
  2451. n=j1t-j0t+1
  2452. call MPI_SSEND( glbl_array(j0t), n, my_real, klm, 1, localComm, ierr)
  2453. end do
  2454. else
  2455. ! are we on a target processor?
  2456. if((x_iref<i0).or.(x_iref>i1))return
  2457. n=j1-j0+1
  2458. call MPI_RECV( dist_array(j0), n, my_real, root, 1, localComm, stat, ierr)
  2459. end if
  2460. #endif
  2461. END SUBROUTINE SCATTER_IBAND_1D_R
  2462. !EOC
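!--------------------------------------------------------------------------
! Usage sketch for SCATTER_IBAND_1D_R (comment only). A global meridional
! vector of size jm_region, filled on root, reaches only the PEs whose
! i-range contains IREF (default 1, i.e. the west-border column of PEs).
! Names are illustrative; the generic scatter_i_band interface may also be
! used (see TESTCOMM below).
!
!    real :: band_glb(dgrid%jm_region)                 ! valid on root
!    real :: band_loc(dgrid%j_strt:dgrid%j_stop)
!    call SCATTER_IBAND_1D_R( dgrid, band_loc, band_glb, status, iref=1 )
!    IF_NOTOK_RETURN(status=1)
!--------------------------------------------------------------------------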
  2463. !--------------------------------------------------------------------------
  2464. ! TM5 !
  2465. !--------------------------------------------------------------------------
  2466. !BOP
  2467. !
  2468. ! !IROUTINE: SCATTER_JBAND_1D_R
  2469. !
  2470. ! !DESCRIPTION: scatter a zonal real vector (1D) from root
  2471. !\\
  2472. !\\
  2473. ! !INTERFACE:
  2474. !
  2475. SUBROUTINE SCATTER_JBAND_1D_R( DistGrid, dist_array, glbl_array, status, jref )
  2476. !
  2477. ! !INPUT PARAMETERS:
  2478. !
  2479. type(dist_grid), intent(in) :: DistGrid
  2480. real, intent(in) :: glbl_array(:)
  2481. integer, optional, intent(in) :: jref ! to find targets, default=1
  2482. !
  2483. ! !OUTPUT PARAMETERS:
  2484. !
  2485. real, intent(out) :: dist_array(DistGrid%i_strt:)
  2486. integer, intent(out) :: status
  2487. !
  2488. ! !REVISION HISTORY:
  2489. ! 01 Nov 2011 - P. Le Sager - v0
  2490. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2491. !
  2492. ! !REMARKS: 1D version, along I index, of scatter_2d_r
  2493. !
  2494. !EOP
  2495. !------------------------------------------------------------------------
  2496. !BOC
  2497. character(len=*), parameter :: rname = mname//'scatter_jband_1d_r'
  2498. #ifndef MPI
  2499. dist_array( DistGrid%i_strt:DistGrid%i_stop ) = glbl_array
  2500. status = 0
  2501. #else
  2502. integer :: stat(MPI_STATUS_SIZE)
  2503. integer :: x_jref, n, klm, i0, j0, i1, j1, sz(1), i0t, j0t, i1t, j1t
  2504. status=0
  2505. ! ------- Check inputs
  2506. x_jref=1
  2507. if(present(jref)) x_jref=jref
  2508. sz = shape(dist_array)
  2509. if(okdebug)then
  2510. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, shape(glbl_array), jband=.true.)
  2511. IF_NOTOK_RETURN(status=1)
  2512. end if
  2513. i0 = DistGrid%i_strt
  2514. i1 = DistGrid%i_stop
  2515. j0 = DistGrid%j_strt
  2516. j1 = DistGrid%j_stop
  2517. ! ------- SEND/RECV -------------
  2518. if ( isRoot ) then
  2519. ! local case
  2520. if((x_jref>=j0).and.(x_jref<=j1)) dist_array(i0:i1) = glbl_array(i0:i1)
  2521. ! send remaining chunks to other pes
  2522. do klm=1,npes-1
  2523. i0t = DistGrid%bounds(1,klm)
  2524. i1t = DistGrid%bounds(2,klm)
  2525. j0t = DistGrid%bounds(3,klm)
  2526. j1t = DistGrid%bounds(4,klm)
  2527. ! is klm a target processor?
  2528. if((x_jref<j0t).or.(x_jref>j1t))cycle
  2529. n=i1t-i0t+1
  2530. call MPI_SSEND( glbl_array(i0t), n, my_real, klm, 1, localComm, ierr)
  2531. end do
  2532. else
  2533. ! are we on a target processor?
  2534. if((x_jref<j0).or.(x_jref>j1))return
  2535. n=i1-i0+1
  2536. call MPI_RECV( dist_array(i0), n, my_real, root, 1, localComm, stat, ierr)
  2537. end if
  2538. #endif
  2539. END SUBROUTINE SCATTER_JBAND_1D_R
  2540. !EOC
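!--------------------------------------------------------------------------
! Usage sketch for SCATTER_JBAND_1D_R (comment only). A global zonal vector
! of size im_region, filled on root, reaches only the PEs whose j-range
! contains JREF. Names are illustrative; the generic scatter_j_band
! interface may also be used (see TESTCOMM below).
!
!    real :: band_glb(dgrid%im_region)                 ! valid on root
!    real :: band_loc(dgrid%i_strt:dgrid%i_stop)
!    call SCATTER_JBAND_1D_R( dgrid, band_loc, band_glb, status, &
!                             jref=dgrid%jm_region )   ! north-border row
!    IF_NOTOK_RETURN(status=1)
!--------------------------------------------------------------------------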
  2541. !--------------------------------------------------------------------------
  2542. ! TM5 !
  2543. !--------------------------------------------------------------------------
  2544. !BOP
  2545. !
  2546. ! !IROUTINE: SCATTER_JBAND_2D_R
  2547. !
  2548. ! !DESCRIPTION: scatter a zonal slice (2D) from root
  2549. !\\
  2550. !\\
  2551. ! !INTERFACE:
  2552. !
  2553. SUBROUTINE SCATTER_JBAND_2D_R( DistGrid, dist_array, glbl_array, status, jref )
  2554. !
  2555. ! !INPUT PARAMETERS:
  2556. !
  2557. type(dist_grid), intent(in) :: DistGrid
  2558. real, intent(in) :: glbl_array(:,:)
  2559. integer, optional, intent(in) :: jref ! to find targets, default=1
  2560. !
  2561. ! !OUTPUT PARAMETERS:
  2562. !
  2563. real, intent(out) :: dist_array(DistGrid%i_strt:,:)
  2564. integer, intent(out) :: status
  2565. !
  2566. ! !REVISION HISTORY:
  2567. ! 01 Nov 2011 - P. Le Sager - v0
  2568. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2569. !
  2570. ! !REMARKS: 2D version, along I index, of scatter_3d_r
  2571. !
  2572. !EOP
  2573. !------------------------------------------------------------------------
  2574. !BOC
  2575. character(len=*), parameter :: rname = mname//'scatter_jband_2d_r'
  2576. #ifndef MPI
  2577. dist_array( DistGrid%i_strt:DistGrid%i_stop,: ) = glbl_array
  2578. status = 0
  2579. #else
  2580. integer :: stat(MPI_STATUS_SIZE), subarr
  2581. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  2582. integer :: x_jref, n, klm, sz(2), gsz(2)
  2583. status=0
  2584. ! ------- Check inputs
  2585. x_jref=1
  2586. if(present(jref)) x_jref=jref
  2587. sz = shape(dist_array)
  2588. gsz = shape(glbl_array)
  2589. if(okdebug)then
  2590. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, jband=.true.)
  2591. IF_NOTOK_RETURN(status=1)
  2592. end if
  2593. i0 = DistGrid%i_strt
  2594. i1 = DistGrid%i_stop
  2595. j0 = DistGrid%j_strt
  2596. j1 = DistGrid%j_stop
  2597. ! ------- SEND/RECV -------------
  2598. if ( isRoot ) then
  2599. !local case
  2600. if((x_jref>=j0).and.(x_jref<=j1)) dist_array(i0:i1,:) = glbl_array(i0:i1,:)
  2601. ! send remaining chunks to other pes
  2602. do klm=1,npes-1
  2603. i0t = DistGrid%bounds(1,klm)
  2604. i1t = DistGrid%bounds(2,klm)
  2605. j0t = DistGrid%bounds(3,klm)
  2606. j1t = DistGrid%bounds(4,klm)
  2607. ! if klm is a target processor, pack and send
  2608. if((x_jref<j0t).or.(x_jref>j1t))cycle
  2609. n=i1t-i0t+1
  2610. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, subarr, ierr)
  2611. call MPI_TYPE_COMMIT (subarr, ierr)
  2612. call MPI_SSEND( glbl_array(i0t,1), 1, subarr, klm, 1, localComm, ierr)
  2613. call MPI_TYPE_FREE (subarr, ierr)
  2614. end do
  2615. else
  2616. ! are we on a target processor?
  2617. if((x_jref<j0).or.(x_jref>j1))return
  2618. n=i1-i0+1
  2619. call MPI_RECV( dist_array(i0,1), n*sz(2), my_real, root, 1, localComm, stat, ierr)
  2620. end if
  2621. #endif
  2622. END SUBROUTINE SCATTER_JBAND_2D_R
  2623. !EOC
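!--------------------------------------------------------------------------
! Usage sketch for SCATTER_JBAND_2D_R (comment only). A zonal slice
! [1:im_region, 1:lm] on root is distributed as [i_strt:i_stop, 1:lm] to
! the PEs whose j-range contains JREF. 'lm' and the other names are
! illustrative.
!
!    real :: slice_glb(dgrid%im_region, lm)            ! valid on root
!    real :: slice_loc(dgrid%i_strt:dgrid%i_stop, lm)
!    call SCATTER_JBAND_2D_R( dgrid, slice_loc, slice_glb, status, jref=1 )
!    IF_NOTOK_RETURN(status=1)
!--------------------------------------------------------------------------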
  2624. !--------------------------------------------------------------------------
  2625. ! TM5 !
  2626. !--------------------------------------------------------------------------
  2627. !BOP
  2628. !
  2629. ! !IROUTINE: SCATTER_IBAND_2D_R
  2630. !
  2631. ! !DESCRIPTION: scatter a meridional real array (2D) from root
  2632. !\\
  2633. !\\
  2634. ! !INTERFACE:
  2635. !
  2636. SUBROUTINE SCATTER_IBAND_2D_R( DistGrid, dist_array, glbl_array, status, iref )
  2637. !
  2638. ! !INPUT PARAMETERS:
  2639. !
  2640. type(dist_grid), intent(in) :: DistGrid
  2641. real, intent(in) :: glbl_array(:,:)
  2642. integer, optional, intent(in) :: iref ! to find targets, default=1
  2643. !
  2644. ! !OUTPUT PARAMETERS:
  2645. !
  2646. real, intent(out) :: dist_array(DistGrid%j_strt:,:)
  2647. integer, intent(out) :: status
  2648. !
  2649. ! !REVISION HISTORY:
  2650. ! 01 Nov 2011 - P. Le Sager - v0
  2651. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2652. !
  2653. ! !REMARKS: 2D version, along J index, of scatter_3d_r
  2654. !
  2655. !EOP
  2656. !------------------------------------------------------------------------
  2657. !BOC
  2658. character(len=*), parameter :: rname = mname//'scatter_iband_2d_r'
  2659. #ifndef MPI
  2660. dist_array( DistGrid%j_strt:DistGrid%j_stop,: ) = glbl_array
  2661. status = 0
  2662. #else
  2663. integer :: stat(MPI_STATUS_SIZE), subarr
  2664. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  2665. integer :: x_iref, n, klm, sz(2), gsz(2)
  2666. status=0
  2667. ! ------- Check inputs
  2668. x_iref=1
  2669. if(present(iref)) x_iref=iref
  2670. sz = shape(dist_array)
  2671. gsz = shape(glbl_array)
  2672. if(okdebug)then
  2673. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, iband=.true.)
  2674. IF_NOTOK_RETURN(status=1)
  2675. end if
  2676. i0 = DistGrid%i_strt
  2677. i1 = DistGrid%i_stop
  2678. j0 = DistGrid%j_strt
  2679. j1 = DistGrid%j_stop
  2680. ! ------- SEND/RECV -------------
  2681. if ( isRoot ) then
  2682. ! local case
  2683. if((x_iref>=i0).and.(x_iref<=i1)) dist_array(j0:j1,:) = glbl_array(j0:j1,:)
  2684. ! send remaining chunks to other pes
  2685. do klm=1,npes-1
  2686. i0t = DistGrid%bounds(1,klm)
  2687. i1t = DistGrid%bounds(2,klm)
  2688. j0t = DistGrid%bounds(3,klm)
  2689. j1t = DistGrid%bounds(4,klm)
  2690. ! is klm a target processor?
  2691. if((x_iref<i0t).or.(x_iref>i1t))cycle
  2692. n=j1t-j0t+1
  2693. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, subarr, ierr)
  2694. call MPI_TYPE_COMMIT (subarr, ierr)
  2695. call MPI_SSEND( glbl_array(j0t,1), 1, subarr, klm, 1, localComm, ierr)
  2696. call MPI_TYPE_FREE (subarr, ierr)
  2697. end do
  2698. else
  2699. ! are we on a target processor?
  2700. if((x_iref<i0).or.(x_iref>i1))return
  2701. n=j1-j0+1
  2702. call MPI_RECV( dist_array(j0,1), n*sz(2), my_real, root, 1, localComm, stat, ierr)
  2703. end if
  2704. #endif
  2705. END SUBROUTINE SCATTER_IBAND_2D_R
  2706. !EOC
  2707. !--------------------------------------------------------------------------
  2708. ! TM5 !
  2709. !--------------------------------------------------------------------------
  2710. !BOP
  2711. !
  2712. ! !IROUTINE: SCATTER_JBAND_3D_R
  2713. !
2714. ! !DESCRIPTION: scatter a zonal slab (3D) from root (usage sketch after the routine)
  2715. !\\
  2716. !\\
  2717. ! !INTERFACE:
  2718. !
  2719. SUBROUTINE SCATTER_JBAND_3D_R( DistGrid, dist_array, glbl_array, status, jref, rowcom )
  2720. !
  2721. ! !INPUT PARAMETERS:
  2722. !
  2723. type(dist_grid), intent(in) :: DistGrid
  2724. real, intent(in) :: glbl_array(:,:,:)
  2725. integer, optional, intent(in) :: jref ! to find targets, default=1
  2726. logical, optional, intent(in) :: rowcom ! to scatter from row_root instead of global root
  2727. !
  2728. ! !OUTPUT PARAMETERS:
  2729. !
  2730. real, intent(out) :: dist_array(DistGrid%i_strt:,:,:)
  2731. integer, intent(out) :: status
  2732. !
  2733. ! !REVISION HISTORY:
  2734. !
2735. ! !REMARKS: 3D version, along I index, of scatter_4d_r
  2736. !
  2737. !EOP
  2738. !------------------------------------------------------------------------
  2739. !BOC
  2740. character(len=*), parameter :: rname = mname//'scatter_jband_3d_r'
  2741. #ifndef MPI
  2742. dist_array( DistGrid%i_strt:DistGrid%i_stop,:,: ) = glbl_array
  2743. status = 0
  2744. #else
  2745. integer :: stat(MPI_STATUS_SIZE), subarr
  2746. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  2747. integer :: x_jref, n, klm, sz(3), gsz(3), slab, tgt_root
  2748. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, stride
  2749. logical :: selectRoot
  2750. status=0
  2751. ! ------- Check inputs
  2752. x_jref=1
  2753. if(present(jref)) x_jref=jref
  2754. i0 = DistGrid%i_strt
  2755. i1 = DistGrid%i_stop
  2756. j0 = DistGrid%j_strt
  2757. j1 = DistGrid%j_stop
  2758. ! by default scatter from global root
  2759. selectRoot = isRoot
  2760. tgt_root = root
  2761. if (present(rowcom)) then
  2762. if (rowcom) then
  2763. selectRoot = isRowRoot.and.(x_jref>=j0).and.(x_jref<=j1)
  2764. tgt_root = RowRootId
  2765. endif
  2766. endif
  2767. sz = shape(dist_array)
  2768. gsz = shape(glbl_array)
  2769. if(okdebug)then
  2770. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, jband=.true., has_global=selectRoot)
  2771. IF_NOTOK_RETURN(status=1)
  2772. end if
  2773. ! ------- SEND/RECV -------------
  2774. if ( selectRoot ) then
  2775. !local case
  2776. if((x_jref>=j0).and.(x_jref<=j1)) dist_array(i0:i1,:,:) = glbl_array(i0:i1,:,:)
  2777. ! send remaining chunks to other pes
  2778. do klm=0,npes-1
  2779. if (klm==myid) cycle ! skip local case
  2780. i0t = DistGrid%bounds(1,klm)
  2781. i1t = DistGrid%bounds(2,klm)
  2782. j0t = DistGrid%bounds(3,klm)
  2783. j1t = DistGrid%bounds(4,klm)
  2784. ! if klm is a target processor, pack and send
  2785. if((x_jref<j0t).or.(x_jref>j1t))cycle
  2786. n=i1t-i0t+1
  2787. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, slab, ierr)
  2788. CALL MPI_TYPE_GET_EXTENT( my_real, lb, sizeoftype, ierr)
  2789. stride = gsz(1)*gsz(2)*sizeoftype
  2790. call MPI_TYPE_CREATE_HVECTOR (sz(3), 1, stride, slab, subarr, ierr)
  2791. call MPI_TYPE_FREE (slab, ierr)
  2792. call MPI_TYPE_COMMIT (subarr, ierr)
  2793. call MPI_SSEND( glbl_array(i0t,1,1), 1, subarr, klm, 1, localComm, ierr)
  2794. call MPI_TYPE_FREE (subarr, ierr)
  2795. end do
  2796. else
  2797. ! are we on a target processor?
  2798. if((x_jref<j0).or.(x_jref>j1))return
  2799. n=i1-i0+1
  2800. call MPI_RECV( dist_array(i0,1,1), n*sz(2)*sz(3), my_real, tgt_root, 1, localComm, stat, ierr)
  2801. end if
  2802. #endif
  2803. END SUBROUTINE SCATTER_JBAND_3D_R
  2804. !EOC
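!--------------------------------------------------------------------------
! Usage sketch for SCATTER_JBAND_3D_R (comment only), here with the ROWCOM
! option: the slab is then scattered from the row-root of the JREF row
! rather than from the global root, so at least all PEs of that row must
! make the call. Names and extents ('lm', 'ntrac', 'jband') are illustrative.
!
!    real :: slab_glb(dgrid%im_region, lm, ntrac)      ! valid on the sender
!    real :: slab_loc(dgrid%i_strt:dgrid%i_stop, lm, ntrac)
!    call SCATTER_JBAND_3D_R( dgrid, slab_loc, slab_glb, status, &
!                             jref=jband, rowcom=.true. )
!    IF_NOTOK_RETURN(status=1)
!--------------------------------------------------------------------------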
  2805. !--------------------------------------------------------------------------
  2806. ! TM5 !
  2807. !--------------------------------------------------------------------------
  2808. !BOP
  2809. !
  2810. ! !IROUTINE: SCATTER_JBAND_4D_R
  2811. !
  2812. ! !DESCRIPTION: scatter a zonal slice (4D) from root
  2813. !\\
  2814. !\\
  2815. ! !INTERFACE:
  2816. !
  2817. SUBROUTINE SCATTER_JBAND_4D_R( DistGrid, dist_array, glbl_array, status, jref, rowcom )
  2818. !
  2819. ! !INPUT PARAMETERS:
  2820. !
  2821. type(dist_grid), intent(in) :: DistGrid
  2822. real, intent(in) :: glbl_array(:,:,:,:)
  2823. integer, optional, intent(in) :: jref ! to find targets, default=1
  2824. logical, optional, intent(in) :: rowcom ! to scatter from row_root instead of global root
  2825. !
  2826. ! !OUTPUT PARAMETERS:
  2827. !
  2828. real, intent(out) :: dist_array(DistGrid%i_strt:,:,:,:)
  2829. integer, intent(out) :: status
  2830. !
  2831. ! !REVISION HISTORY:
  2832. ! 17 Feb 2015 - Ph. Le Sager - v0
  2833. !
2834. ! !REMARKS: 4D analogue, along I index, of scatter_jband_3d_r
  2835. !
  2836. !EOP
  2837. !------------------------------------------------------------------------
  2838. !BOC
  2839. character(len=*), parameter :: rname = mname//'scatter_jband_4d_r'
  2840. #ifndef MPI
2841. dist_array( DistGrid%i_strt:DistGrid%i_stop,:,:,: ) = glbl_array
  2842. status = 0
  2843. #else
  2844. integer :: stat(MPI_STATUS_SIZE), subarr
  2845. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  2846. integer :: x_jref, n, klm, sz(4), gsz(4), slab, tgt_root
  2847. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, stride
  2848. logical :: selectRoot
  2849. status=0
  2850. ! ------- Check inputs
  2851. x_jref=1
  2852. if(present(jref)) x_jref=jref
  2853. i0 = DistGrid%i_strt
  2854. i1 = DistGrid%i_stop
  2855. j0 = DistGrid%j_strt
  2856. j1 = DistGrid%j_stop
  2857. ! by default scatter from global root
  2858. selectRoot = isRoot
  2859. tgt_root = root
  2860. if (present(rowcom)) then
  2861. if (rowcom) then
  2862. selectRoot = isRowRoot.and.(x_jref>=j0).and.(x_jref<=j1)
  2863. tgt_root = RowRootId
  2864. endif
  2865. endif
  2866. sz = shape(dist_array)
  2867. gsz = shape(glbl_array)
  2868. if(okdebug)then
  2869. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, jband=.true., has_global=selectRoot)
  2870. IF_NOTOK_RETURN(status=1)
  2871. end if
  2872. ! ------- SEND/RECV -------------
  2873. if ( selectRoot ) then
  2874. !local case
  2875. if((x_jref>=j0).and.(x_jref<=j1)) dist_array(i0:i1,:,:,:) = glbl_array(i0:i1,:,:,:)
  2876. ! send remaining chunks to other pes
  2877. do klm=0,npes-1
  2878. if (klm==myid) cycle ! skip local case
  2879. i0t = DistGrid%bounds(1,klm)
  2880. i1t = DistGrid%bounds(2,klm)
  2881. j0t = DistGrid%bounds(3,klm)
  2882. j1t = DistGrid%bounds(4,klm)
  2883. ! if klm is a target processor, pack and send
  2884. if((x_jref<j0t).or.(x_jref>j1t))cycle
  2885. n=i1t-i0t+1
  2886. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, slab, ierr)
  2887. CALL MPI_TYPE_GET_EXTENT( my_real, lb, sizeoftype, ierr)
  2888. stride = gsz(1)*gsz(2)*sizeoftype
  2889. call MPI_TYPE_CREATE_HVECTOR (sz(3)*sz(4), 1, stride, slab, subarr, ierr)
  2890. call MPI_TYPE_FREE (slab, ierr)
  2891. call MPI_TYPE_COMMIT (subarr, ierr)
  2892. call MPI_SSEND( glbl_array(i0t,1,1,1), 1, subarr, klm, 1, localComm, ierr)
  2893. call MPI_TYPE_FREE (subarr, ierr)
  2894. end do
  2895. else
  2896. ! are we on a target processor?
  2897. if((x_jref<j0).or.(x_jref>j1))return
  2898. n=i1-i0+1
  2899. call MPI_RECV( dist_array(i0,1,1,1), n*sz(2)*sz(3)*sz(4), my_real, tgt_root, 1, localComm, stat, ierr)
  2900. end if
  2901. #endif
  2902. END SUBROUTINE SCATTER_JBAND_4D_R
  2903. !EOC
  2904. !--------------------------------------------------------------------------
  2905. ! TM5 !
  2906. !--------------------------------------------------------------------------
  2907. !BOP
  2908. !
  2909. ! !IROUTINE: GATHER_JBAND_2D_R
  2910. !
2911. ! !DESCRIPTION: gather a zonal slice (2D) on root, for 2D arrays whose first
2912. ! dimension is distributed along I (making it a J-band) and whose
2913. ! second dimension is *not* distributed along J. For example:
2914. ! [i1:i2, lev] or [i1:i2, trac]. A usage sketch follows the routine.
  2915. !\\
  2916. !\\
  2917. ! !INTERFACE:
  2918. !
  2919. SUBROUTINE GATHER_JBAND_2D_R( DistGrid, dist_array, glbl_array, status, jref )
  2920. !
  2921. ! !INPUT PARAMETERS:
  2922. !
  2923. type(dist_grid), intent(in) :: DistGrid
  2924. real, intent(in) :: dist_array(DistGrid%i_strt:,:)
  2925. integer, intent(in) :: jref ! to find sources
  2926. !
  2927. ! !OUTPUT PARAMETERS:
  2928. !
  2929. real, intent(out) :: glbl_array(:,:)
  2930. integer, intent(out) :: status
  2931. !
  2932. ! !REVISION HISTORY:
  2933. ! 01 Nov 2011 - P. Le Sager - v0
  2934. !
  2935. ! !REMARKS:
  2936. !
  2937. !EOP
  2938. !------------------------------------------------------------------------
  2939. !BOC
  2940. character(len=*), parameter :: rname = mname//'gather_jband_2d_r'
  2941. #ifndef MPI
  2942. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop,: )
  2943. status = 0
  2944. #else
  2945. integer :: stat(MPI_STATUS_SIZE), subarr
  2946. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  2947. integer :: n, klm, sz(2), gsz(2)
  2948. status=0
  2949. ! ------- Check inputs
  2950. sz = shape(dist_array)
  2951. gsz = shape(glbl_array)
  2952. if(okdebug)then
  2953. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, jband=.true.)
  2954. IF_NOTOK_RETURN(status=1)
  2955. end if
  2956. i0 = DistGrid%i_strt
  2957. i1 = DistGrid%i_stop
  2958. j0 = DistGrid%j_strt
  2959. j1 = DistGrid%j_stop
  2960. ! ------- SEND/RECV -------------
  2961. if ( isRoot ) then
  2962. ! local case
  2963. if((jref>=j0).and.(jref<=j1)) glbl_array(i0:i1,:) = dist_array(i0:i1,:)
  2964. ! receive remaining chunks from other pes
  2965. do klm=1, npes-1
  2966. i0t = DistGrid%bounds(1,klm)
  2967. i1t = DistGrid%bounds(2,klm)
  2968. j0t = DistGrid%bounds(3,klm)
  2969. j1t = DistGrid%bounds(4,klm)
  2970. ! if klm is a source processor, receive from it
  2971. if((jref<j0t).or.(jref>j1t))cycle
  2972. n=i1t-i0t+1
  2973. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, subarr, ierr)
  2974. call MPI_TYPE_COMMIT (subarr, ierr)
  2975. call MPI_RECV( glbl_array(i0t,1), 1, subarr, klm, jref, localComm, stat, ierr)
  2976. call MPI_TYPE_FREE (subarr, ierr)
  2977. end do
  2978. else
  2979. ! are we on a src processor?
  2980. if((jref<j0).or.(jref>j1))return
  2981. n=i1-i0+1
  2982. call MPI_SEND( dist_array(i0,1), n*sz(2), my_real, root, jref, localComm, ierr)
  2983. end if
  2984. #endif
  2985. END SUBROUTINE GATHER_JBAND_2D_R
  2986. !EOC
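!--------------------------------------------------------------------------
! Usage sketch for GATHER_JBAND_2D_R (comment only). The PEs whose j-range
! contains JREF send their [i_strt:i_stop, :] chunk and root assembles the
! full [1:im_region, :] slice; note that JREF is mandatory here. Names are
! illustrative.
!
!    real :: flux_loc(dgrid%i_strt:dgrid%i_stop, lm)
!    real :: flux_glb(dgrid%im_region, lm)             ! filled on root
!    call GATHER_JBAND_2D_R( dgrid, flux_loc, flux_glb, status, jref=1 )
!    IF_NOTOK_RETURN(status=1)
!--------------------------------------------------------------------------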
  2987. !--------------------------------------------------------------------------
  2988. ! TM5 !
  2989. !--------------------------------------------------------------------------
  2990. !BOP
  2991. !
  2992. ! !IROUTINE: GATHER_JBAND_3D_R
  2993. !
  2994. ! !DESCRIPTION: Gather a zonal slab (3D) on root or rowroot(jref) [i.e. the
  2995. ! root of the row of procs].
  2996. ! Local arrays [i1:i2, a:b, c:d] are gathered into the root
2997. ! proc as [1:im, 1:b-a+1, 1:d-c+1]. The caller must ensure that at least
  2998. ! **ALL** the ROW procs call this routine, plus root if needed.
  2999. ! No halo possible yet.
  3000. !\\
  3001. !\\
  3002. ! !INTERFACE:
  3003. !
  3004. SUBROUTINE GATHER_JBAND_3D_R( DistGrid, dist_array, glbl_array, status, jref, rowcom)
  3005. !
  3006. ! !INPUT PARAMETERS:
  3007. !
  3008. type(dist_grid), intent(in) :: DistGrid
  3009. real, intent(in) :: dist_array(DistGrid%i_strt:,:,:)
  3010. integer, intent(in) :: jref ! to find sources (defines the row we want)
  3011. logical, optional, intent(in) :: rowcom ! to gather on row_root instead of global root
  3012. !
  3013. ! !OUTPUT PARAMETERS:
  3014. !
  3015. real, intent(out) :: glbl_array(:,:,:)
  3016. integer, intent(out) :: status
  3017. !
  3018. ! !REVISION HISTORY:
  3019. ! 01 Nov 2011 - P. Le Sager - v0
  3020. !
  3021. ! !REMARKS: use in budget for gathering fluxes, advect_cfl
  3022. !
  3023. !EOP
  3024. !------------------------------------------------------------------------
  3025. !BOC
  3026. character(len=*), parameter :: rname = mname//'gather_jband_3d_r'
  3027. #ifndef MPI
  3028. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop,:,:)
  3029. status = 0
  3030. #else
  3031. integer :: stat(MPI_STATUS_SIZE), subarr
  3032. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  3033. integer :: n, klm, sz(3), gsz(3), slab, tgt_root
  3034. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, stride
  3035. logical :: selectRoot
  3036. status=0
  3037. ! ------- Check inputs
  3038. i0 = DistGrid%i_strt
  3039. i1 = DistGrid%i_stop
  3040. j0 = DistGrid%j_strt
  3041. j1 = DistGrid%j_stop
  3042. ! by default gather into global root
  3043. selectRoot = isRoot
  3044. tgt_root = root
  3045. if (present(rowcom)) then
  3046. if (rowcom) then
  3047. selectRoot = isRowRoot.and.(jref>=j0).and.(jref<=j1)
  3048. tgt_root = RowRootId
  3049. endif
  3050. endif
  3051. sz = shape(dist_array)
  3052. gsz = shape(glbl_array)
  3053. if(okdebug)then
  3054. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, jband=.true., has_global=selectRoot)
  3055. IF_NOTOK_RETURN(status=1)
  3056. end if
  3057. ! ------- SEND/RECV -------------
  3058. if ( selectRoot ) then
  3059. ! local case
  3060. if((jref>=j0).and.(jref<=j1)) glbl_array(i0:i1,:,:) = dist_array(i0:i1,:,:)
  3061. ! receive remaining chunks from other pes
  3062. do klm=0,npes-1
  3063. if (klm==myid) cycle ! skip local case
  3064. i0t = DistGrid%bounds(1,klm)
  3065. i1t = DistGrid%bounds(2,klm)
  3066. j0t = DistGrid%bounds(3,klm)
  3067. j1t = DistGrid%bounds(4,klm)
  3068. ! if klm is a source processor, receive from it
  3069. if((jref<j0t).or.(jref>j1t))cycle
  3070. n=i1t-i0t+1
  3071. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, slab, ierr)
  3072. CALL MPI_TYPE_GET_EXTENT( my_real, lb, sizeoftype, ierr)
  3073. stride = gsz(1)*gsz(2)*sizeoftype
  3074. call MPI_TYPE_CREATE_HVECTOR (sz(3), 1, stride, slab, subarr, ierr)
  3075. call MPI_TYPE_FREE (slab, ierr)
  3076. call MPI_TYPE_COMMIT (subarr, ierr)
  3077. call MPI_RECV( glbl_array(i0t,1,1), 1, subarr, klm, jref, localComm, stat, ierr)
  3078. call MPI_TYPE_FREE (subarr, ierr)
  3079. end do
  3080. else
  3081. ! are we on a src processor?
  3082. if((jref<j0).or.(jref>j1))return
  3083. n=i1-i0+1
  3084. call MPI_SEND( dist_array(i0,1,1), n*sz(2)*sz(3), my_real, tgt_root, jref, localComm, ierr)
  3085. end if
  3086. #endif
  3087. END SUBROUTINE GATHER_JBAND_3D_R
  3088. !EOC
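!--------------------------------------------------------------------------
! Usage sketch for GATHER_JBAND_3D_R (comment only). With ROWCOM=.true. the
! slab is collected on the row-root of the JREF row instead of the global
! root (see the remark above about its use in the budget code). Names and
! extents are illustrative.
!
!    real :: slab_loc(dgrid%i_strt:dgrid%i_stop, lm, ntrac)
!    real :: slab_glb(dgrid%im_region, lm, ntrac)      ! valid on the receiver
!    call GATHER_JBAND_3D_R( dgrid, slab_loc, slab_glb, status, &
!                            jref=jband, rowcom=.true. )
!    IF_NOTOK_RETURN(status=1)
!--------------------------------------------------------------------------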
  3089. !--------------------------------------------------------------------------
  3090. ! TM5 !
  3091. !--------------------------------------------------------------------------
  3092. !BOP
  3093. !
  3094. ! !IROUTINE: GATHER_JBAND_4D_R
  3095. !
  3096. ! !DESCRIPTION: Gather a zonal slab (4D) on root or rowroot(jref) [i.e. the
  3097. ! root of the row of procs].
  3098. ! Local arrays [i1:i2, a:b, c:d, e:f] are gathered into the root
3099. ! proc as [1:im, 1:b-a+1, 1:d-c+1, 1:f-e+1]. The caller must
  3100. ! ensure that at least **ALL** the ROW procs call this routine,
  3101. ! plus root if needed.
  3102. ! No halo possible yet.
  3103. !\\
  3104. !\\
  3105. ! !INTERFACE:
  3106. !
  3107. SUBROUTINE GATHER_JBAND_4D_R( DistGrid, dist_array, glbl_array, status, jref, rowcom)
  3108. !
  3109. ! !INPUT PARAMETERS:
  3110. !
  3111. type(dist_grid), intent(in) :: DistGrid
  3112. real, intent(in) :: dist_array(DistGrid%i_strt:,:,:,:)
  3113. integer, intent(in) :: jref ! to find sources (defines the row we want)
  3114. logical, optional, intent(in) :: rowcom ! to gather on row_root instead of global root
  3115. !
  3116. ! !OUTPUT PARAMETERS:
  3117. !
  3118. real, intent(out) :: glbl_array(:,:,:,:)
  3119. integer, intent(out) :: status
  3120. !
  3121. ! !REVISION HISTORY:
  3122. ! 17 Feb 2015 - Ph. Le Sager - v0
  3123. !
  3124. ! !REMARKS: use in advectx
  3125. !
  3126. !EOP
  3127. !------------------------------------------------------------------------
  3128. !BOC
  3129. character(len=*), parameter :: rname = mname//'gather_jband_4d_r'
  3130. #ifndef MPI
  3131. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop,:,:,:)
  3132. status = 0
  3133. #else
  3134. integer :: stat(MPI_STATUS_SIZE), subarr
  3135. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  3136. integer :: n, klm, sz(4), gsz(4), slab, tgt_root, stack
  3137. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, stride
  3138. logical :: selectRoot
  3139. status=0
  3140. ! ------- Check inputs
  3141. i0 = DistGrid%i_strt
  3142. i1 = DistGrid%i_stop
  3143. j0 = DistGrid%j_strt
  3144. j1 = DistGrid%j_stop
  3145. ! by default gather into global root
  3146. selectRoot = isRoot
  3147. tgt_root = root
  3148. if (present(rowcom)) then
  3149. if (rowcom) then
  3150. selectRoot = isRowRoot.and.(jref>=j0).and.(jref<=j1)
  3151. tgt_root = RowRootId
  3152. endif
  3153. endif
  3154. sz = shape(dist_array)
  3155. gsz = shape(glbl_array)
  3156. stack = sz(3)*sz(4)
  3157. if(okdebug)then
  3158. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, jband=.true., has_global=selectRoot)
  3159. IF_NOTOK_RETURN(status=1)
  3160. end if
  3161. ! ------- SEND/RECV -------------
  3162. if ( selectRoot ) then
  3163. ! local case
  3164. if((jref>=j0).and.(jref<=j1)) glbl_array(i0:i1,:,:,:) = dist_array(i0:i1,:,:,:)
  3165. ! receive remaining chunks from other pes
  3166. do klm=0,npes-1
  3167. if (klm==myid) cycle ! skip local case
  3168. i0t = DistGrid%bounds(1,klm)
  3169. i1t = DistGrid%bounds(2,klm)
  3170. j0t = DistGrid%bounds(3,klm)
  3171. j1t = DistGrid%bounds(4,klm)
  3172. ! if klm is a source processor, receive from it
  3173. if((jref<j0t).or.(jref>j1t))cycle
  3174. n=i1t-i0t+1
  3175. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, slab, ierr)
  3176. CALL MPI_TYPE_GET_EXTENT( my_real, lb, sizeoftype, ierr)
  3177. stride = gsz(1)*gsz(2)*sizeoftype
  3178. call MPI_TYPE_CREATE_HVECTOR (stack, 1, stride, slab, subarr, ierr)
  3179. call MPI_TYPE_FREE (slab, ierr)
  3180. call MPI_TYPE_COMMIT (subarr, ierr)
  3181. call MPI_RECV( glbl_array(i0t,1,1,1), 1, subarr, klm, jref, localComm, stat, ierr)
  3182. call MPI_TYPE_FREE (subarr, ierr)
  3183. end do
  3184. else
  3185. ! are we on a src processor?
  3186. if((jref<j0).or.(jref>j1))return
  3187. n=i1-i0+1
  3188. call MPI_SEND( dist_array(i0,1,1,1), n*sz(2)*sz(3)*sz(4), my_real, tgt_root, jref, localComm, ierr)
  3189. end if
  3190. #endif
  3191. END SUBROUTINE GATHER_JBAND_4D_R
  3192. !EOC
  3193. !--------------------------------------------------------------------------
  3194. ! TM5 !
  3195. !--------------------------------------------------------------------------
  3196. !BOP
  3197. !
  3198. ! !IROUTINE: GATHER_IBAND_1D_R
  3199. !
3200. ! !DESCRIPTION: gather a meridional vector (1D, with its dimension distributed
3201. ! along J) on root. For example: [j1:j2]. A usage sketch follows the routine.
  3202. !\\
  3203. !\\
  3204. ! !INTERFACE:
  3205. !
  3206. SUBROUTINE GATHER_IBAND_1D_R( DistGrid, dist_array, glbl_array, status, iref )
  3207. !
  3208. ! !INPUT PARAMETERS:
  3209. !
  3210. type(dist_grid), intent(in) :: DistGrid
  3211. real, intent(in) :: dist_array(DistGrid%j_strt:)
  3212. integer, intent(in) :: iref ! to define source processors
  3213. !
  3214. ! !OUTPUT PARAMETERS:
  3215. !
  3216. real, intent(out) :: glbl_array(:)
  3217. integer, intent(out) :: status
  3218. !
  3219. ! !REVISION HISTORY:
  3220. ! 01 Nov 2011 - P. Le Sager - v0
  3221. !
  3222. ! !REMARKS:
  3223. ! - all processors with an i-range containing IREF are used.
  3224. !
  3225. !EOP
  3226. !------------------------------------------------------------------------
  3227. !BOC
  3228. character(len=*), parameter :: rname = mname//'gather_iband_1d_r'
  3229. #ifndef MPI
  3230. glbl_array = dist_array( DistGrid%j_strt:DistGrid%j_stop )
  3231. status = 0
  3232. #else
  3233. integer :: stat(MPI_STATUS_SIZE), subarr
  3234. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  3235. integer :: n, klm, sz(1), gsz(1)
  3236. status=0
  3237. ! ------- Check inputs
  3238. sz = shape(dist_array)
  3239. gsz = shape(glbl_array)
  3240. if(okdebug)then
  3241. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, iband=.true.)
  3242. ! write(gol,*)"lbound m",lbound(dist_array); call goPr
  3243. ! write(gol,*)"ubound m",ubound(dist_array); call goPr
  3244. IF_NOTOK_RETURN(status=1)
  3245. end if
  3246. i0 = DistGrid%i_strt
  3247. i1 = DistGrid%i_stop
  3248. j0 = DistGrid%j_strt
  3249. j1 = DistGrid%j_stop
  3250. ! ------- SEND/RECV -------------
  3251. if ( isRoot ) then
  3252. ! local case
  3253. if((iref>=i0).and.(iref<=i1)) glbl_array(j0:j1) = dist_array(j0:j1)
  3254. ! receive remaining chunks from other pes
  3255. do klm=1, npes-1
  3256. i0t = DistGrid%bounds(1,klm)
  3257. i1t = DistGrid%bounds(2,klm)
  3258. j0t = DistGrid%bounds(3,klm)
  3259. j1t = DistGrid%bounds(4,klm)
  3260. ! if klm is a source processor, receive from it
  3261. if((iref<i0t).or.(iref>i1t))cycle
  3262. n=j1t-j0t+1
  3263. call MPI_RECV( glbl_array(j0t), n, my_real, klm, iref, localComm, stat, ierr)
  3264. end do
  3265. else
  3266. ! are we on a src processor?
  3267. if((iref<i0).or.(iref>i1)) return
  3268. n=j1-j0+1
  3269. call MPI_SEND( dist_array(j0), n, my_real, root, iref, localComm, ierr)
  3270. end if
  3271. #endif
  3272. END SUBROUTINE GATHER_IBAND_1D_R
  3273. !EOC
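!--------------------------------------------------------------------------
! Usage sketch for GATHER_IBAND_1D_R (comment only). The PEs whose i-range
! contains IREF send their [j_strt:j_stop] chunk and root assembles the
! full meridional vector [1:jm_region]. Names are illustrative.
!
!    real :: band_loc(dgrid%j_strt:dgrid%j_stop)
!    real :: band_glb(dgrid%jm_region)                 ! filled on root
!    call GATHER_IBAND_1D_R( dgrid, band_loc, band_glb, status, iref=1 )
!    IF_NOTOK_RETURN(status=1)
!--------------------------------------------------------------------------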
  3274. !--------------------------------------------------------------------------
  3275. ! TM5 !
  3276. !--------------------------------------------------------------------------
  3277. !BOP
  3278. !
  3279. ! !IROUTINE: GATHER_IBAND_3D_R
  3280. !
3281. ! !DESCRIPTION: gather a meridional slice (3D) on root, for arrays like
3282. ! [j1:j2, lev, trac], i.e. without a distributed I dim. A usage sketch follows the routine.
  3283. !\\
  3284. !\\
  3285. ! !INTERFACE:
  3286. !
  3287. SUBROUTINE GATHER_IBAND_3D_R( DistGrid, dist_array, glbl_array, status, iref )
  3288. !
  3289. ! !INPUT PARAMETERS:
  3290. !
  3291. type(dist_grid), intent(in) :: DistGrid
  3292. real, intent(in) :: dist_array(DistGrid%j_strt:,:,:)
  3293. integer, intent(in) :: iref ! to find sources
  3294. !
  3295. ! !OUTPUT PARAMETERS:
  3296. !
  3297. real, intent(out) :: glbl_array(:,:,:)
  3298. integer, intent(out) :: status
  3299. !
  3300. ! !REVISION HISTORY:
  3301. ! 01 Nov 2011 - P. Le Sager - v0
  3302. !
  3303. ! !REMARKS: use in budget for gathering fluxes
  3304. !
  3305. !EOP
  3306. !------------------------------------------------------------------------
  3307. !BOC
  3308. character(len=*), parameter :: rname = mname//'gather_iband_3d_r'
  3309. #ifndef MPI
  3310. glbl_array = dist_array( DistGrid%j_strt:DistGrid%j_stop,:,:)
  3311. status = 0
  3312. #else
  3313. integer :: stat(MPI_STATUS_SIZE), subarr
  3314. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  3315. integer :: n, klm, sz(3), gsz(3), slab
  3316. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, stride
  3317. status=0
  3318. ! ------- Check inputs
  3319. sz = shape(dist_array)
  3320. gsz = shape(glbl_array)
  3321. if(okdebug)then
  3322. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, iband=.true.)
  3323. IF_NOTOK_RETURN(status=1)
  3324. end if
  3325. i0 = DistGrid%i_strt
  3326. i1 = DistGrid%i_stop
  3327. j0 = DistGrid%j_strt
  3328. j1 = DistGrid%j_stop
  3329. ! ------- SEND/RECV -------------
  3330. if ( isRoot ) then
  3331. ! local case
  3332. if((iref>=i0).and.(iref<=i1)) glbl_array(j0:j1,:,:) = dist_array(j0:j1,:,:)
  3333. ! receive remaining chunks from other pes
  3334. do klm=1,npes-1
  3335. i0t = DistGrid%bounds(1,klm)
  3336. i1t = DistGrid%bounds(2,klm)
  3337. j0t = DistGrid%bounds(3,klm)
  3338. j1t = DistGrid%bounds(4,klm)
  3339. ! if klm is a source processor, receive from it
  3340. if((iref<i0t).or.(iref>i1t))cycle
  3341. n=j1t-j0t+1
  3342. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, slab, ierr)
  3343. CALL MPI_TYPE_GET_EXTENT( my_real, lb, sizeoftype, ierr)
  3344. stride = gsz(1)*gsz(2)*sizeoftype
  3345. call MPI_TYPE_CREATE_HVECTOR (sz(3), 1, stride, slab, subarr, ierr)
  3346. call MPI_TYPE_FREE (slab, ierr)
  3347. call MPI_TYPE_COMMIT (subarr, ierr)
  3348. call MPI_RECV( glbl_array(j0t,1,1), 1, subarr, klm, iref, localComm, stat, ierr)
  3349. call MPI_TYPE_FREE (subarr, ierr)
  3350. end do
  3351. else
  3352. ! are we on a src processor?
  3353. if((iref<i0).or.(iref>i1))return
  3354. n=j1-j0+1
  3355. call MPI_SEND( dist_array(j0,1,1), n*sz(2)*sz(3), my_real, root, iref, localComm, ierr)
  3356. end if
  3357. #endif
  3358. END SUBROUTINE GATHER_IBAND_3D_R
  3359. !EOC
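!--------------------------------------------------------------------------
! Usage sketch for GATHER_IBAND_3D_R (comment only). Only the PEs whose
! i-range contains IREF contribute; root assembles [1:jm_region, lm, ntrac].
! Names and extents are illustrative.
!
!    real :: slab_loc(dgrid%j_strt:dgrid%j_stop, lm, ntrac)
!    real :: slab_glb(dgrid%jm_region, lm, ntrac)      ! filled on root
!    call GATHER_IBAND_3D_R( dgrid, slab_loc, slab_glb, status, iref=1 )
!    IF_NOTOK_RETURN(status=1)
!--------------------------------------------------------------------------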
  3360. !--------------------------------------------------------------------------
  3361. ! TM5 !
  3362. !--------------------------------------------------------------------------
  3363. !BOP
  3364. !
  3365. ! !IROUTINE: REDUCE_2D_R
  3366. !
3367. ! !DESCRIPTION: Global reduction. Data are gathered on root, where the
3368. ! operation OP is then applied, instead of applying OP on each
3369. ! proc and then calling MPI_REDUCE. This ensures bitwise agreement
3370. ! with the serial-code result in the case of SUM. (Usage sketch after the routine.)
  3371. !\\
  3372. !\\
  3373. ! !INTERFACE:
  3374. !
  3375. SUBROUTINE REDUCE_2D_R( DistGrid, dist_array, halo, op, resultat, status, all, debug)
  3376. !
  3377. ! !INPUT PARAMETERS:
  3378. !
  3379. type(dist_grid), intent(in) :: DistGrid
  3380. integer, intent(in) :: halo
  3381. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  3382. character(len=3), intent(in) :: op ! 'MAX', 'MIN' or 'SUM'
  3383. logical, intent(in), optional :: all ! mimic mpi_allreduce instead of mpi_reduce
  3384. logical, intent(in), optional :: debug ! print additional information: location of Min/Max
  3385. !
  3386. ! !OUTPUT PARAMETERS:
  3387. !
  3388. real, intent(out) :: resultat
  3389. integer, intent(out) :: status
  3390. !
  3391. ! !REVISION HISTORY:
  3392. ! 01 Nov 2011 - P. Le Sager - v0
  3393. !
  3394. !EOP
  3395. !------------------------------------------------------------------------
  3396. !BOC
  3397. character(len=*), parameter :: rname = mname//'REDUCE_2D_R'
  3398. logical :: x_debug
  3399. integer :: shp(2)
  3400. real, allocatable :: glbl_array(:,:)
  3401. x_debug=.false.
  3402. if(present(debug)) x_debug=debug
  3403. #ifdef MPI
  3404. if (isRoot) then
  3405. allocate( glbl_array( DistGrid%im_region, DistGrid%jm_region ))
  3406. else
  3407. allocate( glbl_array(1,1) )
  3408. end if
  3409. call gather(DistGrid, dist_array, glbl_array, halo, status)
  3410. IF_NOTOK_RETURN(status=1)
  3411. if (isRoot) then
  3412. select case( op )
  3413. case('sum', 'Sum', 'SUM')
  3414. resultat = sum(glbl_array)
  3415. case('max', 'Max', 'MAX')
  3416. resultat = maxval(glbl_array)
  3417. if(x_debug) then
  3418. shp=maxloc(glbl_array)
  3419. write(gol,*) rname //": Max location =", shp; call goPr
  3420. end if
  3421. case('min', 'Min', 'MIN')
  3422. resultat = minval(glbl_array)
  3423. if(x_debug) then
  3424. shp=minloc(glbl_array)
  3425. write(gol,*) rname //": Min location =", shp; call goPr
  3426. end if
  3427. case default
  3428. write(gol,*) 'UNSUPPORTED OPERATION'; call goPr; status=1
  3429. IF_NOTOK_RETURN(status=1)
  3430. end select
  3431. end if
  3432. if (present(all)) then
  3433. if (all) call MPI_bcast(resultat, 1, my_real, root, localComm, ierr)
  3434. end if
  3435. deallocate(glbl_array)
  3436. #else
  3437. select case( op )
  3438. case('sum', 'Sum', 'SUM')
  3439. resultat = sum(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region))
  3440. case('max', 'Max', 'MAX')
  3441. resultat = maxval(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region))
  3442. if(x_debug) then
  3443. shp=maxloc(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region))
  3444. write(gol,*) rname //": Max location =", shp; call goPr
  3445. end if
  3446. case('min', 'Min', 'MIN')
  3447. resultat = minval(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region))
  3448. if(x_debug) then
  3449. shp=minloc(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region))
  3450. write(gol,*) rname //": Min location =", shp; call goPr
  3451. end if
  3452. case default
  3453. write(gol,*) 'UNSUPPORTED OPERATION'; call goPr; status=1
  3454. IF_NOTOK_RETURN(status=1)
  3455. end select
  3456. #endif
  3457. status=0
  3458. END SUBROUTINE REDUCE_2D_R
  3459. !EOC
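!--------------------------------------------------------------------------
! Usage sketch for REDUCE_2D_R through the generic REDUCE interface (the
! same generic is used by DIST_ARR_STAT_2D_R below). With ALL=.true. the
! result is also broadcast, mimicking MPI_ALLREDUCE. 'field' and 'total'
! are illustrative names.
!
!    real :: field(dgrid%i_strt-halo:dgrid%i_stop+halo, &
!                  dgrid%j_strt-halo:dgrid%j_stop+halo)
!    real :: total
!    call REDUCE( dgrid, field, halo, 'SUM', total, status, all=.true. )
!    IF_NOTOK_RETURN(status=1)
!--------------------------------------------------------------------------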
  3460. !--------------------------------------------------------------------------
  3461. ! TM5 !
  3462. !--------------------------------------------------------------------------
  3463. !BOP
  3464. !
  3465. ! !IROUTINE: REDUCE_3D_R
  3466. !
  3467. ! !DESCRIPTION: same as REDUCE_2D_R, for 3D arrays.
  3468. !\\
  3469. !\\
  3470. ! !INTERFACE:
  3471. !
  3472. SUBROUTINE REDUCE_3D_R( DistGrid, dist_array, halo, op, resultat, status, all, debug)
  3473. !
  3474. ! !INPUT PARAMETERS:
  3475. !
  3476. type(dist_grid), intent(in) :: DistGrid
  3477. integer, intent(in) :: halo
  3478. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:)
  3479. character(len=3), intent(in) :: op ! 'MAX', 'MIN' or 'SUM'
  3480. logical, intent(in), optional :: all ! mimic MPI_ALLREDUCE instead of MPI_REDUCE
  3481. logical, intent(in), optional :: debug ! print additional information: location of Min/Max
  3482. !
  3483. ! !OUTPUT PARAMETERS:
  3484. !
  3485. real, intent(out) :: resultat
  3486. integer, intent(out) :: status
  3487. !
  3488. ! !REVISION HISTORY:
  3489. ! 01 Nov 2011 - P. Le Sager - v0
  3490. !
  3491. !EOP
  3492. !------------------------------------------------------------------------
  3493. !BOC
  3494. character(len=*), parameter :: rname = mname//'REDUCE_3D_R'
  3495. integer :: shp(3)
  3496. logical :: x_debug
  3497. real, allocatable :: glbl_array(:,:,:)
  3498. x_debug=.false.
  3499. if(present(debug)) x_debug=debug
  3500. #ifdef MPI
  3501. shp = shape( dist_array )
  3502. if (isRoot) then
  3503. allocate( glbl_array( DistGrid%im_region, DistGrid%jm_region, shp(3)) )
  3504. else
  3505. allocate( glbl_array(1,1,1) )
  3506. end if
  3507. call gather(DistGrid, dist_array, glbl_array, halo, status)
  3508. IF_NOTOK_RETURN(status=1)
  3509. if (isRoot) then
  3510. select case( op )
  3511. case('sum', 'Sum', 'SUM')
  3512. resultat = sum(glbl_array)
  3513. case('max', 'Max', 'MAX')
  3514. resultat = maxval(glbl_array)
  3515. if(x_debug) then
  3516. shp=maxloc(glbl_array)
  3517. write(gol,*) rname //": Max location =", shp; call goPr
  3518. end if
  3519. case('min', 'Min', 'MIN')
  3520. resultat = minval(glbl_array)
  3521. if(x_debug) then
  3522. shp=minloc(glbl_array)
  3523. write(gol,*) rname //": Min location =", shp; call goPr
  3524. end if
  3525. case default
  3526. write(gol,*) 'UNSUPPORTED OPERATION'; call goPr; status=1
  3527. IF_NOTOK_RETURN(status=1)
  3528. end select
  3529. end if
  3530. if (present(all)) then
  3531. if (all) call MPI_bcast(resultat, 1, my_real, root, localComm, ierr)
  3532. end if
  3533. #else
  3534. select case( op )
  3535. case('sum', 'Sum', 'SUM')
  3536. resultat = sum(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:))
  3537. case('max', 'Max', 'MAX')
  3538. resultat = maxval(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:))
  3539. if(x_debug) then
  3540. shp=maxloc(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:))
  3541. write(gol,*) rname //": Max location =", shp; call goPr
  3542. end if
  3543. case('min', 'Min', 'MIN')
  3544. resultat = minval(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:))
  3545. if(x_debug) then
  3546. shp=minloc(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:))
  3547. write(gol,*) rname //": Min location =", shp; call goPr
  3548. end if
  3549. case default
  3550. write(gol,*) 'UNSUPPORTED OPERATION'; call goPr; status=1
  3551. IF_NOTOK_RETURN(status=1)
  3552. end select
  3553. #endif
  3554. status=0
  3555. END SUBROUTINE REDUCE_3D_R
  3556. !EOC
  3557. !--------------------------------------------------------------------------
  3558. ! TM5 !
  3559. !--------------------------------------------------------------------------
  3560. !BOP
  3561. !
  3562. ! !IROUTINE: REDUCE_4D_R
  3563. !
  3564. ! !DESCRIPTION: same as REDUCE_2D_R, for 4D arrays.
  3565. !\\
  3566. !\\
  3567. ! !INTERFACE:
  3568. !
  3569. SUBROUTINE REDUCE_4D_R( DistGrid, dist_array, halo, op, resultat, status, all, debug)
  3570. !
  3571. ! !INPUT PARAMETERS:
  3572. !
  3573. type(dist_grid), intent(in) :: DistGrid
  3574. integer, intent(in) :: halo
  3575. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:,:)
  3576. character(len=3), intent(in) :: op ! 'MAX', 'MIN' or 'SUM'
  3577. logical, intent(in), optional :: all ! mimic MPI_ALLREDUCE instead of MPI_REDUCE
  3578. logical, intent(in), optional :: debug ! print additional information: location of Min/Max
  3579. !
  3580. ! !OUTPUT PARAMETERS:
  3581. !
  3582. real, intent(out) :: resultat
  3583. integer, intent(out) :: status
  3584. !
  3585. ! !REVISION HISTORY:
  3586. ! 01 Nov 2011 - P. Le Sager - v0
  3587. !
  3588. !EOP
  3589. !------------------------------------------------------------------------
  3590. !BOC
  3591. character(len=*), parameter :: rname = mname//'reduce_4d_r'
  3592. integer :: shp(4)
  3593. logical :: x_debug
  3594. real, allocatable :: glbl_array(:,:,:,:)
  3595. x_debug=.false.
  3596. if(present(debug)) x_debug=debug
  3597. #ifdef MPI
  3598. shp = shape( dist_array )
  3599. if (isRoot) then
  3600. allocate( glbl_array( DistGrid%im_region, DistGrid%jm_region, shp(3), shp(4)) )
  3601. else
  3602. allocate( glbl_array(1,1,1,1) )
  3603. end if
  3604. call gather(DistGrid, dist_array, glbl_array, halo, status)
  3605. IF_NOTOK_RETURN(status=1)
  3606. if (isRoot) then
  3607. select case( op )
  3608. case('sum', 'Sum', 'SUM')
  3609. resultat = sum(glbl_array)
  3610. case('max', 'Max', 'MAX')
  3611. resultat = maxval(glbl_array)
  3612. if(x_debug) then
  3613. shp=maxloc(glbl_array)
  3614. write(gol,*) rname //": Max location =", shp; call goPr
  3615. end if
  3616. case('min', 'Min', 'MIN')
  3617. resultat = minval(glbl_array)
  3618. if(x_debug) then
  3619. shp=minloc(glbl_array)
  3620. write(gol,*) rname //": Min location =", shp; call goPr
  3621. end if
  3622. case default
  3623. write(gol,*) 'UNSUPPORTED OPERATION'; call goPr; status=1
  3624. IF_NOTOK_RETURN(status=1)
  3625. end select
  3626. end if
  3627. if (present(all)) then
  3628. if (all) call MPI_bcast(resultat, 1, my_real, root, localComm, ierr)
  3629. end if
  3630. #else
  3631. select case( op )
  3632. case('sum', 'Sum', 'SUM')
  3633. resultat = sum(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:,:))
  3634. case('max', 'Max', 'MAX')
  3635. resultat = maxval(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:,:))
  3636. if(x_debug) then
  3637. shp=maxloc(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:,:))
  3638. write(gol,*) rname //": Max location =", shp; call goPr
  3639. end if
  3640. case('min', 'Min', 'MIN')
  3641. resultat = minval(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:,:))
  3642. if(x_debug) then
  3643. shp=minloc(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:,:))
  3644. write(gol,*) rname //": Min location =", shp; call goPr
  3645. end if
  3646. case default
  3647. write(gol,*) 'UNSUPPORTED OPERATION'; call goPr; status=1
  3648. IF_NOTOK_RETURN(status=1)
  3649. end select
  3650. #endif
  3651. status=0
  3652. END SUBROUTINE REDUCE_4D_R
  3653. !EOC
  3654. !--------------------------------------------------------------------------
  3655. ! TM5 !
  3656. !--------------------------------------------------------------------------
  3657. !BOP
  3658. !
  3659. ! !IROUTINE: DIST_ARR_STAT_2D_R
  3660. !
  3661. ! !DESCRIPTION: calculate and print the MIN, MEAN, MAX of a 2D distributed
  3662. ! real field. This is basically a wrapper around several calls
  3663. ! to REDUCE.
  3664. !\\
  3665. !\\
  3666. ! !INTERFACE:
  3667. !
  3668. SUBROUTINE DIST_ARR_STAT_2D_R( DistGrid, name, arr, halo, status)
  3669. !
  3670. ! !INPUT PARAMETERS:
  3671. !
  3672. type(dist_grid), intent(in) :: DistGrid
  3673. character(len=*), intent(in) :: name ! verbose helper
  3674. integer, intent(in) :: halo
  3675. real, intent(in) :: arr(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  3676. !
  3677. ! !OUTPUT PARAMETERS:
  3678. !
  3679. integer, intent(out):: status
  3680. !
  3681. ! !REVISION HISTORY:
  3682. ! 7 Mar 2012 - P. Le Sager - adapted for lon-lat MPI domain
  3683. ! decomposition, from DD_FIELD_STATISTICS in
  3684. ! both DIFFUSION.F90 and DRY_DEPOSITION.F90
  3685. !
  3686. ! !REMARKS:
  3687. ! (1) FIXME : does not compute the mean of only non-zero data anymore
  3688. !EOP
  3689. !------------------------------------------------------------------------
  3690. !BOC
  3691. character(len=*), parameter :: rname=mname//'dist_arr_stat_2d_r'
  3692. integer :: ntel_non_zero, nx, ny
  3693. real :: maxf, minf, meanf, mean_non_zero
  3694. ! --- begin -------------------------------
  3695. call reduce( DistGrid, arr, halo, 'MAX', maxf, status)
  3696. IF_NOTOK_RETURN(status=1)
  3697. call reduce( DistGrid, arr, halo, 'MIN', minf, status)
  3698. IF_NOTOK_RETURN(status=1)
  3699. call reduce( DistGrid, arr, halo, 'SUM', meanf,status)
  3700. IF_NOTOK_RETURN(status=1)
  3701. ! get mean
  3702. nx = DistGrid%im_region
  3703. ny = DistGrid%jm_region
  3704. meanf = meanf / ( nx*ny )
  3705. if(isRoot) then
  3706. write(gol,'(a10,3(a5,1x,1pe10.3))') name,' max=', maxf,' min=',minf,'mean=',meanf!,'mn0',mean_non_zero
  3707. call goPr
  3708. end if
  3709. status=0
  3710. END SUBROUTINE DIST_ARR_STAT_2D_R
  3711. !EOC
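!--------------------------------------------------------------------------
! Usage sketch for DIST_ARR_STAT_2D_R (comment only): print the min/mean/max
! of a distributed 2D field on root. 'emis' is an illustrative field name.
!
!    call DIST_ARR_STAT_2D_R( dgrid, 'emis', emis, halo, status )
!    IF_NOTOK_RETURN(status=1)
!--------------------------------------------------------------------------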
  3712. !--------------------------------------------------------------------------
  3713. ! TM5 !
  3714. !--------------------------------------------------------------------------
  3715. !BOP
  3716. !
  3717. ! !IROUTINE: PRINT_DISTGRID
  3718. !
  3719. ! !DESCRIPTION: utility that prints information about a distributed grid
  3720. !\\
  3721. !\\
  3722. ! !INTERFACE:
  3723. !
  3724. SUBROUTINE PRINT_DISTGRID ( DistGrid )
  3725. !
  3726. ! !INPUT PARAMETERS:
  3727. !
  3728. type(dist_grid), intent(in) :: DistGrid
  3729. !
  3730. ! !REVISION HISTORY:
  3731. ! 01 Nov 2011 - P. Le Sager - v0
  3732. !
  3733. !EOP
  3734. !------------------------------------------------------------------------
  3735. !BOC
  3736. integer, parameter :: maxrow=5
  3737. integer, parameter :: maxcol=5
  3738. integer :: sz1(1), i1
  3739. ! header
  3740. write(gol,*) "========== Start Distributed Grid ==================="; call goPr
  3741. ! dist_grid
  3742. write(gol,*) "im_region :" , DistGrid%im_region ; call goPr
  3743. write(gol,*) "jm_region :" , DistGrid%jm_region ; call goPr
  3744. write(gol,*) "i_strt :" , DistGrid%i_strt ; call goPr
  3745. write(gol,*) "i_stop :" , DistGrid%i_stop ; call goPr
  3746. write(gol,*) "i_strt_halo :" , DistGrid%i_strt_halo ; call goPr
  3747. write(gol,*) "i_stop_halo :" , DistGrid%i_stop_halo ; call goPr
  3748. write(gol,*) "j_strt :" , DistGrid%j_strt ; call goPr
  3749. write(gol,*) "j_stop :" , DistGrid%j_stop ; call goPr
  3750. write(gol,*) "j_strt_halo :" , DistGrid%j_strt_halo ; call goPr
  3751. write(gol,*) "j_stop_halo :" , DistGrid%j_stop_halo ; call goPr
  3752. write(gol,*) "has_north_pole:" , DistGrid%has_north_pole ; call goPr
  3753. write(gol,*) "has_south_pole:" , DistGrid%has_south_pole ; call goPr
  3754. ! physical grid
  3755. write(gol,*) '------------- physical grid -------------------' ; call goPr
  3756. write(gol,*) "llgrid name:", trim(DistGrid%lli%name) ; call goPr
  3757. write(gol,*) "R[m] :", DistGrid%lli%R ; call goPr
  3758. write(gol,*) "dlon[deg] :", DistGrid%lli%dlon_deg ; call goPr
  3759. write(gol,*) "dlat[deg] :", DistGrid%lli%dlat_deg ; call goPr
  3760. write(gol,*) "im :", DistGrid%lli%im ; call goPr
  3761. write(gol,*) "jm :", DistGrid%lli%jm ; call goPr
  3762. if (associated(DistGrid%lli%lon_deg)) then
  3763. sz1 = shape(DistGrid%lli%lon_deg)
  3764. i1 = min(maxcol,sz1(1))
  3765. write(gol,*) "lon = ",DistGrid%lli%lon_deg(1:i1); call goPr
  3766. endif
  3767. if (associated(DistGrid%lli%lat_deg)) then
  3768. sz1=shape(DistGrid%lli%lat_deg)
  3769. i1=min(maxrow,sz1(1))
  3770. write(gol,*) "lat = ",DistGrid%lli%lat_deg(1:i1); call goPr
  3771. endif
  3772. ! footer
  3773. write(gol,*) "========== End Distributed Grid ===================" ; call goPr
  3774. END SUBROUTINE PRINT_DISTGRID
  3775. !EOC
  3776. !--------------------------------------------------------------------------
  3777. ! TM5 !
  3778. !--------------------------------------------------------------------------
  3779. !BOP
  3780. !
  3781. ! !IROUTINE: TESTCOMM
  3782. !
  3783. ! !DESCRIPTION: check if the communications are working as expected.
  3784. ! Currently checked:
  3785. ! - GATHER, SCATTER, UPDATE_HALO (2D, 3D, 4D)
  3786. ! - SCATTER_I_BAND, SCATTER_J_BAND (1D, 2D)
  3787. !\\
  3788. !\\
  3789. ! !INTERFACE:
  3790. !
  3791. SUBROUTINE TESTCOMM( DistGrid, halo, status )
  3792. !
  3793. ! !INPUT PARAMETERS:
  3794. !
  3795. type(dist_grid), intent(in) :: DistGrid
  3796. integer, intent(in) :: halo
  3797. !
  3798. ! !OUTPUT PARAMETERS:
  3799. !
  3800. integer, intent(out) :: status
  3801. !
  3802. ! !REVISION HISTORY:
  3803. ! 01 Nov 2011 - P. Le Sager - v0
  3804. !
  3805. ! !REMARKS:
3806. ! (1) should be run with different halo sizes
3807. ! (2) note that it will not catch some errors in halo_update if too few processors are used
  3808. !
  3809. !EOP
  3810. !------------------------------------------------------------------------
  3811. !BOC
  3812. character(len=*), parameter :: rname = mname//'testcomm'
  3813. ! real, allocatable :: src1(:), tgt1(:), res1(:)
  3814. ! real, allocatable :: src2(:,:), tgt2(:,:), res2(:,:)
  3815. ! real, allocatable :: larray2a(:,:), larray2b(:,:), glb2a(:,:), glb2b(:,:)
  3816. ! real, allocatable :: larray3a(:,:,:), larray3b(:,:,:), glb3a(:,:,:), glb3b(:,:,:)
  3817. ! real, allocatable :: larray4a(:,:,:,:), larray4b(:,:,:,:)
  3818. ! real, allocatable :: glb4a(:,:,:,:), glb4b(:,:,:,:)
  3819. ! integer :: i0, j0, i1, j1, x_halo, k, levels, l, trac, iref(2), jref(2)
  3820. ! logical :: south, north, west, east, test
  3821. ! character(len=*), parameter :: form='(f4.0)'
  3822. ! levels=17
  3823. ! trac=5
  3824. ! ! General
  3825. ! call Get_DistGrid( DistGrid, I_STRT=i0, I_STOP=i1, J_STRT=j0, J_STOP=j1, &
  3826. ! hasEastBorder=east, hasWestBorder=west, &
  3827. ! hasSouthBorder=south, hasNorthBorder=north )
  3828. ! x_halo=halo
  3829. ! status=0
  3830. ! if(isRoot) print*, "========= TESTING COMM FOR HALO=",x_HALO
  3831. ! call par_barrier()
  3832. ! ! *************************** SCATTER BAND ***************************
  3833. ! iref=(/ 1, DistGrid%im_region/) ! to test i_band along west and east border
  3834. ! jref=(/ 1, DistGrid%jm_region/) ! to test j_band along south and north border
  3835. ! if(isRoot) then
  3836. ! allocate(src1(DistGrid%im_region))
  3837. ! else
  3838. ! allocate(src1(1))
  3839. ! end if
  3840. ! allocate(tgt1(i0:i1), res1(i0:i1))
  3841. ! if (isRoot) src1 = (/(k, k=1,DistGrid%im_region)/)
  3842. ! res1 = (/(k, k=i0,i1)/)
  3843. ! do trac=1,2
  3844. ! tgt1=0
  3845. ! call scatter_j_band(distgrid, tgt1, src1, status, jref=jref(trac))
  3846. ! IF_NOTOK_RETURN(status=1)
  3847. ! test=((trac==1).and.south).or.((trac==2).and.north)
  3848. ! ! diff should be 0 along borders only
  3849. ! if (maxval(abs(res1-tgt1)) /= 0.) then
  3850. ! if(test) then
  3851. ! print*, "test scatter_J_band 1D FAILED at border:"
  3852. ! !print*, i0,i1,tgt1(i0), tgt1(i1), res1(i0), res1(i1)
  3853. ! status=1
  3854. ! !else ! Expected only if tgt1 has inout attribute in scatter routine
  3855. ! ! print*, "test scatter_J_band 1D PASSED inside:"
  3856. ! ! print*, i0,i1,tgt1(i0), tgt1(i1), res1(i0), res1(i1)
  3857. ! end if
  3858. ! else
  3859. ! if(test) then
  3860. ! print*, "test scatter_J_band 1D PASSED at border"
  3861. ! !print*, i0,i1,tgt1(i0), tgt1(i1), res1(i0), res1(i1)
  3862. ! !else ! Expected only if tgt1 has inout attribute in scatter routine
  3863. ! ! print*, "test scatter_J_band 1D FAILED inside"
  3864. ! ! print*, i0,i1,tgt1(i0), tgt1(i1), res1(i0), res1(i1)
  3865. ! ! status=1
  3866. ! end if
  3867. ! end if
  3868. ! IF_NOTOK_RETURN(status=1)
  3869. ! end do
  3870. ! deallocate(src1, tgt1, res1)
  3871. ! if(isRoot) then
  3872. ! allocate(src1(DistGrid%jm_region))
  3873. ! else
  3874. ! allocate(src1(1))
  3875. ! end if
  3876. ! allocate(tgt1(j0:j1), res1(j0:j1))
  3877. ! if (isRoot) src1 = (/(k, k=1,DistGrid%jm_region)/)
  3878. ! res1 = (/(k, k=j0,j1)/)
  3879. ! do trac=1,2
  3880. ! tgt1=0
  3881. ! call scatter_i_band(distgrid, tgt1, src1, status, iref=iref(trac))
  3882. ! IF_NOTOK_RETURN(status=1)
  3883. ! test=((trac==1).and.west).or.((trac==2).and.east)
  3884. ! ! diff should be 0 along borders only
  3885. ! if (maxval(abs(res1-tgt1)) /= 0.) then
  3886. ! if(test) then
  3887. ! print*, "test scatter_I_band 1D FAILED at border"
  3888. ! status=1
  3889. ! !else
  3890. ! ! print*, "test scatter_I_band 1D PASSED inside"
  3891. ! end if
  3892. ! else
  3893. ! if(test) then
  3894. ! print*, "test scatter_I_band 1D PASSED at border"
  3895. ! !else
  3896. ! ! print*, "test scatter_I_band 1D FAILED inside"
  3897. ! ! status=1
  3898. ! end if
  3899. ! end if
  3900. ! IF_NOTOK_RETURN(status=1)
  3901. ! end do
  3902. ! deallocate(src1, tgt1, res1)
  3903. ! ! ---------------- 2D
!    if (isRoot) then
!       allocate(src2(DistGrid%im_region, levels))
!    else
!       allocate(src2(1,1))
!    end if
!    allocate(tgt2(i0:i1,levels), res2(i0:i1,levels))
!    do l=1,levels
!       if (isRoot) src2(:,l) = (/(k, k=1,DistGrid%im_region)/) * l
!       res2(:,l) = (/(k, k=i0,i1)/) * l
!    end do
!    do trac=1,2
!       tgt2=0
!       call scatter_j_band(distgrid, tgt2, src2, status, jref=jref(trac))
!       IF_NOTOK_RETURN(status=1)
!       test=((trac==1).and.south).or.((trac==2).and.north)
!       ! diff should be 0 along borders only
!       if (maxval(abs(res2-tgt2)) /= 0.) then
!          if (test) then
!             print*, "test scatter_J_band 2D FAILED at border"
!             status=1
!          !else
!          !   print*, "test scatter_J_band 2D PASSED inside"
!          end if
!       else
!          if (test) then
!             print*, "test scatter_J_band 2D PASSED at border"
!          !else
!          !   print*, "test scatter_J_band 2D FAILED inside"
!          !   status=1
!          end if
!       end if
!       IF_NOTOK_RETURN(status=1)
!    end do
!    deallocate(src2, tgt2, res2)
!
!    if (isRoot) then
!       allocate(src2(DistGrid%jm_region,levels))
!    else
!       allocate(src2(1,1))
!    end if
!    allocate(tgt2(j0:j1,levels), res2(j0:j1,levels))
!    do l=1,levels
!       if (isRoot) src2(:,l) = (/(k, k=1,DistGrid%jm_region)/) * l
!       res2(:,l) = (/(k, k=j0,j1)/) * l
!    end do
!    do trac=1,2
!       tgt2=0
!       call scatter_i_band(distgrid, tgt2, src2, status, iref=iref(trac))
!       IF_NOTOK_RETURN(status=1)
!       test=((trac==1).and.west).or.((trac==2).and.east)
!       ! diff should be 0 along borders only
!       if (maxval(abs(res2-tgt2)) /= 0.) then
!          if (test) then
!             print*, "test scatter_I_band 2D FAILED at border"
!             status=1
!          !else
!          !   print*, "test scatter_I_band 2D PASSED inside"
!          end if
!       else
!          if (test) then
!             print*, "test scatter_I_band 2D PASSED at border"
!          !else
!          !   print*, "test scatter_I_band 2D FAILED inside"
!          !   status=1
!          end if
!       end if
!       IF_NOTOK_RETURN(status=1)
!    end do
!    deallocate(src2, tgt2, res2)
!
!    ! *************************** GATHER/SCATTER ***************************
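!    ! Round-trip check, repeated for 2D, 3D and 4D arrays: fill the local
!    ! interior (halo left at 0) with a rank-dependent pattern, GATHER it into
!    ! a global array on root, broadcast that array, and compare the local
!    ! slab against the original. Then SCATTER the gathered field back and
!    ! compare again; both differences must be exactly zero.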
!    !----------------------
!    ! Allocate 2D
!    !----------------------
!    allocate( larray2a( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo) )
!    allocate( larray2b( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo) )
!    allocate( glb2a(DistGrid%im_region, DistGrid%jm_region) )
!    ! in halo, 0, elsewhere myid
!    larray2a=0.
!    larray2a(i0:i1,j0:j1)=real(myid)
!    ! glb2b is global array, used in root only
!    if (isRoot) then
!       allocate( glb2b( DistGrid%im_region, DistGrid%jm_region) )
!    else
!       allocate( glb2b(1,1) )
!    end if
!
!    !----------------------
!    ! test GATHER_2D_R
!    !----------------------
!    glb2b=0.
!    larray2b=0.
!    ! gather
!    call gather( DistGrid, larray2a, glb2b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!    ! broadcast result
!    if (isRoot) glb2a = glb2b
! #ifdef MPI
!    call MPI_bcast(glb2a, size(glb2a), my_real, root, localComm, ierr)
! #endif
!    larray2b(i0:i1,j0:j1) = glb2a(i0:i1,j0:j1)
!    larray2b = larray2a-larray2b
!    ! diff should be 0
!    if (maxval(abs(larray2b)) /= 0.) then
!       print*, "test gather 2d FAILED"
!       status=1
!    else
!       print*, "test gather 2d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    call par_barrier()
!
!    !----------------------
!    ! test SCATTER_2D_R
!    !----------------------
!    larray2b=0.
!    call scatter( DistGrid, larray2b, glb2b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!    larray2b=larray2a-larray2b
!    ! diff should be 0
!    if (maxval(abs(larray2b)) /= 0.) then
!       print*, "test scatter 2d FAILED"
!       status=1
!    else
!       print*, "test scatter 2d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    ! CLEANUP
!    deallocate(larray2a,larray2b,glb2a,glb2b)
!    call par_barrier()
!
!    !----------------------
!    ! Allocate 3D
!    !----------------------
!    allocate( larray3a( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels) )
!    allocate( larray3b( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels) )
!    allocate( glb3a( DistGrid%im_region, DistGrid%jm_region, levels) )
!    ! in halo, 0, elsewhere myid*level
!    larray3a=0.
!    do k=1,levels
!       larray3a(i0:i1,j0:j1,k)=real(myid*k)
!    end do
!    ! glb3b is global array, used in root only
!    if (isRoot) then
!       allocate( glb3b( DistGrid%im_region, DistGrid%jm_region, levels) )
!    else
!       allocate( glb3b(1,1,1) )
!    end if
!
!    !----------------------
!    ! test GATHER_3D_R
!    !----------------------
!    glb3b=0.
!    larray3b=0.
!    ! gather
!    call gather( DistGrid, larray3a, glb3b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!    ! broadcast result
!    if (isRoot) glb3a = glb3b
! #ifdef MPI
!    call MPI_bcast(glb3a, size(glb3a), my_real, root, localComm, ierr)
! #endif
!    larray3b(i0:i1,j0:j1,:) = glb3a(i0:i1,j0:j1,:)
!    larray3b = larray3a-larray3b
!    ! diff should be 0
!    if (maxval(abs(larray3b)) /= 0.) then
!       print*, "test gather 3d FAILED"
!       status=1
!    else
!       print*, "test gather 3d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    call par_barrier()
!
!    !----------------------
!    ! test SCATTER_3D_R
!    !----------------------
!    larray3b=0.
!    call scatter( DistGrid, larray3b, glb3b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!    larray3b=larray3a-larray3b
!    ! diff should be 0
!    if (maxval(abs(larray3b)) /= 0.) then
!       print*, "test scatter 3d FAILED"
!       status=1
!    else
!       print*, "test scatter 3d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    ! CLEANUP
!    deallocate(larray3a,larray3b,glb3a,glb3b)
!    call par_barrier()
!
!    !----------------------
!    ! Allocate 4D
!    !----------------------
!    allocate( larray4a( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels, trac) )
!    allocate( larray4b( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels, trac) )
!    allocate( glb4a( DistGrid%im_region, DistGrid%jm_region, levels, trac) )
!    ! in halo, 0, elsewhere (myid+1)*level*trac
!    larray4a=0.
!    do l=1,trac
!       do k=1,levels
!          larray4a(i0:i1,j0:j1,k,l)=real((myid+1)*k*l)
!       end do
!    end do
!    ! glb4b is global array, used in root only
!    if (isRoot) then
!       allocate( glb4b( DistGrid%im_region, DistGrid%jm_region, levels, trac) )
!    else
!       allocate( glb4b(1,1,1,1) )
!    end if
!
!    !----------------------
!    ! test GATHER_4D_R
!    !----------------------
!    glb4b=0.
!    larray4b=0.
!    ! gather
!    call gather( DistGrid, larray4a, glb4b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!    ! broadcast result
!    if (isRoot) glb4a = glb4b
! #ifdef MPI
!    call MPI_bcast(glb4a, size(glb4a), my_real, root, localComm, ierr)
! #endif
!    larray4b(i0:i1,j0:j1,:,:) = glb4a(i0:i1,j0:j1,:,:)
!    larray4b = larray4a-larray4b
!    ! diff should be 0
!    if (maxval(abs(larray4b)) /= 0.) then
!       print*, "test gather 4d FAILED"
!       status=1
!    else
!       print*, "test gather 4d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    call par_barrier()
!
!    !----------------------
!    ! test SCATTER_4D_R
!    !----------------------
!    larray4b=0.
!    call scatter( DistGrid, larray4b, glb4b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!    larray4b=larray4a-larray4b
!    ! diff should be 0
!    if (maxval(abs(larray4b)) /= 0.) then
!       print*, "test scatter 4d FAILED"
!       status=1
!    else
!       print*, "test scatter 4d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    ! CLEANUP
!    deallocate(larray4a,larray4b,glb4a,glb4b)
!    call par_barrier()
!
!    ! *************************************** HALO ***************************
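!    ! Halo check, repeated for 2D, 3D and 4D arrays: the array to update has
!    ! its interior set to myid and its halo set to 0; the reference array has
!    ! its halo pre-filled with the neighbours' ranks (0 where there is no
!    ! neighbour, i.e. MPI_PROC_NULL). After UPDATE_HALO the two must match
!    ! exactly.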
!    !----------------------
!    ! test UPDATE_HALO_2D_R
!    !----------------------
!    ! Allocate 2D
!    allocate( larray2a( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo) )
!    allocate( larray2b( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo) )
!    ! array to update : in halo set to 0, elsewhere to myid
!    larray2b=0.
!    larray2b(i0:i1,j0:j1)=real(myid)
!    ! test array : fill halo with neighbors rank
!    larray2a=0.
!    larray2a( i0-x_halo:i0-1, j0:j1 ) = DistGrid%neighbors(1) ! west halo
!    larray2a( i1+1:i1+x_halo, j0:j1 ) = DistGrid%neighbors(3) ! east halo
!    larray2a( i0:i1, j1+1:j1+x_halo ) = DistGrid%neighbors(2) ! north halo
!    larray2a( i0:i1, j0-x_halo:j0-1 ) = DistGrid%neighbors(4) ! south halo
!    larray2a(i0:i1,j0:j1)=real(myid)
!    where (larray2a == MPI_PROC_NULL) larray2a=0.
!    ! update
!    CALL UPDATE_HALO( DISTGRID, larray2b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!    if (isRoot.and.(x_halo==1)) then ! a 32-value row prints nicely for 2x2 processes with halo=1
!       print*, "----------------------------"
!       print '(32F4.0)', larray2a
!       print*, "----------------------------"
!       print '(32F4.0)', larray2b
!       print*, "----------------------------"
!    end if
!    ! compare (diff should be 0)
!    larray2b=larray2a-larray2b
!    if (maxval(abs(larray2b)) /= 0.) then
!       print*, "test update_halo 2d FAILED"
!       status=1
!    else
!       print*, "test update_halo 2d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    ! CLEANUP
!    deallocate(larray2a,larray2b)
!    call par_barrier()
!
!    !----------------------
!    ! test UPDATE_HALO_3D_R
!    !----------------------
!    ! Allocate 3D
!    allocate( larray3a( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels) )
!    allocate( larray3b( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels) )
!    ! array to update : in halo set to 0, elsewhere to myid
!    larray3b=0.
!    larray3b(i0:i1,j0:j1,:)=real(myid)
!    ! test array : fill halo with neighbors rank
!    larray3a=0.
!    larray3a( i0-x_halo:i0-1, j0:j1, : ) = DistGrid%neighbors(1) ! west halo
!    larray3a( i1+1:i1+x_halo, j0:j1, : ) = DistGrid%neighbors(3) ! east halo
!    larray3a( i0:i1, j1+1:j1+x_halo, : ) = DistGrid%neighbors(2) ! north halo
!    larray3a( i0:i1, j0-x_halo:j0-1, : ) = DistGrid%neighbors(4) ! south halo
!    larray3a(i0:i1,j0:j1,:)=real(myid)            ! interior
!    where (larray3a == MPI_PROC_NULL) larray3a=0. ! if no neighbor
!    ! update
!    CALL UPDATE_HALO( DISTGRID, larray3b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!    ! compare (diff should be 0)
!    larray3b=larray3a-larray3b
!    if (maxval(abs(larray3b)) /= 0.) then
!       print*, "test update_halo 3d FAILED"
!       status=1
!    else
!       print*, "test update_halo 3d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    ! CLEANUP
!    deallocate(larray3a,larray3b)
!    call par_barrier()
!
!    !----------------------
!    ! test UPDATE_HALO_4D_R
!    !----------------------
!    ! Allocate 4D
!    allocate( larray4a( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels, trac) )
!    allocate( larray4b( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels, trac) )
!    ! array to update : in halo set to 0, elsewhere to myid
!    larray4b=0.
!    larray4b(i0:i1,j0:j1,:,:)=real(myid)
!    ! test array : fill halo with neighbors rank
!    larray4a=0.
!    larray4a( i0-x_halo:i0-1, j0:j1, :,: ) = DistGrid%neighbors(1) ! west halo
!    larray4a( i1+1:i1+x_halo, j0:j1, :,: ) = DistGrid%neighbors(3) ! east halo
!    larray4a( i0:i1, j1+1:j1+x_halo, :,: ) = DistGrid%neighbors(2) ! north halo
!    larray4a( i0:i1, j0-x_halo:j0-1, :,: ) = DistGrid%neighbors(4) ! south halo
!    larray4a(i0:i1,j0:j1,:,:)=real(myid)          ! interior
!    where (larray4a == MPI_PROC_NULL) larray4a=0. ! if no neighbor
!    ! update
!    CALL UPDATE_HALO( DISTGRID, larray4b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!    ! compare (diff should be 0)
!    larray4b=larray4a-larray4b
!    if (maxval(abs(larray4b)) /= 0.) then
!       print*, "test update_halo 4d FAILED"
!       status=1
!    else
!       print*, "test update_halo 4d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    ! CLEANUP
!    deallocate(larray4a,larray4b)
!    call par_barrier()

    status=0

  END SUBROUTINE TESTCOMM
!EOC

END MODULE DOMAIN_DECOMP