domain_decomp.F90

  1. !
  2. #define TRACEBACK write (gol,'("in ",a," (",a,", line",i5,")")') rname, __FILE__, __LINE__; call goErr
  3. #define IF_NOTOK_RETURN(action) if (status/=0) then; TRACEBACK; action; return; end if
  4. #define IF_ERROR_RETURN(action) if (status> 0) then; TRACEBACK; action; return; end if
  5. !
  6. #define IF_NOTOK_MPI(action) if (ierr/=MPI_SUCCESS) then; TRACEBACK; action; return; end if
  7. !
  8. #include "tm5.inc"
  9. !
  10. !----------------------------------------------------------------------------
  11. ! TM5 !
  12. !----------------------------------------------------------------------------
  13. !BOP
  14. !
  15. ! !MODULE: DOMAIN_DECOMP
  16. !
  17. ! !DESCRIPTION: Define a distributed grid object and its methods.
  18. ! Horizontal grid is decomposed across longitudes and latitudes.
  19. !\\
  20. !\\
  21. ! !INTERFACE:
  22. !
  23. MODULE DOMAIN_DECOMP
  24. !
  25. ! !USES:
  26. !
  27. use grid, only : TllGridInfo ! Type with Lon/Lat Grid Info
  28. use Go, only : goErr, goPr, gol ! go = general objects
  29. use dims, only : okdebug
  30. use partools ! to include mpif.h, ierr, localComm,...
  31. IMPLICIT NONE
  32. PRIVATE
  33. !
  34. ! !PUBLIC MEMBER FUNCTIONS:
  35. !
  36. public :: Init_DistGrid, Done_DistGrid ! life cycle routines
  37. public :: Get_DistGrid ! get bounds & grids
  38. public :: Print_DistGrid ! print utility (debug)
  39. public :: GATHER, SCATTER, UPDATE_HALO ! communication routines
  40. public :: GATHER_I_BAND, GATHER_J_BAND ! communication routines
  41. public :: SCATTER_I_BAND, SCATTER_J_BAND! for distributed slices
  42. public :: UPDATE_HALO_JBAND !
  43. public :: UPDATE_HALO_IBAND !
  44. public :: REDUCE ! mimic MPI_REDUCE / MPI_ALLREDUCE
  45. public :: DIST_ARR_STAT ! basic statistics of a distributed array
  46. public :: testcomm ! test communication (debug)
  47. !
  48. ! !PUBLIC TYPES:
  49. !
  50. TYPE, PUBLIC :: DIST_GRID
  51. PRIVATE
  52. ! parameters for global domain
  53. integer :: im_region ! number of longitudes
  54. integer :: jm_region ! number of latitudes
  55. ! parameters for local domain
  56. integer :: i_strt ! begin local domain longitude index
  57. integer :: i_stop ! end local domain longitude index
  58. integer :: i_strt_halo ! begin halo longitude index
  59. integer :: i_stop_halo ! end halo longitude index
  60. integer :: j_strt ! begin local domain latitude index
  61. integer :: j_stop ! end local domain latitude index
  62. integer :: j_strt_halo ! begin halo latitude index
  63. integer :: j_stop_halo ! end halo latitude index
  64. type(TllGridInfo) :: lli ! local Lat-Lon grid info
  65. type(TllGridInfo) :: glbl_lli ! global Lat-Lon grid info
  66. type(TllGridInfo) :: lli_z ! global meridional grid info
  67. integer :: neighbors(4) ! rank of (west, north, east, south) processes
  68. logical :: has_south_pole ! south pole is in local domain
  69. logical :: has_north_pole ! north pole is in local domain
  70. logical :: has_south_boundary ! south boundary is in local domain
  71. logical :: has_north_boundary ! north boundary is in local domain
  72. logical :: has_west_boundary ! west boundary is in local domain
  73. logical :: has_east_boundary ! east boundary is in local domain
  74. logical :: is_periodic ! covers [-180, 180]
  75. ! i_start, i_stop, j_start, j_stop of all PEs: (4,npes)
  76. integer, pointer :: bounds(:,:)
  77. END TYPE DIST_GRID
  78. !
  79. ! !PRIVATE DATA MEMBERS:
  80. !
  81. character(len=*), parameter :: mname='Domain_Decomp_MOD_'
  82. !
  83. ! !INTERFACE:
  84. !
  85. INTERFACE Init_DistGrid
  86. MODULE PROCEDURE INIT_DISTGRID_FROM_REGION
  87. MODULE PROCEDURE INIT_DISTGRID_FROM_LLI
  88. END INTERFACE
  89. INTERFACE Update_halo ! for arrays distributed across both I and J (1st and 2nd dim)
  90. MODULE PROCEDURE Update_halo_2d_r
  91. MODULE PROCEDURE Update_halo_3d_r
  92. MODULE PROCEDURE Update_halo_4d_r
  93. MODULE PROCEDURE Update_halo_2d_i
  94. END INTERFACE
  95. INTERFACE Update_halo_jband ! for arrays distributed across I (1st dim)
  96. MODULE PROCEDURE Update_halo_jband_1d_r
  97. MODULE PROCEDURE Update_halo_jband_2d_r
  98. MODULE PROCEDURE Update_halo_jband_3d_r
  99. MODULE PROCEDURE Update_halo_jband_4d_r
  100. END INTERFACE
  101. INTERFACE Update_halo_iband ! for arrays distributed across J (1st dim)
  102. MODULE PROCEDURE Update_halo_iband_1d_r
  103. END INTERFACE
  104. INTERFACE Gather
  105. MODULE PROCEDURE gather_2d_i
  106. MODULE PROCEDURE gather_2d_r
  107. MODULE PROCEDURE gather_3d_r
  108. MODULE PROCEDURE gather_4d_r
  109. MODULE PROCEDURE gather_5d_r
  110. END INTERFACE
  111. INTERFACE Scatter
  112. MODULE PROCEDURE scatter_2d_r
  113. MODULE PROCEDURE scatter_3d_r
  114. MODULE PROCEDURE scatter_4d_r
  115. END INTERFACE
  116. INTERFACE Gather_I_Band
  117. MODULE PROCEDURE gather_iband_1d_r
  118. ! MODULE PROCEDURE gather_iband_2d_r
  119. MODULE PROCEDURE gather_iband_3d_r
  120. END INTERFACE
  121. INTERFACE Gather_J_Band
  122. ! MODULE PROCEDURE gather_jband_1d_r
  123. MODULE PROCEDURE gather_jband_2d_r
  124. MODULE PROCEDURE gather_jband_3d_r
  125. MODULE PROCEDURE gather_jband_4d_r
  126. END INTERFACE
  127. INTERFACE Scatter_I_Band
  128. MODULE PROCEDURE scatter_iband_1d_r
  129. MODULE PROCEDURE scatter_iband_2d_r
  130. END INTERFACE
  131. INTERFACE Scatter_J_Band
  132. MODULE PROCEDURE scatter_jband_1d_r
  133. MODULE PROCEDURE scatter_jband_2d_r
  134. MODULE PROCEDURE scatter_jband_3d_r
  135. MODULE PROCEDURE scatter_jband_4d_r
  136. END INTERFACE
  137. INTERFACE Reduce
  138. MODULE PROCEDURE reduce_2d_r
  139. MODULE PROCEDURE reduce_3d_r
  140. MODULE PROCEDURE reduce_4d_r
  141. END INTERFACE
  142. INTERFACE Dist_Arr_Stat
  143. MODULE PROCEDURE dist_arr_stat_2d_r
  144. END INTERFACE
  145. !
  146. ! !REVISION HISTORY:
  147. ! 01 Nov 2011 - P. Le Sager - v0
  148. ! 13 Feb 2013 - P. Le Sager - Remove deprecated MPI_GET_EXTENT and
  149. ! MPI_TYPE_HVECTOR.
  150. ! - Extra garbage clean.
  151. ! 21 Jun 2013 - P. Le Sager - Replace all MPI_SEND with MPI_SSEND in all
  152. ! scatter routines to avoid buffering.
  153. !
  154. ! !REMARKS:
  155. ! (1) There are two categories of subroutines here: one to define the
  156. ! distributed grid objects, the other for communication between processes
  157. ! (update_halo for one-to-one comm, and scatter/gather/reduce for
  158. ! collective comm).
  159. ! Communication routines are for data DECOMPOSED on the domain. For
  160. ! more general comm, see partools module.
  161. !
  162. ! (2) GATHER & SCATTER (see the usage sketch after this header):
  163. ! - global arrays only have to be truly global on root; they can be
  164. ! (1,1,..) on the other processes.
  165. ! - global arrays are without halo.
  166. ! - if not using MPI, gather and scatter just copy arrays, so it may be
  167. ! better not to call them at all and save the memory in that case.
  168. !
  169. ! (3) Be careful when passing a slice (or a pointer to a slice) as argument:
  170. !
  171. ! Passing a subarray will *NOT* work most of the time, unless it is a
  172. ! slice on the last dimension(s). This will give erroneous results:
  173. !
  174. ! allocate( gbl_arr(-3:imr, 1:jmr ))
  175. ! call gather( dgrid, local_arr, gbl_arr(1:imr,:), halo, status)
  176. !
  177. ! but passing full size array will work:
  178. !
  179. ! allocate( gbl_arr(-3:imr, 1:jmr ))
  180. ! allocate( temp(1:imr,1:jmr) )
  181. ! call gather( dgrid, local_arr, temp, halo, status)
  182. ! gbl_arr(1:imr,:) = temp
  183. !
  184. ! (4) Scatter[Gather]_I[J]_band are communications between a row or column
  185. ! of processors and the root processor (optionally the row_root in a few
  186. ! cases).
  187. !EOP
  188. !------------------------------------------------------------------------
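!
! Usage sketch for remark (2) above (a minimal, illustrative example; it
! assumes an already initialized dist_grid 'dgrid', an integer 'halo'
! chosen by the caller, and 'isRoot'/'status' as used elsewhere in this
! module):
!
!   real, allocatable :: local(:,:), global(:,:)
!   integer :: i1, i2, j1, j2, im, jm
!
!   call Get_DistGrid(dgrid, i_strt=i1, i_stop=i2, j_strt=j1, j_stop=j2, &
!                     i_wrld=im, j_wrld=jm)
!   allocate(local(i1-halo:i2+halo, j1-halo:j2+halo))  ! local tile with halo
!   if (isRoot) then
!      allocate(global(im,jm))    ! truly global, without halo, on root only
!   else
!      allocate(global(1,1))      ! dummy on the other processes
!   end if
!   call Gather(dgrid, local, global, halo, status)
!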
  189. CONTAINS
  190. !--------------------------------------------------------------------------
  191. ! TM5 !
  192. !--------------------------------------------------------------------------
  193. !BOP
  194. !
  195. ! !IROUTINE: DISTGRID_RANGE
  196. !
  197. ! !DESCRIPTION: Give range of indices covered by rank when using nprocs.
  198. ! This is used for one dimension, and thus called twice.
  199. ! Distribution is done as evenly as possible. E.g., 11 boxes
  200. ! across 3 processes are distributed as 4, 4 and 3 (worked example below).
  201. !\\
  202. !\\
  203. ! !INTERFACE:
  204. !
  205. SUBROUTINE DISTGRID_RANGE(ij, rank, nprocs, istart, iend)
  206. !
  207. ! !INPUT PARAMETERS:
  208. !
  209. integer, intent(in) :: ij ! defines range (1,..,ij) to be distributed
  210. integer, intent(in) :: rank ! current process, in (0,.., nprocs-1)
  211. integer, intent(in) :: nprocs ! total # of processes
  212. !
  213. ! !OUTPUT PARAMETERS:
  214. !
  215. integer, intent(out):: istart, iend ! index range covered by rank
  216. !
  217. ! !REVISION HISTORY:
  218. ! 01 Nov 2011 - P. Le Sager - v0
  219. !
  220. !EOP
  221. !------------------------------------------------------------------------
  222. !BOC
  223. integer :: iwork1, iwork2
  224. iwork1 = ij/nprocs
  225. iwork2 = mod(ij,nprocs)
  226. istart = rank * iwork1 + 1 + min(rank, iwork2)
  227. iend = istart + iwork1 - 1
  228. if (iwork2 > rank) iend = iend + 1
  229. END SUBROUTINE DISTGRID_RANGE
  230. !EOC
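!
! Worked example (sketch) of the even split described above, for ij=11
! boxes over nprocs=3 ranks:
!
!   integer :: r, i0, i1
!   do r = 0, 2
!      call DistGrid_Range(11, r, 3, i0, i1)
!      ! r=0 -> (1,4),  r=1 -> (5,8),  r=2 -> (9,11)  i.e. 4, 4 and 3 boxes
!   end do
!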
  231. !--------------------------------------------------------------------------
  232. ! TM5 !
  233. !--------------------------------------------------------------------------
  234. !BOP
  235. !
  236. ! !IROUTINE: INIT_DISTGRID_FROM_REGION
  237. !
  238. ! !DESCRIPTION: initialize a distributed grid object for a TM5 region
  239. !\\
  240. !\\
  241. ! !INTERFACE:
  242. !
  243. SUBROUTINE INIT_DISTGRID_FROM_REGION( DistGrid, region, rank, nplon, nplat, halo, status)
  244. !
  245. ! !USES:
  246. !
  247. use grid, only : init
  248. use dims, only : xbeg, xend, dx, im, xcyc
  249. use dims, only : ybeg, yend, dy, jm, touch_np, touch_sp
  250. !
  251. ! !RETURN VALUE:
  252. !
  253. type(dist_grid), intent(inout) :: DistGrid
  254. !
  255. ! !INPUT PARAMETERS:
  256. !
  257. integer, intent(in) :: region ! TM5 region number
  258. integer, intent(in) :: rank ! current process in (0,.., nprocs-1)
  259. integer, intent(in) :: nplon ! number of processes along longitude
  260. integer, intent(in) :: nplat ! number of processes along latitude
  261. integer, intent(in), optional :: halo ! halo size (default=0)
  262. !
  263. ! !OUTPUT PARAMETERS:
  264. !
  265. integer, intent(out) :: status
  266. !
  267. ! !REVISION HISTORY:
  268. ! 01 Nov 2011 - P. Le Sager - v0
  269. !
  270. ! !REMARKS:
  271. !
  272. ! (1) Ideally this is called with halo=<the halo used in the code>... but the
  273. ! same halo is not used for all data sets. Then, in many subroutines, halo has
  274. ! to be independent of the halo carried by the distributed grid. We could
  275. ! simplify things by using a halo of 2 for all distributed data, or
  276. ! disregard the halo in the dist_grid, but we just keep it general (as is).
  277. !
  278. ! (2) npe_lat/lon are also available through partools, but may not have been
  279. ! initialized yet, so we keep the nplon/nplat inputs here. We could do
  280. ! without them though...
  281. !
  282. !EOP
  283. !------------------------------------------------------------------------
  284. !BOC
  285. character(len=*), parameter :: rname = mname//'init_distgrid'
  286. integer :: start, end, iwidth, i, iml, jml, iwork(2), lshape(4)
  287. real :: yb, xb, dlon, dlat
  288. character(len=39) :: name
  289. ! for pretty print
  290. integer, parameter :: maxrow=5
  291. integer, parameter :: maxcol=5
  292. integer :: i1, j1, j2, k, work
  293. character(len=3) :: str1
  294. ! for virtual topology
  295. integer, allocatable :: dist_map(:,:) ! 2D process topology (np_lon, np_lat)
  296. integer, allocatable :: dist_mapw(:,:) ! 2D process topology with neighbors (np_lon+1, np_lat+1)
  297. integer :: rank_lon ! index of current process in azimuthal set (1,..,np_lon)
  298. integer :: rank_lat ! index of current process in meridional set (1,..,np_lat)
  299. !---------------------------------------------
  300. ! 2D process topology, and mapping 1D <-> 2D
  301. !---------------------------------------------
  302. ! besides the periodicity, topology is independent of the region
  303. allocate(dist_map(nplon, nplat))
  304. npes = nplat*nplon
  305. dist_map = reshape( (/ (i,i=0,npes-1) /), shape=(/ nplon, nplat /))
  306. ! store
  307. iwork = maxloc(dist_map, mask=dist_map.eq.rank)
  308. rank_lon = iwork(1) ! in [1..nplon]
  309. rank_lat = iwork(2) ! in [1..nplat]
  310. ! Neighbors = 2d process map with extra neighbors
  311. allocate(dist_mapw(0:nplon+1, 0:nplat+1))
  312. dist_mapw = MPI_PROC_NULL
  313. dist_mapw(1:nplon,1:nplat) = dist_map
  314. ! East-West periodicity
  315. DistGrid%is_periodic =.false.
  316. if (xcyc(region)==1) then
  317. dist_mapw( 0,1:nplat) = dist_map(nplon,:)
  318. dist_mapw(nplon+1,1:nplat) = dist_map(1,:)
  319. DistGrid%is_periodic =.true.
  320. end if
  321. DistGrid%neighbors = (/ dist_mapw(rank_lon-1, rank_lat ), & ! west
  322. dist_mapw(rank_lon, rank_lat+1), & ! north
  323. dist_mapw(rank_lon+1, rank_lat ), & ! east
  324. dist_mapw(rank_lon, rank_lat-1) /) ! south
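! Worked example (sketch): with nplon=3 and nplat=2 the rank map is
!
!    rank_lat 2 |  3  4  5
!    rank_lat 1 |  0  1  2
!                ----------
!      rank_lon    1  2  3
!
! so rank 0 has (rank_lon,rank_lat)=(1,1) and, for an East-West periodic
! region, neighbors = (/ 2, 3, 1, MPI_PROC_NULL /) (west, north, east,
! south). The MPI_PROC_NULL entries turn the corresponding halo
! send/receive calls into no-ops at the domain edges.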
  325. !---------------------------------------------
  326. ! fill in distributed grid info
  327. !---------------------------------------------
  328. ! region boundaries
  329. DistGrid%has_south_boundary = (rank_lat == 1 )
  330. DistGrid%has_north_boundary = (rank_lat == nplat)
  331. DistGrid%has_west_boundary = (rank_lon == 1 )
  332. DistGrid%has_east_boundary = (rank_lon == nplon)
  333. ! poles
  334. DistGrid%has_south_pole = DistGrid%has_south_boundary .and. (touch_sp(region) == 1)
  335. DistGrid%has_north_pole = DistGrid%has_north_boundary .and. (touch_np(region) == 1)
  336. ! max halo that will be used in the code
  337. iwidth=0
  338. if (present(halo)) iwidth=halo
  339. ! index ranges covered by current process
  340. call DistGrid_range(im(region), rank_lon-1, nplon, start, end)
  341. ! check we are within the limit, i.e. we must have #boxes >= halo and at least 1.
  342. if ( (end-start+1) < max(1,iwidth) ) then
  343. write(gol,*)" Too much processors in X (longitude) direction! ", nplon, iwidth, start, end
  344. call goErr
  345. status=1; TRACEBACK; return
  346. end if
  347. DistGrid%im_region = im(region)
  348. DistGrid%i_strt = start
  349. DistGrid%i_stop = end
  350. DistGrid%i_strt_halo = start-iwidth
  351. DistGrid%i_stop_halo = end+iwidth
  352. ! To think about it when/if needed:
  353. ! if (DistGrid%has_north_pole) DistGrid%i_stop_halo = end
  354. ! if (DistGrid%has_south_pole) DistGrid%i_strt_halo = start
  355. call DistGrid_range(jm(region), rank_lat-1, nplat, start, end)
  356. if ( (end-start+1) < max(1,iwidth) ) then
  357. write(gol,*)" Too much processors in Y (latitude) direction! ", nplat, iwidth, start, end
  358. call goErr
  359. status=1; TRACEBACK; return
  360. end if
  361. DistGrid%jm_region = jm(region)
  362. DistGrid%j_strt = start
  363. DistGrid%j_stop = end
  364. DistGrid%j_strt_halo = start-iwidth
  365. DistGrid%j_stop_halo = end+iwidth
  366. #ifdef parallel_cplng
  367. if ( modulo(im(region),nplon) /= 0) then
  368. write(gol,'("number of X processors ",i3," does not evenly divide the number of grid boxes ",i3)') &
  369. nplon, im(region) ; call goErr
  370. status=1; TRACEBACK; return
  371. endif
  372. if ( modulo(jm(region),nplat) /= 0) then
  373. write(gol,'("number of Y processors ",i3," does not evenly divide the number of grid boxes ",i3)') &
  374. nplat, jm(region) ; call goErr
  375. status=1; TRACEBACK; return
  376. endif
  377. #endif
  378. !---------------------------------------------
  379. ! geophysical grids
  380. !---------------------------------------------
  381. ! grid spacing:
  382. dlon = real(xend(region)-xbeg(region))/im(region)
  383. dlat = real(yend(region)-ybeg(region))/jm(region)
  384. !------ local
  385. iml = DistGrid%i_stop - DistGrid%i_strt + 1
  386. jml = DistGrid%j_stop - DistGrid%j_strt + 1
  387. xb = xbeg(region) + ( DistGrid%i_strt - 0.5 ) * dlon
  388. yb = ybeg(region) + ( DistGrid%j_strt - 0.5 ) * dlat
  389. write(name, '("distributed grid on process ", i5.5)') rank
  390. call INIT( DistGrid%lli, xb, dlon, iml, yb, dlat, jml, status, name=name )
  391. IF_NOTOK_RETURN(status=1)
  392. !------ global
  393. xb = xbeg(region) + 0.5 * dlon
  394. yb = ybeg(region) + 0.5 * dlat
  395. write(name, '("global grid on process ", i5.5)') rank
  396. call INIT( DistGrid%glbl_lli, xb, dlon, im(region), yb, dlat, jm(region), status, name=name )
  397. IF_NOTOK_RETURN(status=1)
  398. !------ global meridional grid
  399. name = "merid " // trim(name)
  400. call INIT( DistGrid%lli_z, 0.0, 360., 1, yb, dlat, jm(region), status, name=name )
  401. IF_NOTOK_RETURN(status=1)
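! Worked example (sketch): for a 1x1-degree global region (xbeg=-180,
! xend=180, im=360) dlon=1, and a process whose tile starts at i_strt=181
! gets xb = -180 + (181-0.5)*1 = 0.5, i.e. the centre of its first cell;
! likewise the global grid starts at xbeg + 0.5*dlon = -179.5.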
  402. !---------------------------------------------
  403. ! store shapes info of all PE on all PE (used only on root so far, but may
  404. ! become useful if we gather/scatter to/from non-root in the future)
  405. !---------------------------------------------
  406. #ifdef MPI
  407. allocate(DistGrid%bounds(4,0:npes-1))
  408. lshape = (/ DistGrid%i_strt, DistGrid%i_stop, DistGrid%j_strt, DistGrid%j_stop /)
  409. call MPI_ALLGATHER(lshape, 4, MPI_INTEGER, DistGrid%bounds, 4, MPI_INTEGER, localComm, ierr)
  410. #endif
  411. !---------------------------------------------
  412. ! PRINT (Debug) 2D process topology
  413. !---------------------------------------------
  414. if(okdebug)then
  415. write(gol,*) '------------- world view ----------------------' ; call goPr
  416. write(gol, '("process #", i5, " out of ", i5)') myid, npes ; call goPr
  417. write(gol, '("2D rank = [",i4,", ",i4,"]")') rank_lon, rank_lat ; call goPr
  418. i1=min(maxcol,nplon)
  419. str1='...'
  420. if (i1==nplon) str1=''
  421. work=nplat/2
  422. j1=min(work, maxrow)
  423. j2=max(nplat-maxrow+1, work+1)
  424. do k=nplat,j2,-1
  425. write(gol,*) dist_map(1:i1,k),str1 ; call goPr
  426. enddo
  427. if ((j2-1) /= j1) write(gol,*)" ..." ; call goPr
  428. do k=j1,1,-1
  429. write(gol,*) dist_map(1:i1,k),str1 ; call goPr
  430. enddo
  431. write(gol,*) ''
  432. write(gol,*) '-----------------------------------------------' ; call goPr
  433. end if
  434. ! Done
  435. deallocate(dist_map)
  436. deallocate(dist_mapw)
  437. status = 0
  438. END SUBROUTINE INIT_DISTGRID_FROM_REGION
  439. !EOC
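!
! Life-cycle sketch (illustrative values: region 1 split over a 4x2
! process layout with a halo of 2; 'myid' as used elsewhere in this
! module):
!
!   type(dist_grid) :: dgrid
!   integer         :: status
!
!   call Init_DistGrid(dgrid, 1, myid, 4, 2, halo=2, status=status)
!   IF_NOTOK_RETURN(status=1)
!   ! ... gather/scatter/update_halo on dgrid ...
!   call Done_DistGrid(dgrid, status)
!   IF_NOTOK_RETURN(status=1)
!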
  440. !--------------------------------------------------------------------------
  441. ! TM5 !
  442. !--------------------------------------------------------------------------
  443. !BOP
  444. !
  445. ! !IROUTINE: INIT_DISTGRID_FROM_LLI
  446. !
  447. ! !DESCRIPTION: initialize a distributed grid object from a lli object
  448. !\\
  449. !\\
  450. ! !INTERFACE:
  451. !
  452. SUBROUTINE INIT_DISTGRID_FROM_LLI( DistGrid, lli, rank, halo, status)
  453. !
  454. ! !USES:
  455. !
  456. use grid, only : init, assignment(=)
  457. !
  458. ! !INPUT/OUTPUT PARAMETERS:
  459. !
  460. type(dist_grid), intent(inout) :: DistGrid
  461. !
  462. ! !INPUT PARAMETERS:
  463. !
  464. type(TllGridInfo), intent(in) :: lli
  465. integer, intent(in) :: rank ! current process in (0,.., nprocs-1)
  466. integer, optional, intent(in) :: halo ! halo size (default=0)
  467. !
  468. ! !OUTPUT PARAMETERS:
  469. !
  470. integer, intent(out) :: status
  471. !
  472. ! !REVISION HISTORY:
  473. ! 18 Nov 2013 - Ph. Le Sager - v0, adapted from init_distgrid_from_region
  474. !
  475. ! !REMARKS:
  476. !
  477. ! (1) See INIT_DISTGRID_FROM_REGION for comment about halo input.
  478. ! (2) Uses npe_lat/lon from partools module : TM5_MPI_Init2 must have been
  479. ! called before.
  480. !EOP
  481. !------------------------------------------------------------------------
  482. !BOC
  483. character(len=*), parameter :: rname = mname//'init_distgrid_from_lli'
  484. integer :: start, end, iwidth, i, iml, jml, iwork(2), lshape(4)
  485. real :: yb, xb, dlon, dlat
  486. character(len=39) :: name
  487. integer :: nplon ! number of processes along longitude
  488. integer :: nplat ! number of processes along latitude
  489. ! for pretty print
  490. integer, parameter :: maxrow=5
  491. integer, parameter :: maxcol=5
  492. integer :: i1, j1, j2, k, work
  493. character(len=3) :: str1
  494. ! for virtual topology
  495. integer, allocatable :: dist_map(:,:) ! 2D process topology (np_lon, np_lat)
  496. integer, allocatable :: dist_mapw(:,:) ! 2D process topology with neighbors (np_lon+1, np_lat+1)
  497. integer :: rank_lon ! index of current process in azimuthal set (1,..,np_lon)
  498. integer :: rank_lat ! index of current process in meridional set (1,..,np_lat)
  499. !---------------------------------------------
  500. ! test for valid input
  501. !---------------------------------------------
  502. if (.not.(associated( lli%lon_deg ))) then
  503. write(gol,*)" The LLI object has not been initialized. " ; call goErr
  504. write(gol,*)" A distributed grid for MPI cannot be build." ; call goErr
  505. status=1; TRACEBACK; return
  506. end if
  507. nplat = npe_lat
  508. nplon = npe_lon
  509. !---------------------------------------------
  510. ! 2D process topology, and mapping 1D <-> 2D
  511. !---------------------------------------------
  512. ! besides the periodicity, topology is independent of the region
  513. allocate(dist_map(nplon, nplat))
  514. npes = nplat*nplon
  515. dist_map = reshape( (/ (i,i=0,npes-1) /), shape=(/ nplon, nplat /))
  516. ! store
  517. iwork = maxloc(dist_map, mask=dist_map.eq.rank)
  518. rank_lon = iwork(1) ! in [1..nplon]
  519. rank_lat = iwork(2) ! in [1..nplat]
  520. ! Neighbors = 2d process map with extra neighbors
  521. allocate(dist_mapw(0:nplon+1, 0:nplat+1))
  522. dist_mapw = MPI_PROC_NULL
  523. dist_mapw(1:nplon,1:nplat) = dist_map
  524. ! East-West periodicity
  525. DistGrid%is_periodic =.false.
  526. if ( lli%dlon_deg*lli%im == 360. ) then
  527. dist_mapw( 0,1:nplat) = dist_map(nplon,:)
  528. dist_mapw(nplon+1,1:nplat) = dist_map(1,:)
  529. DistGrid%is_periodic =.true.
  530. end if
  531. DistGrid%neighbors = (/ dist_mapw(rank_lon-1, rank_lat ), & ! west
  532. dist_mapw(rank_lon, rank_lat+1), & ! north
  533. dist_mapw(rank_lon+1, rank_lat ), & ! east
  534. dist_mapw(rank_lon, rank_lat-1) /) ! south
  535. !---------------------------------------------
  536. ! fill in distributed grid info
  537. !---------------------------------------------
  538. ! region boundaries
  539. DistGrid%has_south_boundary = (rank_lat == 1 )
  540. DistGrid%has_north_boundary = (rank_lat == nplat)
  541. DistGrid%has_west_boundary = (rank_lon == 1 )
  542. DistGrid%has_east_boundary = (rank_lon == nplon)
  543. ! poles
  544. DistGrid%has_south_pole = DistGrid%has_south_boundary .and. (lli%blat_deg(0) == -90.0)
  545. DistGrid%has_north_pole = DistGrid%has_north_boundary .and. (lli%blat_deg(lli%jm) == 90.0)
  546. ! max halo that will be used in the code
  547. iwidth=0
  548. if (present(halo)) iwidth=halo
  549. ! index ranges covered by current process
  550. call DistGrid_range(lli%im, rank_lon-1, nplon, start, end)
  551. ! check we are within the limit, i.e. we must have #boxes >= halo and at least 1.
  552. if ( (end-start+1) < max(1,iwidth) ) then
  553. write(gol,*)" Too many processors in X (longitude) direction:", nplon, iwidth, start, end
  554. call goErr
  555. status=1; TRACEBACK; return
  556. end if
  557. DistGrid%im_region = lli%im
  558. DistGrid%i_strt = start
  559. DistGrid%i_stop = end
  560. DistGrid%i_strt_halo = start-iwidth
  561. DistGrid%i_stop_halo = end+iwidth
  562. ! To think about it when/if needed:
  563. ! if (DistGrid%has_north_pole) DistGrid%i_stop_halo = end
  564. ! if (DistGrid%has_south_pole) DistGrid%i_strt_halo = start
  565. call DistGrid_range(lli%jm, rank_lat-1, nplat, start, end)
  566. if ( (end-start+1) < max(1,iwidth) ) then
  567. write(gol,*)" Too many processors in Y (latitude) direction:", nplat, iwidth, start, end
  568. call goErr
  569. status=1; TRACEBACK; return
  570. end if
  571. DistGrid%jm_region = lli%jm
  572. DistGrid%j_strt = start
  573. DistGrid%j_stop = end
  574. DistGrid%j_strt_halo = start-iwidth
  575. DistGrid%j_stop_halo = end+iwidth
  576. !---------------------------------------------
  577. ! geophysical grids
  578. !---------------------------------------------
  579. !------ tile
  580. iml = DistGrid%i_stop - DistGrid%i_strt + 1
  581. jml = DistGrid%j_stop - DistGrid%j_strt + 1
  582. xb = lli%lon_deg(1) + ( DistGrid%i_strt - 1 ) * lli%dlon_deg
  583. yb = lli%lat_deg(1) + ( DistGrid%j_strt - 1 ) * lli%dlat_deg
  584. write(name, '("distributed grid on process ", i5.5)') rank
  585. call INIT( DistGrid%lli, xb, lli%dlon_deg, iml, yb, lli%dlat_deg, jml, status, name=name )
  586. IF_NOTOK_RETURN(status=1)
  587. !------ world
  588. DistGrid%glbl_lli = lli
  589. !------ world meridional grid
  590. name = "merid "
  591. call INIT( DistGrid%lli_z, 0.0, 360., 1, yb, lli%dlat_deg, lli%jm, status, name=name )
  592. IF_NOTOK_RETURN(status=1)
  593. !---------------------------------------------
  594. ! store shapes info of all PE on all PE (used only on root so far, but may
  595. ! become useful if we gather/scatter to/from non-root in the future)
  596. !---------------------------------------------
  597. #ifdef MPI
  598. allocate(DistGrid%bounds(4,0:npes-1))
  599. lshape = (/ DistGrid%i_strt, DistGrid%i_stop, DistGrid%j_strt, DistGrid%j_stop /)
  600. call MPI_ALLGATHER(lshape, 4, MPI_INTEGER, DistGrid%bounds, 4, MPI_INTEGER, localComm, ierr)
  601. #endif
  602. !---------------------------------------------
  603. ! PRINT (Debug) 2D process topology
  604. !---------------------------------------------
  605. if(okdebug)then
  606. write(gol,*) '------------- world view ----------------------' ; call goPr
  607. write(gol, '("process #", i5, " out of ", i5)') myid, npes ; call goPr
  608. write(gol, '("2D rank = [",i4,", ",i4,"]")') rank_lon, rank_lat ; call goPr
  609. i1=min(maxcol,nplon)
  610. str1='...'
  611. if (i1==nplon) str1=''
  612. work=nplat/2
  613. j1=min(work, maxrow)
  614. j2=max(nplat-maxrow+1, work+1)
  615. do k=nplat,j2,-1
  616. write(gol,*) dist_map(1:i1,k),str1 ; call goPr
  617. enddo
  618. if ((j2-1) /= j1) write(gol,*)" ..." ; call goPr
  619. do k=j1,1,-1
  620. write(gol,*) dist_map(1:i1,k),str1 ; call goPr
  621. enddo
  622. write(gol,*) ''
  623. write(gol,*) '-----------------------------------------------' ; call goPr
  624. end if
  625. ! Done
  626. deallocate(dist_map)
  627. deallocate(dist_mapw)
  628. status = 0
  629. END SUBROUTINE INIT_DISTGRID_FROM_LLI
  630. !EOC
  631. !--------------------------------------------------------------------------
  632. ! TM5 !
  633. !--------------------------------------------------------------------------
  634. !BOP
  635. !
  636. ! !IROUTINE: DONE_DISTGRID
  637. !
  638. ! !DESCRIPTION: deallocate distributed grid object elements
  639. !\\
  640. !\\
  641. ! !INTERFACE:
  642. !
  643. SUBROUTINE DONE_DISTGRID( DistGrid, STATUS )
  644. !
  645. ! !USES:
  646. !
  647. use grid, only : done
  648. !
  649. ! !INPUT PARAMETERS:
  650. !
  651. type(dist_grid), intent(inout) :: DistGrid
  652. !
  653. ! !OUTPUT PARAMETERS:
  654. !
  655. integer, intent(out) :: status
  656. !
  657. ! !REVISION HISTORY:
  658. ! 01 Nov 2011 - P. Le Sager - v0
  659. !
  660. !EOP
  661. !------------------------------------------------------------------------
  662. !BOC
  663. character(len=*), parameter :: rname = mname//'Done_Distgrid'
  664. call done(DistGrid%lli, status)
  665. IF_NOTOK_RETURN(status=1)
  666. call done(DistGrid%lli_z, status)
  667. IF_NOTOK_RETURN(status=1)
  668. call done(DistGrid%glbl_lli, status)
  669. IF_NOTOK_RETURN(status=1)
  670. if (associated(DistGrid%bounds)) deallocate(DistGrid%bounds)
  671. status=0
  672. END SUBROUTINE DONE_DISTGRID
  673. !EOC
  674. !--------------------------------------------------------------------------
  675. ! TM5 !
  676. !--------------------------------------------------------------------------
  677. !BOP
  678. !
  679. ! !IROUTINE: GET_DISTGRID
  680. !
  681. ! !DESCRIPTION: provide quick access to object elements (bounds and grids),
  682. ! while keeping them private.
  683. !\\
  684. !\\
  685. ! !INTERFACE:
  686. !
  687. SUBROUTINE GET_DISTGRID(DistGrid, &
  688. i_strt, i_stop, i_strt_halo, i_stop_halo, &
  689. j_strt, j_stop, j_strt_halo, j_stop_halo, &
  690. i_wrld, j_wrld, &
  691. hasSouthPole, hasNorthPole, &
  692. hasSouthBorder, hasNorthBorder, &
  693. hasEastBorder, hasWestBorder, &
  694. lli, lli_z, global_lli )
  695. !
  696. ! !USES:
  697. !
  698. use grid, only : assignment(=) ! to copy lli
  699. !
  700. ! !INPUT PARAMETERS:
  701. !
  702. type(dist_grid), intent(in) :: DistGrid
  703. integer, optional :: i_strt, i_stop, i_strt_halo, i_stop_halo
  704. integer, optional :: j_strt, j_stop, j_strt_halo, j_stop_halo
  705. integer, optional :: i_wrld, j_wrld
  706. logical, optional :: hasSouthPole, hasNorthPole
  707. logical, optional :: hasSouthBorder, hasNorthBorder
  708. logical, optional :: hasWestBorder, hasEastBorder
  709. type(TllGridInfo), optional :: lli ! local Lat-Lon grid info
  710. type(TllGridInfo), optional :: global_lli ! global Lat-Lon grid info
  711. type(TllGridInfo), optional :: lli_z ! global zonal grid info
  712. !
  713. ! !REMARKS: You must "call DONE(lli, stat)" if you request lli, once you
  714. ! are done(!) with it, to avoid a memory leak.
  715. !
  716. !EOP
  717. !------------------------------------------------------------------------
  718. !BOC
  719. if (present(i_strt)) i_strt = DistGrid%i_strt
  720. if (present(i_stop)) i_stop = DistGrid%i_stop
  721. if (present(i_strt_halo)) i_strt_halo = DistGrid%i_strt_halo
  722. if (present(i_stop_halo)) i_stop_halo = DistGrid%i_stop_halo
  723. if (present(j_strt)) j_strt = DistGrid%j_strt
  724. if (present(j_stop)) j_stop = DistGrid%j_stop
  725. if (present(j_strt_halo)) j_strt_halo = DistGrid%j_strt_halo
  726. if (present(j_stop_halo)) j_stop_halo = DistGrid%j_stop_halo
  727. if (present(i_wrld)) i_wrld = DistGrid%im_region
  728. if (present(j_wrld)) j_wrld = DistGrid%jm_region
  729. if (present(hasSouthPole)) hasSouthPole = DistGrid%has_south_pole
  730. if (present(hasNorthPole)) hasNorthPole = DistGrid%has_north_pole
  731. if (present(hasSouthBorder)) hasSouthBorder = DistGrid%has_south_boundary
  732. if (present(hasNorthBorder)) hasNorthBorder = DistGrid%has_north_boundary
  733. if (present(hasEastBorder )) hasEastBorder = DistGrid%has_east_boundary
  734. if (present(hasWestBorder )) hasWestBorder = DistGrid%has_west_boundary
  735. if (present(lli)) lli = DistGrid%lli
  736. if (present(global_lli)) global_lli = DistGrid%glbl_lli
  737. if (present(lli_z)) lli_z = DistGrid%lli_z
  738. END SUBROUTINE GET_DISTGRID
  739. !EOC
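!
! Access sketch: query the local bounds and a copy of the local grid, and
! release that copy afterwards (see the remark above; 'done' comes from
! the grid module):
!
!   type(TllGridInfo) :: my_lli
!   integer :: i1, i2, j1, j2
!
!   call Get_DistGrid(dgrid, i_strt=i1, i_stop=i2, j_strt=j1, j_stop=j2, lli=my_lli)
!   ! ... use my_lli and the bounds i1:i2, j1:j2 ...
!   call Done(my_lli, status)
!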
  740. #ifdef MPI /* MPI TYPES */
  741. !--------------------------------------------------------------------------
  742. ! TM5 !
  743. !--------------------------------------------------------------------------
  744. !BOP
  745. !
  746. ! !IROUTINE: GET_HALO_TYPE
  747. !
  748. ! !DESCRIPTION: Returns derived MPI types needed for halo communications,
  749. ! and the start indices for the send & receive communications.
  750. ! Four of each are returned, one for each side of the domain,
  751. ! in the following order: West, North, East, South.
  752. !\\
  753. !\\
  754. ! !INTERFACE:
  755. !
  756. SUBROUTINE GET_HALO_TYPE( DistGrid, arr_shape, halo, datatype, srtype, ijsr, status )
  757. !
  758. ! !INPUT PARAMETERS:
  759. !
  760. type(dist_grid), intent(in) :: DistGrid
  761. integer, intent(in) :: arr_shape(:) ! shape of local array
  762. integer, intent(in) :: halo ! halo size
  763. integer, intent(in) :: datatype ! basic MPI datatype
  764. !
  765. ! !OUTPUT PARAMETERS:
  766. !
  767. integer, intent(out) :: srtype(4) ! derived MPI datatypes for send/recv
  768. integer, intent(out) :: ijsr(4,4) ! start indices for send/recv
  769. integer, intent(out) :: status
  770. !
  771. ! !REVISION HISTORY:
  772. ! 01 Nov 2011 - P. Le Sager - v0
  773. !
  774. ! !REMARKS:
  775. ! (1) Not tested on all imaginable cases, but should work with any size
  776. ! GE 2, and any of the basic numerical MPI_xxx datatypes.
  777. !
  778. !EOP
  779. !------------------------------------------------------------------------
  780. !BOC
  781. character(len=*), parameter :: rname = mname//'get_halo_type'
  782. integer :: nsslice, weslice, nstype, wetype ! MPI derived datatypes
  783. integer :: n, m, hstride ! sizes to build datatypes
  784. integer :: ndims, sz, i0, j0, i1, j1
  785. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, vstride
  786. sz = size(arr_shape)
  787. ! collapse third and above dimensions
  788. ndims = 1
  789. if (sz > 2) ndims = product(arr_shape(3:))
  790. ! strides
  791. CALL MPI_TYPE_GET_EXTENT( datatype, lb, sizeoftype, ierr)
  792. IF_NOTOK_MPI(status=1)
  793. hstride = arr_shape(1)
  794. vstride = arr_shape(1)*arr_shape(2)*sizeoftype
  795. ! size of data slice to carry
  796. n = DistGrid%I_STOP - DistGrid%I_STRT + 1
  797. m = DistGrid%J_STOP - DistGrid%J_STRT + 1
  798. ! Type for North and South halo regions
  799. ! --------------------------------------
  800. ! halo lines of length N, separated by hstride
  801. call MPI_TYPE_VECTOR (halo, n, hstride, datatype, nsslice, ierr)
  802. IF_NOTOK_MPI(status=1)
  803. ! stack 3rd (and above) dimension if any
  804. if (ndims == 1) then
  805. nstype = nsslice
  806. else
  807. ! note : also works with ndims=1
  808. call MPI_TYPE_CREATE_HVECTOR (ndims, 1, vstride, nsslice, nstype, ierr)
  809. IF_NOTOK_MPI(status=1)
  810. call MPI_TYPE_FREE(nsslice, ierr)
  811. IF_NOTOK_MPI(status=1)
  812. end if
  813. call MPI_TYPE_COMMIT (nstype, ierr)
  814. IF_NOTOK_MPI(status=1)
  815. ! Type for West and East halo regions
  816. ! ------------------------------------
  817. ! M lines of length 'halo', separated by hstride
  818. call MPI_TYPE_VECTOR (m, halo, hstride, datatype, weslice, ierr)
  819. IF_NOTOK_MPI(status=1)
  820. ! stack 3rd (and above) dimension
  821. if (ndims == 1) then
  822. wetype = weslice
  823. else
  824. ! note : also works with ndims=1
  825. call MPI_TYPE_CREATE_HVECTOR (ndims, 1, vstride, weslice, wetype, ierr)
  826. IF_NOTOK_MPI(status=1)
  827. call MPI_TYPE_FREE(weslice, ierr)
  828. IF_NOTOK_MPI(status=1)
  829. end if
  830. call MPI_TYPE_COMMIT (wetype, ierr)
  831. IF_NOTOK_MPI(status=1)
  832. ! Buffers anchors
  833. ! ----------------
  834. ! Assuming that neighbors are stored in (West, North, East, South) order
  835. i0 = DistGrid%i_strt
  836. j0 = DistGrid%j_strt
  837. i1 = DistGrid%i_stop
  838. j1 = DistGrid%j_stop
  839. srtype = (/ wetype, nstype, wetype, nstype /)
  840. ijsr(:,1) = (/ i0, i0, i1+1-halo, i0 /) ! i-start location of buffer to send
  841. ijsr(:,2) = (/ j0, j1-halo+1, j0, j0 /) ! j-start location of buffer to send
  842. ijsr(:,3) = (/ i0-halo, i0, i1+1, i0 /) ! i-start location of buffer to receive
  843. ijsr(:,4) = (/ j0, j1+1, j0, j0-halo /) ! j-start location of buffer to receive
  844. status=0
  845. END SUBROUTINE GET_HALO_TYPE
  846. !EOC
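!
! Worked example (sketch): for a local array declared as
!   real :: arr(i0-2:i1+2, j0-2:j1+2, lm)          ! i.e. halo = 2
! arr_shape = (/ n+4, m+4, lm /) with n=i1-i0+1 and m=j1-j0+1, so that
! hstride = n+4 (elements between rows) and vstride = (n+4)*(m+4)*sizeoftype
! (bytes between levels). The North/South type is then 'halo' rows of
! length n (MPI_TYPE_VECTOR), replicated lm times with
! MPI_TYPE_CREATE_HVECTOR; the West/East type is m rows of length 'halo'.
!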
  847. !--------------------------------------------------------------------------
  848. ! TM5 !
  849. !--------------------------------------------------------------------------
  850. !BOP
  851. !
  852. ! !IROUTINE: GET_INTERIOR_TYPE
  853. !
  854. ! !DESCRIPTION: Returns derived MPI types that describe the interior domains
  855. ! needed for collective communications.
  856. !\\
  857. !\\
  858. ! !INTERFACE:
  859. !
  860. SUBROUTINE GET_INTERIOR_TYPE( DistGrid, shp, datatype, linterior, ginterior, status )
  861. !
  862. ! !INPUT PARAMETERS:
  863. !
  864. type(dist_grid), intent(in) :: DistGrid
  865. integer, intent(in) :: shp(:) ! shape of local array
  866. integer, intent(in) :: datatype ! basic MPI datatype
  867. !
  868. ! !OUTPUT PARAMETERS:
  869. !
  870. ! derived MPI datatypes describing distributed interior domains:
  871. integer, intent(out) :: ginterior(npes-1) ! within global array (used by root, as many as NPES-1)
  872. integer, intent(out) :: linterior ! within local array (used by non-root)
  873. integer, intent(out) :: status
  874. !
  875. ! !REVISION HISTORY:
  876. ! 01 Nov 2011 - P. Le Sager - v0
  877. !
  878. ! !REMARKS:
  879. ! (1) input must be checked before by calling CHECK_DIST_ARR first
  880. !
  881. !EOP
  882. !------------------------------------------------------------------------
  883. !BOC
  884. character(len=*), parameter :: rname = mname//'get_interior_type'
  885. integer :: gslice, lslice ! intermediate datatypes
  886. integer :: n, m ! sizes to build datatypes
  887. integer :: hlstride, hgstride ! strides to build datatypes
  888. integer :: stack, sz, klm
  889. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, vlstride, vgstride
  890. ! init : number of dimensions, default value
  891. sz = size(shp)
  892. ginterior = MPI_DATATYPE_NULL
  893. linterior = MPI_DATATYPE_NULL
  894. ! collapse third and above dimensions
  895. stack = 1
  896. if (sz > 2) stack = product(shp(3:))
  897. ! size of data slice to carry
  898. n = DistGrid%I_STOP - DistGrid%I_STRT + 1
  899. m = DistGrid%J_STOP - DistGrid%J_STRT + 1
  900. CALL MPI_TYPE_GET_EXTENT( datatype, lb, sizeoftype, ierr)
  901. IF_NOTOK_MPI(status=1)
  902. ! horizontal global stride (in data)
  903. hgstride = DistGrid%im_region
  904. ! vertical global stride (in byte)
  905. vgstride = DistGrid%im_region * DistGrid%jm_region * sizeoftype
  906. ! local strides (may be different than n and n*m because of halo)
  907. hlstride = shp(1) ! in data
  908. vlstride = shp(1)*shp(2)*sizeoftype ! in byte
  909. if ( isRoot ) then
  910. do klm=1,npes-1
911. ! horizontal chunk is M lines of length N, separated by a global
  912. ! stride
  913. n = DistGrid%bounds(2,klm) - DistGrid%bounds(1,klm) + 1
  914. m = DistGrid%bounds(4,klm) - DistGrid%bounds(3,klm) + 1
  915. call MPI_TYPE_VECTOR (m, n, hgstride, datatype, gslice, ierr)
  916. IF_NOTOK_MPI(status=1)
  917. ! stack 3rd and above dimensions
  918. if (stack == 1) then
  919. ginterior(klm) = gslice
  920. else
  921. ! note : also works with stack=1
  922. call MPI_TYPE_CREATE_HVECTOR(stack, 1, vgstride, gslice, ginterior(klm), ierr)
  923. IF_NOTOK_MPI(status=1)
  924. call MPI_TYPE_FREE(gslice, ierr)
  925. IF_NOTOK_MPI(status=1)
  926. end if
  927. call MPI_TYPE_COMMIT (ginterior(klm), ierr)
  928. IF_NOTOK_MPI(status=1)
  929. end do
  930. else
931. ! local interior is basically M lines of length N, separated by a local
  932. ! stride
  933. call MPI_TYPE_VECTOR (m, n, hlstride, datatype, lslice, ierr)
  934. IF_NOTOK_MPI(status=1)
  935. ! stack 3rd and above dimensions
  936. if (stack == 1) then
  937. linterior = lslice
  938. else
  939. ! note : also works with stack=1
  940. call MPI_TYPE_CREATE_HVECTOR (stack, 1, vlstride, lslice, linterior, ierr)
  941. IF_NOTOK_MPI(status=1)
  942. call MPI_TYPE_FREE(lslice, ierr)
  943. IF_NOTOK_MPI(status=1)
  944. end if
  945. call MPI_TYPE_COMMIT (linterior, ierr)
  946. IF_NOTOK_MPI(status=1)
  947. end if
  948. status=0
  949. END SUBROUTINE GET_INTERIOR_TYPE
  950. !EOC
  951. !--------------------------------------------------------------------------
  952. ! TM5 !
  953. !--------------------------------------------------------------------------
  954. !BOP
  955. !
  956. ! !IROUTINE: FREE_DERIVED_TYPE
  957. !
  958. ! !DESCRIPTION: free all MPI derived datatypes in a vector
  959. !\\
  960. !\\
  961. ! !INTERFACE:
  962. !
  963. SUBROUTINE FREE_DERIVED_TYPE( datatype )
  964. !
  965. ! !INPUT/OUTPUT PARAMETERS:
  966. !
  967. integer, intent(inout) :: datatype(:) ! set of derived MPI datatypes
  968. !
  969. ! !REVISION HISTORY:
  970. ! 01 Nov 2011 - P. Le Sager - v0
  971. !
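! !REMARKS:
! (1) duplicate handles are freed only once, so the routine is safe for
! vectors such as srtype = (/ wetype, nstype, wetype, nstype /) built by
! GET_HALO_TYPE; handles equal to MPI_DATATYPE_NULL are skipped.
!
! A minimal usage sketch (as done at the end of the halo updates below):
!
! call FREE_DERIVED_TYPE( srtype )
!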
  972. !EOP
  973. !------------------------------------------------------------------------
  974. !BOC
  975. integer :: i, j
  976. integer :: res(size(datatype)) ! hold unique elements
  977. integer :: k ! number of unique elements
  978. ! Get list of unique handle(s)
  979. ! ----------------------------
  980. k = 1
  981. res(1) = 1
  982. outer: do i=2,size(datatype)
  983. ! look for a match
  984. do j=1,k
  985. if (datatype(res(j)) == datatype(i)) cycle outer ! match
  986. end do
  987. ! no match : add it to the list
  988. k = k + 1
  989. res(k) = i
  990. end do outer
  991. ! Free handle(s)
  992. ! ---------------------------
  993. do i=1,k
  994. if (datatype(res(i)) /= MPI_DATATYPE_NULL) &
  995. call MPI_TYPE_FREE(datatype(res(i)), ierr)
  996. end do
  997. END SUBROUTINE FREE_DERIVED_TYPE
  998. !EOC
  999. #endif /* MPI TYPES */
  1000. !--------------------------------------------------------------------------
  1001. ! TM5 !
  1002. !--------------------------------------------------------------------------
  1003. !BOP
  1004. !
  1005. ! !IROUTINE: CHECK_DIST_ARR
  1006. !
  1007. ! !DESCRIPTION: Check that the shape of a local array correspond to an array
  1008. ! distributed on current process. This check is done on the
  1009. ! first 2 dimensions only, along which the domain
  1010. ! decomposition is done.
  1011. !
  1012. ! Optionally: check shape of a global array. If arrays are 3D
  1013. ! or more, the 3rd and above dimensions of local and global
  1014. ! arrays are also compared. (This becomes "2D or more" for I-
  1015. ! and J-Slices.)
  1016. !\\
  1017. !\\
  1018. ! !INTERFACE:
  1019. !
  1020. SUBROUTINE CHECK_DIST_ARR( DistGrid, shp, halo, status, glbl_shp, jband, iband, has_global )
  1021. !
  1022. ! !INPUT PARAMETERS:
  1023. !
  1024. type(dist_grid), intent(in) :: DistGrid
  1025. integer, intent(in) :: shp(:) ! shape of local array
  1026. integer, intent(in) :: halo ! halo size
  1027. !
  1028. integer, intent(in), optional :: glbl_shp(:) ! shape of global array
  1029. logical, intent(in), optional :: jband, iband ! is it a zonal or meridional slice?
  1030. logical, intent(in), optional :: has_global ! current proc hold global array (default is root)
  1031. !
  1032. ! !OUTPUT PARAMETERS:
  1033. !
  1034. integer, intent(out) :: status
  1035. !
  1036. ! !REVISION HISTORY:
  1037. ! 01 Nov 2011 - P. Le Sager - v0
  1038. !
  1039. ! !REMARKS: i-band refers to a unique i value, i.e. a meridional dataset.
  1040. ! j-band refers to a unique j value, i.e. a zonal dataset.
  1041. !
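! A minimal usage sketch (as done in the gather/scatter routines below;
! 'local' and 'global' are illustrative array names):
!
! call CHECK_DIST_ARR( DistGrid, shape(local), halo, status, glbl_shp=shape(global) )
! IF_NOTOK_RETURN(status=1)
!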
  1042. !EOP
  1043. !------------------------------------------------------------------------
  1044. !BOC
  1045. character(len=*), parameter :: rname = mname//'check_dist_arr '
  1046. integer :: n, m, sz
  1047. integer, allocatable :: glbsz(:)
  1048. logical :: x_jband, x_iband, hold_global
  1049. status = 0
  1050. ! check inputs
  1051. x_jband=.false.
  1052. x_iband=.false.
  1053. if(present(iband))x_iband=iband
  1054. if(present(jband))x_jband=jband
  1055. if(x_iband.and.x_jband)then
  1056. write (gol,*) "CHECK_DIST_ARR: cannot have **both** I- and J-Slices" ; call goErr
  1057. TRACEBACK; status=1; return
  1058. end if
  1059. ! by default global array is expected on root
  1060. hold_global = isRoot
  1061. if (present(has_global)) hold_global=has_global
  1062. ! required size w/o halo
  1063. n = DistGrid%I_STOP - DistGrid%I_STRT + 1
  1064. m = DistGrid%J_STOP - DistGrid%J_STRT + 1
  1065. ! check shape of array
  1066. if (x_iband) then
  1067. if (shp(1) /= m+2*halo) then
  1068. write (gol,*) "CHECK_DIST_ARR: local array shape does not conform" ; call goErr
  1069. write (gol,'(" local array : ", i4)') shp(1) ; call goErr
  1070. write (gol,'(" should be : ", i4)') m+2*halo ; call goErr
  1071. write (gol,'(" with J-stop : ", i4)') DistGrid%J_STOP ; call goErr
  1072. write (gol,'(" J-start: ", i4)') DistGrid%J_STRT ; call goErr
  1073. TRACEBACK; status=1; return
  1074. end if
  1075. else if (x_jband) then
  1076. if (shp(1) /= n+2*halo) then
  1077. write (gol,*) "CHECK_DIST_ARR: local array shape does not conform" ; call goErr
1078. write (gol,'(" local array : ", i4)') shp(1) ; call goErr
1079. write (gol,'(" should be : ", i4)') n+2*halo ; call goErr
1080. write (gol,'(" with I-stop : ", i4)') DistGrid%I_STOP ; call goErr
1081. write (gol,'(" I-start: ", i4)') DistGrid%I_STRT ; call goErr
  1082. TRACEBACK; status=1; return
  1083. end if
  1084. else
  1085. if ((shp(1) /= n+2*halo).or.(shp(2) /= m+2*halo)) then
  1086. write (gol,*) "CHECK_DIST_ARR: local array shape does not conform" ; call goErr
  1087. write (gol,'(" local array : ",2i4)') shp(1:2) ; call goErr
  1088. write (gol,'(" should be : ",2i4)') n+2*halo, m+2*halo ; call goErr
1089. write (gol,'(" w/ I/J-stop : ", 2i4)') DistGrid%I_STOP, DistGrid%J_STOP ; call goErr
1090. write (gol,'(" I/J-start: ", 2i4)') DistGrid%I_STRT, DistGrid%J_STRT ; call goErr
  1091. TRACEBACK; status=1; return
  1092. end if
  1093. end if
  1094. ! check shape of global array on root
  1095. if (present(glbl_shp) .and. hold_global) then
  1096. sz = size(shp)
  1097. if (sz /= size(glbl_shp)) then
  1098. write (gol,'("CHECK_DIST_ARR : global and local arrays have different rank:")') ; call goErr
  1099. write (gol,'(" local rank : ", i4)') sz ; call goErr
  1100. write (gol,'(" global rank : ", i4)') size(glbl_shp) ; call goErr
  1101. TRACEBACK; status=1; return
  1102. end if
  1103. allocate(glbsz(sz))
  1104. glbsz = shp
  1105. if (x_iband) then
  1106. glbsz(1) = DistGrid%jm_region
  1107. else if (x_jband) then
  1108. glbsz(1) = DistGrid%im_region
  1109. else
  1110. glbsz(1:2) = (/ DistGrid%im_region, DistGrid%jm_region /)
  1111. end if
  1112. if ( any (glbl_shp /= glbsz) ) then
  1113. write (gol,'("CHECK_DIST_ARR : global array has wrong shape:")') ; call goErr
  1114. write (gol,*) " shape(global) : ", glbl_shp ; call goErr
1115. write (gol,*) " expected (im/jm_region and extra dims) : ", glbsz ; call goErr
  1116. TRACEBACK; status=1; return
  1117. end if
  1118. deallocate(glbsz)
  1119. end if
  1120. END SUBROUTINE CHECK_DIST_ARR
  1121. !EOC
  1122. !--------------------------------------------------------------------------
  1123. ! TM5 !
  1124. !--------------------------------------------------------------------------
  1125. !BOP
  1126. !
  1127. ! !IROUTINE: UPDATE_HALO_IBAND_1D_R
  1128. !
  1129. ! !DESCRIPTION: update halo cells of a vector distributed along latitudes
  1130. !\\
  1131. !\\
  1132. ! !INTERFACE:
  1133. !
  1134. SUBROUTINE UPDATE_HALO_IBAND_1D_R( DistGrid, dist_array, halo, status )
  1135. !
  1136. ! !INPUT PARAMETERS:
  1137. !
  1138. type(dist_grid), intent(in) :: DistGrid
  1139. integer, intent(in) :: halo
  1140. real, intent(inout) :: dist_array(DistGrid%j_strt-halo:)
  1141. !
  1142. ! !OUTPUT PARAMETERS:
  1143. !
  1144. integer, intent(out) :: status
  1145. !
  1146. ! !REVISION HISTORY:
  1147. ! 7 Jan 2016 - Ph. Le Sager - v0
  1148. !
  1149. !EOP
  1150. !------------------------------------------------------------------------
  1151. !BOC
  1152. character(len=*), parameter :: rname = mname//'update_halo_iband_1d_r'
  1153. integer :: j0, j1
  1154. #ifdef MPI
  1155. integer :: stat(MPI_STATUS_SIZE,4), req(4)
  1156. integer :: k, sz(1), tag_snd(2), tag_rcv(2)
  1157. integer :: ijsr(2,2), nghbr(2)
1158. ! check input (define status before any early return)
1159. status = 0
if ( halo == 0 ) return
  1160. sz = shape(dist_array)
  1161. j0 = DistGrid%j_strt
  1162. j1 = DistGrid%j_stop
  1163. ! degenerate case
  1164. if (npe_lat==1) return
  1165. if(okdebug)then
  1166. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, iband=.true. )
  1167. IF_NOTOK_RETURN(status=1)
  1168. end if
  1169. ! Buffers anchors
  1170. ! ----------------
  1171. ijsr(:,1) = (/ j1-halo+1, j0 /) ! j-start location of north and south buffers to send
  1172. ijsr(:,2) = (/ j1+1, j0-halo /) ! j-start location of north and south buffers to receive
  1173. ! Communicate
  1174. ! ---------------- ! only south and north
  1175. tag_snd = (/2,4/)
  1176. tag_rcv = (/4,2/)
  1177. nghbr = (/2,4/)
  1178. neigh : do k=1,2
  1179. call MPI_ISEND( dist_array( ijsr(k,1)), halo, my_real, DistGrid%neighbors(nghbr(k)), tag_snd(k), localComm, req(k), ierr)
  1180. call MPI_IRECV( dist_array( ijsr(k,2)), halo, my_real, DistGrid%neighbors(nghbr(k)), tag_rcv(k), localComm, req(k+2), ierr)
  1181. end do neigh
  1182. call MPI_WAITALL(4, req, stat, ierr)
  1183. IF_NOTOK_MPI(status=1)
  1184. #endif
  1185. status = 0
  1186. END SUBROUTINE UPDATE_HALO_IBAND_1D_R
  1187. !EOC
  1188. !--------------------------------------------------------------------------
  1189. ! TM5 !
  1190. !--------------------------------------------------------------------------
  1191. !BOP
  1192. !
1193. ! !IROUTINE: UPDATE_HALO_JBAND_1D_R
  1194. !
  1195. ! !DESCRIPTION: update halo cells of a decomposed zonal vector
  1196. !\\
  1197. !\\
  1198. ! !INTERFACE:
  1199. !
  1200. SUBROUTINE UPDATE_HALO_JBAND_1D_R( DistGrid, dist_array, halo, status )
  1201. !
  1202. ! !INPUT PARAMETERS:
  1203. !
  1204. type(dist_grid), intent(in) :: DistGrid
  1205. integer, intent(in) :: halo
  1206. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:)
  1207. !
  1208. ! !OUTPUT PARAMETERS:
  1209. !
  1210. integer, intent(out) :: status
  1211. !
  1212. ! !REVISION HISTORY:
  1213. ! 20 Feb 2012 - P. Le Sager - v0
  1214. !
  1215. !EOP
  1216. !------------------------------------------------------------------------
  1217. !BOC
  1218. character(len=*), parameter :: rname = mname//'update_halo_jband_1d_r'
  1219. integer :: i0, i1
  1220. #ifdef MPI
  1221. integer :: stat(MPI_STATUS_SIZE,4), req(4)
  1222. integer :: k, sz(1), tag_snd(2), tag_rcv(2)
  1223. integer :: ijsr(2,2), nghbr(2)
1224. ! check input (define status before any early return)
1225. status = 0
if ( halo == 0 ) return
  1226. sz = shape(dist_array)
  1227. i0 = DistGrid%i_strt
  1228. i1 = DistGrid%i_stop
  1229. ! degenerate case
  1230. if (npe_lon==1) then
  1231. if (DistGrid%is_periodic) then
  1232. dist_array(i0-halo:i0-1) = dist_array(i1-halo+1:i1)
  1233. dist_array(i1+1:i1+halo) = dist_array(i0:i0+halo-1)
  1234. end if
  1235. return
  1236. end if
  1237. if(okdebug)then
  1238. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, jband=.true. )
  1239. IF_NOTOK_RETURN(status=1)
  1240. end if
  1241. ! Buffers anchors
  1242. ! ----------------
  1243. ijsr(:,1) = (/ i0, i1+1-halo /) ! i-start location of buffer to send
  1244. ijsr(:,2) = (/ i0-halo, i1+1 /) ! i-start location of buffer to receive
  1245. ! Communicate
  1246. ! ---------------- ! only east and west
  1247. tag_snd = (/1,3/)
  1248. tag_rcv = (/3,1/)
  1249. nghbr = (/1,3/)
  1250. neigh : do k=1,2
  1251. call MPI_ISEND( dist_array( ijsr(k,1)), halo, my_real, DistGrid%neighbors(nghbr(k)), tag_snd(k), localComm, req(k), ierr)
  1252. call MPI_IRECV( dist_array( ijsr(k,2)), halo, my_real, DistGrid%neighbors(nghbr(k)), tag_rcv(k), localComm, req(k+2), ierr)
  1253. end do neigh
  1254. call MPI_WAITALL(4, req, stat, ierr)
  1255. IF_NOTOK_MPI(status=1)
  1256. #else
  1257. if ((halo/=0).and.DistGrid%is_periodic) then
  1258. i0 = DistGrid%i_strt
  1259. i1 = DistGrid%i_stop
  1260. dist_array(i0-halo:i0-1) = dist_array(i1-halo+1:i1)
  1261. dist_array(i1+1:i1+halo) = dist_array(i0:i0+halo-1)
  1262. end if
  1263. #endif
  1264. status = 0
  1265. END SUBROUTINE UPDATE_HALO_JBAND_1D_R
  1266. !EOC
  1267. !--------------------------------------------------------------------------
  1268. ! TM5 !
  1269. !--------------------------------------------------------------------------
  1270. !BOP
  1271. !
  1272. ! !IROUTINE: UPDATE_HALO_JBAND_2D_R
  1273. !
  1274. ! !DESCRIPTION: update halo cells of a decomposed zonal 2d slice
  1275. !\\
  1276. !\\
  1277. ! !INTERFACE:
  1278. !
  1279. SUBROUTINE UPDATE_HALO_JBAND_2D_R( DistGrid, dist_array, halo, status )
  1280. !
  1281. ! !INPUT PARAMETERS:
  1282. !
  1283. type(dist_grid), intent(in) :: DistGrid
  1284. integer, intent(in) :: halo
  1285. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:,:)
  1286. !
  1287. ! !OUTPUT PARAMETERS:
  1288. !
  1289. integer, intent(out) :: status
  1290. !
  1291. ! !REVISION HISTORY:
  1292. ! 20 Feb 2012 - P. Le Sager - v0
  1293. !
  1294. !EOP
  1295. !------------------------------------------------------------------------
  1296. !BOC
  1297. character(len=*), parameter :: rname = mname//'update_halo_jband_2d_r'
  1298. integer :: i0, i1
  1299. #ifdef MPI
  1300. integer :: stat(MPI_STATUS_SIZE,4), req(4), wetype
  1301. integer :: k, sz(2), tag_snd(2), tag_rcv(2)
  1302. integer :: ijsr(2,2), nghbr(2)
  1303. status = 0
  1304. ! check input
  1305. if ( halo == 0 ) return
  1306. sz = shape(dist_array)
  1307. i0 = DistGrid%i_strt
  1308. i1 = DistGrid%i_stop
  1309. ! degenerate case
  1310. if (npe_lon==1) then
  1311. if (DistGrid%is_periodic) then
  1312. dist_array(i0-halo:i0-1,:) = dist_array(i1-halo+1:i1,:)
  1313. dist_array(i1+1:i1+halo,:) = dist_array(i0:i0+halo-1,:)
  1314. end if
  1315. return
  1316. end if
  1317. if(okdebug)then
  1318. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, jband=.true. )
  1319. IF_NOTOK_RETURN(status=1)
  1320. end if
  1321. ! Buffers anchors
  1322. ! ----------------
  1323. ijsr(:,1) = (/ i0, i1+1-halo /) ! i-start location of buffer to send
  1324. ijsr(:,2) = (/ i0-halo, i1+1 /) ! i-start location of buffer to receive
  1325. ! pack data
  1326. !----------
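! 'wetype' picks one block of 'halo' cells out of each of the sz(2) columns,
! the blocks being separated by the local leading dimension sz(1) (in elements)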
  1327. call MPI_TYPE_VECTOR (sz(2), halo, sz(1), my_real, wetype, ierr)
  1328. call MPI_TYPE_COMMIT (wetype, ierr)
  1329. ! Communicate
  1330. ! ----------------
  1331. tag_snd = (/1,3/)
  1332. tag_rcv = (/3,1/)
  1333. nghbr = (/1,3/)
  1334. neigh : do k=1,2 ! only east and west
  1335. call MPI_ISEND( dist_array(ijsr(k,1),1), 1, wetype, DistGrid%neighbors(nghbr(k)), tag_snd(k), localComm, req(k), ierr)
  1336. call MPI_IRECV( dist_array(ijsr(k,2),1), 1, wetype, DistGrid%neighbors(nghbr(k)), tag_rcv(k), localComm, req(k+2), ierr)
  1337. end do neigh
  1338. call MPI_WAITALL(4, req, stat, ierr)
  1339. IF_NOTOK_MPI(status=1)
  1340. call MPI_TYPE_FREE(wetype, ierr)
  1341. #else
  1342. if ((halo/=0).and.DistGrid%is_periodic) then
  1343. i0 = DistGrid%i_strt
  1344. i1 = DistGrid%i_stop
  1345. dist_array(i0-halo:i0-1,:) = dist_array(i1-halo+1:i1,:)
  1346. dist_array(i1+1:i1+halo,:) = dist_array(i0:i0+halo-1,:)
  1347. end if
  1348. #endif
  1349. status = 0
  1350. END SUBROUTINE UPDATE_HALO_JBAND_2D_R
  1351. !EOC
  1352. !--------------------------------------------------------------------------
  1353. ! TM5 !
  1354. !--------------------------------------------------------------------------
  1355. !BOP
  1356. !
  1357. ! !IROUTINE: UPDATE_HALO_JBAND_3D_R
  1358. !
  1359. ! !DESCRIPTION: update halo cells of a decomposed zonal 3d slice
  1360. !\\
  1361. !\\
  1362. ! !INTERFACE:
  1363. !
  1364. SUBROUTINE UPDATE_HALO_JBAND_3D_R( DistGrid, dist_array, halo, status )
  1365. !
  1366. ! !INPUT PARAMETERS:
  1367. !
  1368. type(dist_grid), intent(in) :: DistGrid
  1369. integer, intent(in) :: halo
  1370. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:,:,:)
  1371. !
  1372. ! !OUTPUT PARAMETERS:
  1373. !
  1374. integer, intent(out) :: status
  1375. !
  1376. ! !REVISION HISTORY:
  1377. ! 20 Feb 2012 - P. Le Sager - v0
  1378. !
  1379. !EOP
  1380. !------------------------------------------------------------------------
  1381. !BOC
  1382. character(len=*), parameter :: rname = mname//'update_halo_jband_3d_r'
  1383. integer :: i0, i1
  1384. #ifdef MPI
  1385. integer :: stat(MPI_STATUS_SIZE,4), req(4), wetype
  1386. integer :: k, sz(3), tag_snd(2), tag_rcv(2)
  1387. integer :: ijsr(2,2), nghbr(2)
  1388. status = 0
  1389. ! check input
  1390. if ( halo == 0 ) return
  1391. sz = shape(dist_array)
  1392. i0 = DistGrid%i_strt
  1393. i1 = DistGrid%i_stop
  1394. ! degenerate case
  1395. if (npe_lon==1) then
  1396. if (DistGrid%is_periodic) then
  1397. dist_array(i0-halo:i0-1,:,:) = dist_array(i1-halo+1:i1,:,:)
  1398. dist_array(i1+1:i1+halo,:,:) = dist_array(i0:i0+halo-1,:,:)
  1399. end if
  1400. return
  1401. end if
  1402. if(okdebug)then
  1403. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, jband=.true. )
  1404. IF_NOTOK_RETURN(status=1)
  1405. end if
  1406. ! Buffers anchors
  1407. ! ----------------
  1408. ijsr(:,1) = (/ i0, i1+1-halo /) ! i-start location of buffer to send
  1409. ijsr(:,2) = (/ i0-halo, i1+1 /) ! i-start location of buffer to receive
  1410. ! pack data
  1411. !----------
  1412. call MPI_TYPE_VECTOR (sz(2)*sz(3), halo, sz(1), my_real, wetype, ierr)
  1413. call MPI_TYPE_COMMIT (wetype, ierr)
  1414. ! Communicate
  1415. ! ----------------
  1416. tag_snd = (/1,3/)
  1417. tag_rcv = (/3,1/)
  1418. nghbr = (/1,3/)
  1419. neigh : do k=1,2 ! only east and west
  1420. call MPI_ISEND( dist_array(ijsr(k,1),1,1), 1, wetype, DistGrid%neighbors(nghbr(k)), tag_snd(k), localComm, req(k), ierr)
  1421. call MPI_IRECV( dist_array(ijsr(k,2),1,1), 1, wetype, DistGrid%neighbors(nghbr(k)), tag_rcv(k), localComm, req(k+2), ierr)
  1422. end do neigh
  1423. call MPI_WAITALL(4, req, stat, ierr)
  1424. IF_NOTOK_MPI(status=1)
  1425. call MPI_TYPE_FREE(wetype, ierr)
  1426. #else
  1427. if ((halo/=0).and.DistGrid%is_periodic) then
  1428. i0 = DistGrid%i_strt
  1429. i1 = DistGrid%i_stop
  1430. dist_array(i0-halo:i0-1,:,:) = dist_array(i1-halo+1:i1,:,:)
  1431. dist_array(i1+1:i1+halo,:,:) = dist_array(i0:i0+halo-1,:,:)
  1432. end if
  1433. #endif
  1434. status = 0
  1435. END SUBROUTINE UPDATE_HALO_JBAND_3D_R
  1436. !EOC
  1437. !--------------------------------------------------------------------------
  1438. ! TM5 !
  1439. !--------------------------------------------------------------------------
  1440. !BOP
  1441. !
  1442. ! !IROUTINE: UPDATE_HALO_JBAND_4D_R
  1443. !
  1444. ! !DESCRIPTION: update halo cells of a decomposed zonal 4d slice
  1445. !\\
  1446. !\\
  1447. ! !INTERFACE:
  1448. !
  1449. SUBROUTINE UPDATE_HALO_JBAND_4D_R( DistGrid, dist_array, halo, status )
  1450. !
  1451. ! !INPUT PARAMETERS:
  1452. !
  1453. type(dist_grid), intent(in) :: DistGrid
  1454. integer, intent(in) :: halo
  1455. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:,:,:,:)
  1456. !
  1457. ! !OUTPUT PARAMETERS:
  1458. !
  1459. integer, intent(out) :: status
  1460. !
  1461. ! !REVISION HISTORY:
  1462. ! 20 Feb 2012 - P. Le Sager - v0
  1463. !
  1464. !EOP
  1465. !------------------------------------------------------------------------
  1466. !BOC
  1467. character(len=*), parameter :: rname = mname//'update_halo_jband_4d_r'
  1468. integer :: i0, i1
  1469. #ifdef MPI
  1470. integer :: stat(MPI_STATUS_SIZE,4), req(4), wetype
  1471. integer :: k, sz(4), tag_snd(2), tag_rcv(2)
  1472. integer :: ijsr(2,2), nghbr(2)
  1473. status = 0
  1474. ! check input
  1475. if ( halo == 0 ) return
  1476. sz = shape(dist_array)
  1477. i0 = DistGrid%i_strt
  1478. i1 = DistGrid%i_stop
  1479. ! degenerate case
  1480. if (npe_lon==1) then
  1481. if (DistGrid%is_periodic) then
  1482. dist_array(i0-halo:i0-1,:,:,:) = dist_array(i1-halo+1:i1,:,:,:)
  1483. dist_array(i1+1:i1+halo,:,:,:) = dist_array(i0:i0+halo-1,:,:,:)
  1484. end if
  1485. return
  1486. end if
  1487. if(okdebug)then
  1488. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, jband=.true. )
  1489. IF_NOTOK_RETURN(status=1)
  1490. end if
  1491. ! Buffers anchors
  1492. ! ----------------
  1493. ijsr(:,1) = (/ i0, i1+1-halo /) ! i-start location of buffer to send
  1494. ijsr(:,2) = (/ i0-halo, i1+1 /) ! i-start location of buffer to receive
  1495. ! pack data
  1496. !----------
  1497. call MPI_TYPE_VECTOR (sz(2)*sz(3)*sz(4), halo, sz(1), my_real, wetype, ierr)
  1498. call MPI_TYPE_COMMIT (wetype, ierr)
  1499. ! Communicate
  1500. ! ----------------
  1501. tag_snd = (/1,3/)
  1502. tag_rcv = (/3,1/)
  1503. nghbr = (/1,3/)
  1504. neigh : do k=1,2 ! only east and west
  1505. call MPI_ISEND( dist_array(ijsr(k,1),1,1,1), 1, wetype, DistGrid%neighbors(nghbr(k)), tag_snd(k), localComm, req(k), ierr)
  1506. call MPI_IRECV( dist_array(ijsr(k,2),1,1,1), 1, wetype, DistGrid%neighbors(nghbr(k)), tag_rcv(k), localComm, req(k+2), ierr)
  1507. end do neigh
  1508. call MPI_WAITALL(4, req, stat, ierr)
  1509. IF_NOTOK_MPI(status=1)
  1510. call MPI_TYPE_FREE(wetype, ierr)
  1511. #else
  1512. if ((halo/=0).and.DistGrid%is_periodic) then
  1513. i0 = DistGrid%i_strt
  1514. i1 = DistGrid%i_stop
  1515. dist_array(i0-halo:i0-1,:,:,:) = dist_array(i1-halo+1:i1,:,:,:)
  1516. dist_array(i1+1:i1+halo,:,:,:) = dist_array(i0:i0+halo-1,:,:,:)
  1517. end if
  1518. #endif
  1519. status = 0
  1520. END SUBROUTINE UPDATE_HALO_JBAND_4D_R
  1521. !EOC
  1522. !--------------------------------------------------------------------------
  1523. ! TM5 !
  1524. !--------------------------------------------------------------------------
  1525. !BOP
  1526. !
  1527. ! !IROUTINE: UPDATE_HALO_2D_R
  1528. !
  1529. ! !DESCRIPTION: update halo cells of a distributed 2D real array
  1530. !\\
  1531. !\\
  1532. ! !INTERFACE:
  1533. !
  1534. SUBROUTINE UPDATE_HALO_2D_R( DistGrid, dist_array, halo, status, i_only, j_only )
  1535. !
  1536. ! !INPUT PARAMETERS:
  1537. !
  1538. type(dist_grid), intent(in) :: DistGrid
  1539. integer, intent(in) :: halo
  1540. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  1541. logical, optional, intent(in) :: i_only ! update East & West halo only
  1542. logical, optional, intent(in) :: j_only ! update North & South halo only
  1543. !
  1544. ! !OUTPUT PARAMETERS:
  1545. !
  1546. integer, intent(out) :: status
  1547. !
  1548. ! !REVISION HISTORY:
  1549. ! 01 Nov 2011 - P. Le Sager - v0
  1550. !
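! A minimal usage sketch ('field' is an illustrative local array declared
! with a halo of 1 in both i and j):
!
! call UPDATE_HALO_2D_R( DistGrid, field, 1, status )
! IF_NOTOK_RETURN(status=1)
!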
  1551. !EOP
  1552. !------------------------------------------------------------------------
  1553. !BOC
  1554. character(len=*), parameter :: rname = mname//'update_halo_2d_r'
  1555. integer :: i1, i2, j1, j2
  1556. #ifdef MPI
  1557. integer :: stat(MPI_STATUS_SIZE,8), req(8)
  1558. integer :: k, sz(2), tag_snd(4), tag_rcv(4)
  1559. integer :: srtype(4), ijsr(4,4)
1560. ! check input (define status before any early return)
1561. status = 0
if ( halo == 0 ) return
  1562. sz = shape(dist_array)
  1563. if(okdebug)then
  1564. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status )
  1565. IF_NOTOK_RETURN(status=1)
  1566. end if
  1567. ! get types and I/J start
  1568. CALL GET_HALO_TYPE( DistGrid, sz, halo, my_real, srtype, ijsr, status )
  1569. IF_NOTOK_RETURN(status=1)
  1570. ! Communicate
  1571. tag_snd = (/1,2,3,4/)
  1572. tag_rcv = (/3,4,1,2/)
  1573. neigh : do k=1,4
  1574. call MPI_ISEND( dist_array( ijsr(k,1), ijsr(k,2)), 1, srtype(k), DistGrid%neighbors(k), tag_snd(k), localComm, req(k), ierr)
  1575. call MPI_IRECV( dist_array( ijsr(k,3), ijsr(k,4)), 1, srtype(k), DistGrid%neighbors(k), &
  1576. tag_rcv(k), localComm, req(k+4), ierr)
  1577. end do neigh
  1578. call MPI_WAITALL(8, req, stat, ierr)
  1579. IF_NOTOK_MPI(status=1)
  1580. call FREE_DERIVED_TYPE( srtype )
  1581. #else
  1582. if ((halo/=0).and.DistGrid%is_periodic) then
  1583. i1 = DistGrid%i_strt
  1584. i2 = DistGrid%i_stop
  1585. j1 = DistGrid%j_strt
  1586. j2 = DistGrid%j_stop
  1587. dist_array(i1-halo:i1-1,j1:j2) = dist_array(i2-halo+1:i2,j1:j2)
  1588. dist_array(i2+1:i2+halo,j1:j2) = dist_array(i1:i1+halo-1,j1:j2)
  1589. end if
  1590. #endif
  1591. status = 0
  1592. END SUBROUTINE UPDATE_HALO_2D_R
  1593. !EOC
  1594. !--------------------------------------------------------------------------
  1595. ! TM5 !
  1596. !--------------------------------------------------------------------------
  1597. !BOP
  1598. !
  1599. ! !IROUTINE: UPDATE_HALO_2D_I
  1600. !
  1601. ! !DESCRIPTION: update halo cells of a distributed 2D integer array
  1602. !\\
  1603. !\\
  1604. ! !INTERFACE:
  1605. !
  1606. SUBROUTINE UPDATE_HALO_2D_I( DistGrid, dist_array, halo, status )
  1607. !
  1608. ! !INPUT PARAMETERS:
  1609. !
  1610. type(dist_grid), intent(in) :: DistGrid
  1611. integer, intent(in) :: halo
  1612. integer, intent(inout) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  1613. !
  1614. ! !OUTPUT PARAMETERS:
  1615. !
  1616. integer, intent(out) :: status
  1617. !
  1618. ! !REVISION HISTORY:
  1619. ! 01 Nov 2011 - P. Le Sager - v0
  1620. !
  1621. ! !REMARKS:
  1622. ! (1) not tested yet, but the version for 'real' has been...
  1623. !
  1624. !EOP
  1625. !------------------------------------------------------------------------
  1626. !BOC
  1627. character(len=*), parameter :: rname = mname//'update_halo_2D_i'
  1628. integer :: i1, i2, j1, j2
  1629. #ifdef MPI
  1630. integer :: stat(MPI_STATUS_SIZE,8), req(8), k, sz(2)
  1631. integer :: srtype(4), ijsr(4,4), tag_snd(4), tag_rcv(4)
1632. ! check input (define status before any early return)
1633. status = 0
if ( halo == 0 ) return
  1634. sz = shape(dist_array)
  1635. if(okdebug)then
  1636. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status )
  1637. IF_NOTOK_RETURN(status=1)
  1638. end if
  1639. ! get types and I/J start
  1640. CALL GET_HALO_TYPE( distGrid, sz, halo, MPI_INTEGER, srtype, ijsr, status )
  1641. IF_NOTOK_RETURN(status=1)
  1642. ! Communicate
  1643. tag_snd = (/1,2,3,4/)
  1644. tag_rcv = (/3,4,1,2/)
  1645. neigh : do k=1,4
  1646. call MPI_ISEND( dist_array( ijsr(k,1), ijsr(k,2)), 1, srtype(k), DistGrid%neighbors(k), tag_snd(k), localComm, req(k), ierr)
  1647. call MPI_IRECV( dist_array( ijsr(k,3), ijsr(k,4)), 1, srtype(k), DistGrid%neighbors(k), &
  1648. tag_rcv(k), localComm, req(k+4), ierr)
  1649. end do neigh
  1650. call MPI_WAITALL(8, req, stat, ierr)
  1651. call FREE_DERIVED_TYPE( srtype )
  1652. #else
  1653. if ((halo/=0).and.DistGrid%is_periodic) then
  1654. i1 = DistGrid%i_strt
  1655. i2 = DistGrid%i_stop
  1656. j1 = DistGrid%j_strt
  1657. j2 = DistGrid%j_stop
  1658. dist_array(i1-halo:i1-1,j1:j2) = dist_array(i2-halo+1:i2,j1:j2)
  1659. dist_array(i2+1:i2+halo,j1:j2) = dist_array(i1:i1+halo-1,j1:j2)
  1660. end if
  1661. #endif
  1662. status = 0
  1663. END SUBROUTINE UPDATE_HALO_2D_I
  1664. !EOC
  1665. !--------------------------------------------------------------------------
  1666. ! TM5 !
  1667. !--------------------------------------------------------------------------
  1668. !BOP
  1669. !
  1670. ! !IROUTINE: UPDATE_HALO_3D_R
  1671. !
  1672. ! !DESCRIPTION: update halo cells of a distributed 3D real array
  1673. !\\
  1674. !\\
  1675. ! !INTERFACE:
  1676. !
  1677. SUBROUTINE UPDATE_HALO_3D_R( DistGrid, dist_array, halo, status )
  1678. !
  1679. ! !INPUT PARAMETERS:
  1680. !
  1681. type(dist_grid), intent(in) :: DistGrid
  1682. integer, intent(in) :: halo
  1683. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:)
  1684. !
  1685. ! !OUTPUT PARAMETERS:
  1686. !
  1687. integer, intent(out) :: status
  1688. !
  1689. ! !REVISION HISTORY:
  1690. ! 01 Nov 2011 - P. Le Sager - v0
  1691. !
  1692. !EOP
  1693. !------------------------------------------------------------------------
  1694. !BOC
  1695. character(len=*), parameter :: rname = mname//'update_halo_3d_r'
  1696. integer :: i1, i2, j1, j2
  1697. #ifdef MPI
  1698. integer :: stat(MPI_STATUS_SIZE,8), req(8), k, sz(3)
  1699. integer :: srtype(4), ijsr(4,4), tag_snd(4), tag_rcv(4)
1700. ! check input (define status before any early return)
1701. status = 0
if ( halo == 0 ) return
  1702. sz = shape(dist_array)
  1703. if(okdebug)then
  1704. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status )
  1705. IF_NOTOK_RETURN(status=1)
  1706. end if
  1707. ! get types and I/J start
  1708. CALL GET_HALO_TYPE( DistGrid, sz, halo, my_real, srtype, ijsr, status )
  1709. IF_NOTOK_RETURN(status=1)
  1710. ! Communicate
  1711. tag_snd = (/1,2,3,4/)
  1712. tag_rcv = (/3,4,1,2/)
  1713. neigh : do k=1,4
  1714. call MPI_ISEND( dist_array( ijsr(k,1), ijsr(k,2), 1), 1, srtype(k), DistGrid%neighbors(k), tag_snd(k), localComm, &
  1715. req(k), ierr)
  1716. call MPI_IRECV( dist_array( ijsr(k,3), ijsr(k,4), 1), 1, srtype(k), DistGrid%neighbors(k), tag_rcv(k), localComm, &
  1717. req(k+4), ierr)
  1718. end do neigh
  1719. call MPI_WAITALL(8, req, stat, ierr)
  1720. IF_NOTOK_MPI(status=1)
  1721. call FREE_DERIVED_TYPE( srtype )
  1722. #else
  1723. if ((halo/=0).and.DistGrid%is_periodic) then
  1724. i1 = DistGrid%i_strt
  1725. i2 = DistGrid%i_stop
  1726. j1 = DistGrid%j_strt
  1727. j2 = DistGrid%j_stop
  1728. dist_array(i1-halo:i1-1,j1:j2,:) = dist_array(i2-halo+1:i2,j1:j2,:)
  1729. dist_array(i2+1:i2+halo,j1:j2,:) = dist_array(i1:i1+halo-1,j1:j2,:)
  1730. end if
  1731. #endif
  1732. status = 0
  1733. END SUBROUTINE UPDATE_HALO_3D_R
  1734. !EOC
  1735. !--------------------------------------------------------------------------
  1736. ! TM5 !
  1737. !--------------------------------------------------------------------------
  1738. !BOP
  1739. !
  1740. ! !IROUTINE: UPDATE_HALO_4D_R
  1741. !
  1742. ! !DESCRIPTION: update halo cells of a distributed 4D real array
  1743. !\\
  1744. !\\
  1745. ! !INTERFACE:
  1746. !
  1747. SUBROUTINE UPDATE_HALO_4D_R( DistGrid, dist_array, halo, status )
  1748. !
  1749. ! !INPUT PARAMETERS:
  1750. !
  1751. type(dist_grid), intent(in) :: DistGrid
  1752. integer, intent(in) :: halo
  1753. real, intent(inout) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:,:)
  1754. !
  1755. ! !OUTPUT PARAMETERS:
  1756. !
  1757. integer, intent(out) :: status
  1758. !
  1759. ! !REVISION HISTORY:
  1760. ! 01 Nov 2011 - P. Le Sager - v0
  1761. !
  1762. !EOP
  1763. !------------------------------------------------------------------------
  1764. !BOC
  1765. character(len=*), parameter :: rname = mname//'update_halo_4D_r'
  1766. integer :: i1, i2, j1, j2
  1767. #ifdef MPI
  1768. integer :: stat(MPI_STATUS_SIZE,8), req(8), k, sz(4)
  1769. integer :: srtype(4), ijsr(4,4), tag_snd(4), tag_rcv(4)
1770. ! check input (define status before any early return)
1771. status = 0
if ( halo == 0 ) return
  1772. sz = shape(dist_array)
  1773. if(okdebug)then
  1774. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status )
  1775. IF_NOTOK_RETURN(status=1)
  1776. end if
  1777. ! get types and I/J start
  1778. CALL GET_HALO_TYPE( DistGrid, sz, halo, my_real, srtype, ijsr, status )
  1779. IF_NOTOK_RETURN(status=1)
  1780. ! Communicate
  1781. tag_snd = (/1,2,3,4/)
  1782. tag_rcv = (/3,4,1,2/)
  1783. neigh : do k=1,4
  1784. call MPI_ISEND( dist_array( ijsr(k,1), ijsr(k,2), 1, 1), 1, srtype(k), DistGrid%neighbors(k), tag_snd(k), &
  1785. localComm, req(k), ierr)
  1786. call MPI_IRECV( dist_array( ijsr(k,3), ijsr(k,4), 1, 1), 1, srtype(k), DistGrid%neighbors(k), tag_rcv(k), &
  1787. localComm, req(k+4), ierr)
  1788. end do neigh
  1789. call MPI_WAITALL(8, req, stat, ierr)
  1790. IF_NOTOK_MPI(status=1)
  1791. call FREE_DERIVED_TYPE( srtype )
  1792. #else
  1793. if ((halo/=0).and.DistGrid%is_periodic) then
  1794. i1 = DistGrid%i_strt
  1795. i2 = DistGrid%i_stop
  1796. j1 = DistGrid%j_strt
  1797. j2 = DistGrid%j_stop
  1798. dist_array(i1-halo:i1-1,j1:j2,:,:) = dist_array(i2-halo+1:i2,j1:j2,:,:)
  1799. dist_array(i2+1:i2+halo,j1:j2,:,:) = dist_array(i1:i1+halo-1,j1:j2,:,:)
  1800. end if
  1801. #endif
  1802. status = 0
  1803. END SUBROUTINE UPDATE_HALO_4D_R
  1804. !EOC
  1805. !--------------------------------------------------------------------------
  1806. ! TM5 !
  1807. !--------------------------------------------------------------------------
  1808. !BOP
  1809. !
  1810. ! !IROUTINE: GATHER_2D_R
  1811. !
  1812. ! !DESCRIPTION: gather local 2D arrays into a global 2D array
  1813. !\\
  1814. !\\
  1815. ! !INTERFACE:
  1816. !
  1817. SUBROUTINE GATHER_2D_R( DistGrid, dist_array, glbl_array, halo, status )
  1818. !
  1819. ! !INPUT PARAMETERS:
  1820. !
  1821. type(dist_grid), intent(in) :: DistGrid
  1822. integer, intent(in) :: halo
  1823. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  1824. !
  1825. ! !OUTPUT PARAMETERS:
  1826. !
  1827. real, intent(out) :: glbl_array(:,:)
  1828. integer, intent(out) :: status
  1829. !
  1830. ! !REVISION HISTORY:
  1831. ! 01 Nov 2011 - P. Le Sager - v0
  1832. !
  1833. ! !REMARKS:
1834. ! (1) MPI_GATHERV is not used here because the *receiving* sizes differ between processes
  1835. !
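! A minimal usage sketch ('local' carries a halo, 'global' has the full
! im_region x jm_region shape and is only used on root; names are
! illustrative):
!
! call GATHER_2D_R( DistGrid, local, global, halo, status )
! IF_NOTOK_RETURN(status=1)
!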
  1836. !EOP
  1837. !------------------------------------------------------------------------
  1838. !BOC
  1839. character(len=*), parameter :: rname = mname//'gather_2d_r'
  1840. #ifndef MPI
  1841. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop )
  1842. status = 0
  1843. #else
  1844. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  1845. integer :: i, j, klm, i0, j0, i1, j1, sz(2)
  1846. status=0
  1847. ! check input, get derived types
  1848. !-------------------------------
  1849. sz = shape(dist_array)
  1850. if(okdebug)then
  1851. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  1852. IF_NOTOK_RETURN(status=1)
  1853. end if
  1854. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  1855. IF_NOTOK_RETURN(status=1)
  1856. i0 = DistGrid%i_strt
  1857. j0 = DistGrid%j_strt
  1858. ! ------- GATHER -------------
  1859. if ( isRoot ) then
  1860. ! get first chunk locally
  1861. i1 = DistGrid%i_stop
  1862. j1 = DistGrid%j_stop
  1863. glbl_array(i0:i1,j0:j1) = dist_array(i0:i1,j0:j1)
  1864. ! get remaining chunks from other pes
  1865. do klm=1,npes-1
  1866. i = DistGrid%bounds(1,klm)
  1867. j = DistGrid%bounds(3,klm)
  1868. call MPI_RECV( glbl_array(i,j), 1, ginterior(klm), klm, 1, &
  1869. localComm, stat, ierr)
  1870. end do
  1871. call FREE_DERIVED_TYPE( ginterior )
  1872. else
  1873. call MPI_SEND( dist_array(i0,j0), 1, linterior, root, 1, localComm, ierr)
  1874. call MPI_TYPE_FREE(linterior, ierr)
  1875. end if
  1876. #endif
  1877. END SUBROUTINE GATHER_2D_R
  1878. !EOC
  1879. !--------------------------------------------------------------------------
  1880. ! TM5 !
  1881. !--------------------------------------------------------------------------
  1882. !BOP
  1883. !
  1884. ! !IROUTINE: GATHER_2D_I
  1885. !
  1886. ! !DESCRIPTION: gather local 2D arrays into a global 2D array (integer version)
  1887. !\\
  1888. !\\
  1889. ! !INTERFACE:
  1890. !
  1891. SUBROUTINE GATHER_2D_I( DistGrid, dist_array, glbl_array, halo, status )
  1892. !
  1893. ! !INPUT PARAMETERS:
  1894. !
  1895. type(dist_grid), intent(in) :: DistGrid
  1896. integer, intent(in) :: halo
  1897. integer, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  1898. !
  1899. ! !OUTPUT PARAMETERS:
  1900. !
  1901. integer, intent(out) :: glbl_array(:,:)
  1902. integer, intent(out) :: status
  1903. !
  1904. ! !REVISION HISTORY:
  1905. ! 01 Nov 2011 - P. Le Sager - v0
  1906. !
  1907. !EOP
  1908. !------------------------------------------------------------------------
  1909. !BOC
  1910. character(len=*), parameter :: rname = mname//'gather_2d_i'
  1911. #ifndef MPI
  1912. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop )
  1913. status = 0
  1914. #else
  1915. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  1916. integer :: i, j, klm, i0, j0, i1, j1, sz(2)
  1917. status=0
  1918. ! check input, get derived types
  1919. !-------------------------------
  1920. sz = shape(dist_array)
  1921. if(okdebug)then
  1922. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  1923. IF_NOTOK_RETURN(status=1)
  1924. end if
  1925. CALL GET_INTERIOR_TYPE( DistGrid, sz, MPI_INTEGER, linterior, ginterior, status )
  1926. IF_NOTOK_RETURN(status=1)
  1927. i0 = DistGrid%i_strt
  1928. j0 = DistGrid%j_strt
  1929. ! ------- GATHER -------------
  1930. if ( isRoot ) then
  1931. ! get first chunk locally
  1932. i1 = DistGrid%i_stop
  1933. j1 = DistGrid%j_stop
  1934. glbl_array(i0:i1,j0:j1) = dist_array(i0:i1,j0:j1)
  1935. ! get remaining chunks from other pes
  1936. do klm=1,npes-1
  1937. i = DistGrid%bounds(1,klm)
  1938. j = DistGrid%bounds(3,klm)
  1939. call MPI_RECV( glbl_array(i,j), 1, ginterior(klm), klm, 1, &
  1940. localComm, stat, ierr)
  1941. end do
  1942. call FREE_DERIVED_TYPE( ginterior )
  1943. else
  1944. call MPI_SEND( dist_array(i0,j0), 1, linterior, root, 1, localComm, ierr)
  1945. call MPI_TYPE_FREE(linterior, ierr)
  1946. end if
  1947. #endif
  1948. END SUBROUTINE GATHER_2D_I
  1949. !EOC
  1950. !--------------------------------------------------------------------------
  1951. ! TM5 !
  1952. !--------------------------------------------------------------------------
  1953. !BOP
  1954. !
  1955. ! !IROUTINE: GATHER_3D_R
  1956. !
  1957. ! !DESCRIPTION: gather local 3D arrays into a global 3D array
  1958. !\\
  1959. !\\
  1960. ! !INTERFACE:
  1961. !
  1962. SUBROUTINE GATHER_3D_R( DistGrid, dist_array, glbl_array, halo, status )
  1963. !
  1964. ! !INPUT PARAMETERS:
  1965. !
  1966. type(dist_grid), intent(in) :: DistGrid
  1967. integer, intent(in) :: halo
  1968. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:)
  1969. !
  1970. ! !OUTPUT PARAMETERS:
  1971. !
  1972. real, intent(out) :: glbl_array(:,:,:)
  1973. integer, intent(out) :: status
  1974. !
  1975. ! !REVISION HISTORY:
  1976. ! 01 Nov 2011 - P. Le Sager - v0
  1977. !
  1978. !EOP
  1979. !------------------------------------------------------------------------
  1980. !BOC
  1981. character(len=*), parameter :: rname = mname//'gather_3d_r'
  1982. #ifndef MPI
  1983. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop, : )
  1984. status = 0
  1985. #else
  1986. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  1987. integer :: i, j, klm, i0, j0, i1, j1, sz(3)
  1988. status=0
  1989. ! ------- Check input & get derived types
  1990. sz = shape(dist_array)
  1991. if(okdebug)then
  1992. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  1993. IF_NOTOK_RETURN(status=1)
  1994. end if
  1995. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  1996. IF_NOTOK_RETURN(status=1)
  1997. i0 = DistGrid%i_strt
  1998. j0 = DistGrid%j_strt
  1999. ! ------- GATHER -------------
  2000. if ( isRoot ) then
  2001. ! get first chunk locally
  2002. i1 = DistGrid%i_stop
  2003. j1 = DistGrid%j_stop
  2004. glbl_array(i0:i1,j0:j1,:) = dist_array(i0:i1,j0:j1,:)
  2005. ! get remaining chunks from other pes
  2006. do klm=1,npes-1
  2007. i = DistGrid%bounds(1,klm)
  2008. j = DistGrid%bounds(3,klm)
  2009. call MPI_RECV( glbl_array(i,j,1), 1, ginterior(klm), klm, 1, &
  2010. localComm, stat, ierr)
  2011. end do
  2012. call FREE_DERIVED_TYPE( ginterior )
  2013. else
  2014. call MPI_SEND( dist_array(i0,j0,1), 1, linterior, root, 1, localComm, ierr)
  2015. call MPI_TYPE_FREE(linterior, ierr)
  2016. end if
  2017. #endif
  2018. END SUBROUTINE GATHER_3D_R
  2019. !EOC
  2020. !--------------------------------------------------------------------------
  2021. ! TM5 !
  2022. !--------------------------------------------------------------------------
  2023. !BOP
  2024. !
  2025. ! !IROUTINE: GATHER_4D_R
  2026. !
  2027. ! !DESCRIPTION: gather local 4D arrays into a global 4D array
  2028. !\\
  2029. !\\
  2030. ! !INTERFACE:
  2031. !
  2032. SUBROUTINE GATHER_4D_R( DistGrid, dist_array, glbl_array, halo, status )
  2033. !
  2034. ! !INPUT PARAMETERS:
  2035. !
  2036. type(dist_grid), intent(in) :: DistGrid
  2037. integer, intent(in) :: halo
  2038. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:,:)
  2039. !
  2040. ! !OUTPUT PARAMETERS:
  2041. !
  2042. real, intent(out) :: glbl_array(:,:,:,:)
  2043. integer, intent(out) :: status
  2044. !
  2045. ! !REVISION HISTORY:
  2046. ! 01 Nov 2011 - P. Le Sager - v0
  2047. !
  2048. ! !REMARKS:
2049. ! (1) the global array needs to have its full (global) size on root only
  2050. !
  2051. !EOP
  2052. !------------------------------------------------------------------------
  2053. !BOC
  2054. character(len=*), parameter :: rname = mname//'gather_4d_r'
  2055. #ifndef MPI
  2056. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop, :, :)
  2057. status = 0
  2058. #else
  2059. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  2060. integer :: i, j, klm, i0, j0, i1, j1, sz(4)
  2061. status=0
  2062. ! ------- Check input & get derived types
  2063. sz = shape(dist_array)
  2064. if(okdebug)then
  2065. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  2066. IF_NOTOK_RETURN(status=1)
  2067. end if
  2068. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  2069. IF_NOTOK_RETURN(status=1)
  2070. i0 = DistGrid%i_strt
  2071. j0 = DistGrid%j_strt
  2072. ! ------- GATHER -------------
  2073. if ( isRoot ) then ! RECV
  2074. ! get first chunk locally
  2075. i1 = DistGrid%i_stop
  2076. j1 = DistGrid%j_stop
  2077. glbl_array(i0:i1,j0:j1,:,:) = dist_array(i0:i1,j0:j1,:,:)
2078. ! get remaining chunks from other pes
  2079. do klm=1,npes-1
  2080. i = DistGrid%bounds(1,klm)
  2081. j = DistGrid%bounds(3,klm)
  2082. call MPI_RECV( glbl_array(i,j,1,1), 1, ginterior(klm), klm, 1, &
  2083. localComm, stat, ierr)
  2084. end do
  2085. call FREE_DERIVED_TYPE( ginterior )
  2086. else !SEND
  2087. call MPI_SEND( dist_array(i0,j0,1,1), 1, linterior, root, 1, localComm, ierr)
  2088. call MPI_TYPE_FREE(linterior, ierr)
  2089. end if
  2090. #endif
  2091. END SUBROUTINE GATHER_4D_R
  2092. !EOC
  2093. !--------------------------------------------------------------------------
  2094. ! TM5 !
  2095. !--------------------------------------------------------------------------
  2096. !BOP
  2097. !
  2098. ! !IROUTINE: GATHER_5D_R
  2099. !
  2100. ! !DESCRIPTION: gather local 5D arrays into a global 5D array
  2101. !\\
  2102. !\\
  2103. ! !INTERFACE:
  2104. !
  2105. SUBROUTINE GATHER_5D_R( DistGrid, dist_array, glbl_array, halo, status )
  2106. !
  2107. ! !INPUT PARAMETERS:
  2108. !
  2109. type(dist_grid), intent(in) :: DistGrid
  2110. integer, intent(in) :: halo
  2111. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:,:,:)
  2112. !
  2113. ! !OUTPUT PARAMETERS:
  2114. !
  2115. real, intent(out) :: glbl_array(:,:,:,:,:)
  2116. integer, intent(out) :: status
  2117. !
  2118. ! !REVISION HISTORY:
  2119. ! 01 Nov 2011 - P. Le Sager - v0
  2120. !
  2121. ! !REMARKS:
2122. ! (1) the global array needs to have its full (global) size on root only
  2123. !
  2124. !EOP
  2125. !------------------------------------------------------------------------
  2126. !BOC
  2127. character(len=*), parameter :: rname = mname//'gather_5d_r'
  2128. #ifndef MPI
  2129. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop, :,:,:)
  2130. status = 0
  2131. #else
  2132. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  2133. integer :: i, j, klm, i0, j0, i1, j1, sz(5)
  2134. status=0
  2135. ! ------- Check input & get derived types
  2136. sz = shape(dist_array)
  2137. if(okdebug)then
  2138. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  2139. IF_NOTOK_RETURN(status=1)
  2140. end if
  2141. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  2142. IF_NOTOK_RETURN(status=1)
  2143. i0 = DistGrid%i_strt
  2144. j0 = DistGrid%j_strt
  2145. ! ------- GATHER -------------
  2146. if ( isRoot ) then ! RECV
  2147. ! get first chunk locally
  2148. i1 = DistGrid%i_stop
  2149. j1 = DistGrid%j_stop
  2150. glbl_array(i0:i1,j0:j1,:,:,:) = dist_array(i0:i1,j0:j1,:,:,:)
2151. ! get remaining chunks from other pes
  2152. do klm=1,npes-1
  2153. i = DistGrid%bounds(1,klm)
  2154. j = DistGrid%bounds(3,klm)
  2155. call MPI_RECV( glbl_array(i,j,1,1,1), 1, ginterior(klm), klm, 1, &
  2156. localComm, stat, ierr)
  2157. end do
  2158. call FREE_DERIVED_TYPE( ginterior )
  2159. else !SEND
  2160. call MPI_SEND( dist_array(i0,j0,1,1,1), 1, linterior, root, 1, localComm, ierr)
  2161. call MPI_TYPE_FREE(linterior, ierr)
  2162. end if
  2163. #endif
  2164. END SUBROUTINE GATHER_5D_R
  2165. !EOC
  2166. !--------------------------------------------------------------------------
  2167. ! TM5 !
  2168. !--------------------------------------------------------------------------
  2169. !BOP
  2170. !
  2171. ! !IROUTINE: SCATTER_2D_R
  2172. !
  2173. ! !DESCRIPTION: scatter a 2D global real array
  2174. !\\
  2175. !\\
  2176. ! !INTERFACE:
  2177. !
  2178. SUBROUTINE SCATTER_2D_R( DistGrid, dist_array, glbl_array, halo, status )
  2179. !
  2180. ! !INPUT PARAMETERS:
  2181. !
  2182. type(dist_grid), intent(in) :: DistGrid
  2183. real, intent(in) :: glbl_array(:,:)
  2184. integer, intent(in) :: halo
  2185. !
  2186. ! !OUTPUT PARAMETERS:
  2187. !
  2188. real, intent(out) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  2189. integer, intent(out) :: status
  2190. !
  2191. ! !REVISION HISTORY:
  2192. ! 01 Nov 2011 - P. Le Sager - v0
  2193. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2194. !
  2195. ! !REMARKS: exactly the same as GATHER_2D_R, but inverting send/recv
  2196. !
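! A minimal usage sketch (the mirror image of the GATHER_2D_R example;
! 'global' needs its full shape on root only, 'local' includes the halo):
!
! call SCATTER_2D_R( DistGrid, local, global, halo, status )
! IF_NOTOK_RETURN(status=1)
!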
  2197. !EOP
  2198. !------------------------------------------------------------------------
  2199. !BOC
  2200. character(len=*), parameter :: rname = mname//'scatter_2d_r'
  2201. #ifndef MPI
  2202. dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop ) = glbl_array
  2203. status = 0
  2204. #else
  2205. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  2206. integer :: i, j, klm, i0, j0, i1, j1, sz(2)
  2207. status=0
  2208. ! ------- Check input & get derived types
  2209. sz = shape(dist_array)
  2210. if(okdebug)then
  2211. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  2212. IF_NOTOK_RETURN(status=1)
  2213. end if
  2214. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  2215. IF_NOTOK_RETURN(status=1)
  2216. i0 = DistGrid%i_strt
  2217. j0 = DistGrid%j_strt
  2218. ! ------- SCATTER -------------
  2219. if ( isRoot ) then ! send
  2220. ! get first chunk locally
  2221. i1 = DistGrid%i_stop
  2222. j1 = DistGrid%j_stop
  2223. dist_array(i0:i1,j0:j1) = glbl_array(i0:i1,j0:j1)
  2224. ! send remaining chunks to other pes
  2225. do klm=1,npes-1
  2226. i = DistGrid%bounds(1,klm)
  2227. j = DistGrid%bounds(3,klm)
  2228. call MPI_SSEND( glbl_array(i,j), 1, ginterior(klm), klm, klm, localComm, ierr)
  2229. IF_NOTOK_MPI(status=1)
  2230. end do
  2231. call FREE_DERIVED_TYPE( ginterior )
  2232. else
  2233. call MPI_COMM_RANK(localComm, klm, ierr)
  2234. IF_NOTOK_MPI(status=1)
  2235. call MPI_RECV( dist_array(i0,j0), 1, linterior, root, klm, localComm, stat, ierr)
  2236. IF_NOTOK_MPI(status=1)
  2237. call MPI_TYPE_FREE(linterior, ierr)
  2238. IF_NOTOK_MPI(status=1)
  2239. end if
  2240. #endif
  2241. END SUBROUTINE SCATTER_2D_R
  2242. !EOC
  2243. !--------------------------------------------------------------------------
  2244. ! TM5 !
  2245. !--------------------------------------------------------------------------
  2246. !BOP
  2247. !
  2248. ! !IROUTINE: SCATTER_3D_R
  2249. !
  2250. ! !DESCRIPTION: scatter 3D global real array
  2251. !\\
  2252. !\\
  2253. ! !INTERFACE:
  2254. !
  2255. SUBROUTINE SCATTER_3D_R( DistGrid, dist_array, glbl_array, halo, status )
  2256. !
  2257. ! !INPUT PARAMETERS:
  2258. !
  2259. type(dist_grid), intent(in) :: DistGrid
  2260. integer, intent(in) :: halo
  2261. real, intent(in) :: glbl_array(:,:,:)
  2262. !
  2263. ! !OUTPUT PARAMETERS:
  2264. !
  2265. real, intent(out) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:)
  2266. integer, intent(out) :: status
  2267. !
  2268. ! !REVISION HISTORY:
  2269. ! 01 Nov 2011 - P. Le Sager - v0
  2270. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2271. !
2272. ! !REMARKS: the global array needs to have its full (global) size on root only
  2273. !
  2274. !EOP
  2275. !------------------------------------------------------------------------
  2276. !BOC
  2277. character(len=*), parameter :: rname = mname//'scatter_3d_r'
  2278. #ifndef MPI
  2279. dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop, :) = glbl_array
  2280. status = 0
  2281. #else
  2282. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  2283. integer :: i, j, klm, i0, j0, i1, j1, sz(3)
  2284. status=0
  2285. ! ------- Check input & get derived types
  2286. sz = shape(dist_array)
  2287. if(okdebug)then
  2288. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  2289. IF_NOTOK_RETURN(status=1)
  2290. end if
  2291. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  2292. IF_NOTOK_RETURN(status=1)
  2293. i0 = DistGrid%i_strt
  2294. j0 = DistGrid%j_strt
  2295. ! ------- SCATTER -------------
  2296. if ( isRoot ) then ! send
  2297. ! get first chunk locally
  2298. i1 = DistGrid%i_stop
  2299. j1 = DistGrid%j_stop
  2300. dist_array(i0:i1,j0:j1,:) = glbl_array(i0:i1,j0:j1,:)
  2301. ! send remaining chunks to other pes
  2302. do klm=1,npes-1
  2303. i = DistGrid%bounds(1,klm)
  2304. j = DistGrid%bounds(3,klm)
  2305. call MPI_SSEND( glbl_array(i,j,1), 1, ginterior(klm), klm, 1, localComm, ierr)
  2306. end do
  2307. call FREE_DERIVED_TYPE( ginterior )
  2308. else
  2309. call MPI_RECV( dist_array(i0,j0,1), 1, linterior, root, 1, localComm, stat, ierr)
  2310. call MPI_TYPE_FREE(linterior, ierr)
  2311. end if
  2312. #endif
  2313. END SUBROUTINE SCATTER_3D_R
  2314. !EOC
  2315. !--------------------------------------------------------------------------
  2316. ! TM5 !
  2317. !--------------------------------------------------------------------------
  2318. !BOP
  2319. !
  2320. ! !IROUTINE: SCATTER_4D_R
  2321. !
  2322. ! !DESCRIPTION: scatter 4D real array
  2323. !\\
  2324. !\\
  2325. ! !INTERFACE:
  2326. !
  2327. SUBROUTINE SCATTER_4D_R( DistGrid, dist_array, glbl_array, halo, status )
  2328. !
  2329. ! !INPUT PARAMETERS:
  2330. !
  2331. type(dist_grid), intent(in) :: DistGrid
  2332. integer, intent(in) :: halo
  2333. real, intent(in) :: glbl_array(:,:,:,:)
  2334. !
  2335. ! !OUTPUT PARAMETERS:
  2336. !
  2337. real, intent(out) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:,:)
  2338. integer, intent(out) :: status
  2339. !
  2340. ! !REVISION HISTORY:
  2341. ! 01 Nov 2011 - P. Le Sager - v0
  2342. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2343. !
  2344. !EOP
  2345. !------------------------------------------------------------------------
  2346. !BOC
  2347. character(len=*), parameter :: rname = mname//'scatter_4d_r'
  2348. #ifndef MPI
  2349. dist_array( DistGrid%i_strt:DistGrid%i_stop, DistGrid%j_strt:DistGrid%j_stop,:,:) = glbl_array
  2350. status = 0
  2351. #else
  2352. integer :: stat(MPI_STATUS_SIZE), linterior, ginterior(npes-1)
  2353. integer :: i, j, klm, i0, j0, i1, j1, sz(4)
  2354. status=0
  2355. ! ------- Check input & get derived types
  2356. sz = shape(dist_array)
  2357. if(okdebug)then
  2358. CALL CHECK_DIST_ARR( DistGrid, sz, halo, status, shape(glbl_array))
  2359. IF_NOTOK_RETURN(status=1)
  2360. end if
  2361. CALL GET_INTERIOR_TYPE( DistGrid, sz, my_real, linterior, ginterior, status )
  2362. IF_NOTOK_RETURN(status=1)
  2363. i0 = DistGrid%i_strt
  2364. j0 = DistGrid%j_strt
  2365. ! ------- SCATTER -------------
  2366. if ( isRoot ) then ! send
  2367. ! get first chunk locally
  2368. i1 = DistGrid%i_stop
  2369. j1 = DistGrid%j_stop
  2370. dist_array(i0:i1,j0:j1,:,:) = glbl_array(i0:i1,j0:j1,:,:)
  2371. ! send remaining chunks to other pes
  2372. do klm=1,npes-1
  2373. i = DistGrid%bounds(1,klm)
  2374. j = DistGrid%bounds(3,klm)
  2375. call MPI_SSEND( glbl_array(i,j,1,1), 1, ginterior(klm), klm, 1, localComm, ierr)
  2376. end do
  2377. call FREE_DERIVED_TYPE( ginterior )
  2378. else
  2379. call MPI_RECV( dist_array(i0,j0,1,1), 1, linterior, root, 1, localComm, stat, ierr)
  2380. call MPI_TYPE_FREE(linterior, ierr)
  2381. end if
  2382. #endif
  2383. END SUBROUTINE SCATTER_4D_R
  2384. !EOC
  2385. !--------------------------------------------------------------------------
  2386. ! TM5 !
  2387. !--------------------------------------------------------------------------
  2388. !BOP
  2389. !
  2390. ! !IROUTINE: SCATTER_IBAND_1D_R
  2391. !
  2392. ! !DESCRIPTION: scatter a meridional real vector (1D) from root
  2393. !\\
  2394. !\\
  2395. ! !INTERFACE:
  2396. !
  2397. SUBROUTINE SCATTER_IBAND_1D_R( DistGrid, dist_array, glbl_array, status, iref )
  2398. !
  2399. ! !INPUT PARAMETERS:
  2400. !
  2401. type(dist_grid), intent(in) :: DistGrid
  2402. real, intent(in) :: glbl_array(:)
  2403. integer, optional, intent(in) :: iref ! to find targets, default=1
  2404. !
  2405. ! !OUTPUT PARAMETERS:
  2406. !
  2407. real, intent(out) :: dist_array(DistGrid%j_strt:)
  2408. integer, intent(out) :: status
  2409. !
  2410. ! !REVISION HISTORY:
  2411. ! 01 Nov 2011 - P. Le Sager - v0
  2412. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2413. !
  2414. ! !REMARKS: 1D version, along J index, of scatter_2d_r
  2415. !
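! A minimal usage sketch: send the meridional vector to the processes whose
! i-range contains the (illustrative) reference column iref=1; 'lat_vec' and
! 'glbl_lat_vec' are illustrative names:
!
! call SCATTER_IBAND_1D_R( DistGrid, lat_vec, glbl_lat_vec, status, iref=1 )
! IF_NOTOK_RETURN(status=1)
!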
  2416. !EOP
  2417. !------------------------------------------------------------------------
  2418. !BOC
  2419. character(len=*), parameter :: rname = mname//'scatter_iband_1d_r'
  2420. #ifndef MPI
  2421. dist_array( DistGrid%j_strt:DistGrid%j_stop ) = glbl_array
  2422. status = 0
  2423. #else
  2424. integer :: stat(MPI_STATUS_SIZE)
  2425. integer :: x_iref, n, klm, i0, j0, i1, j1, sz(1), i0t, j0t, i1t, j1t
  2426. status=0
  2427. ! ------- Check inputs
  2428. x_iref=1
  2429. if(present(iref)) x_iref=iref
  2430. sz = shape(dist_array)
  2431. i0 = DistGrid%i_strt
  2432. i1 = DistGrid%i_stop
  2433. j0 = DistGrid%j_strt
  2434. j1 = DistGrid%j_stop
  2435. ! ------- SEND/RECV -------------
  2436. if(okdebug)then
  2437. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, shape(glbl_array), iband=.true.)
  2438. IF_NOTOK_RETURN(status=1)
  2439. end if
  2440. if ( isRoot ) then
  2441. ! local case
  2442. if((x_iref>=i0).and.(x_iref<=i1)) dist_array(j0:j1) = glbl_array(j0:j1)
  2443. ! send remaining chunks to other pes
  2444. do klm=1,npes-1
  2445. i0t = DistGrid%bounds(1,klm)
  2446. i1t = DistGrid%bounds(2,klm)
  2447. j0t = DistGrid%bounds(3,klm)
  2448. j1t = DistGrid%bounds(4,klm)
  2449. ! is klm a target processor?
  2450. if((x_iref<i0t).or.(x_iref>i1t))cycle
  2451. n=j1t-j0t+1
  2452. call MPI_SSEND( glbl_array(j0t), n, my_real, klm, 1, localComm, ierr)
  2453. end do
  2454. else
  2455. ! are we on a target processor?
  2456. if((x_iref<i0).or.(x_iref>i1))return
  2457. n=j1-j0+1
  2458. call MPI_RECV( dist_array(j0), n, my_real, root, 1, localComm, stat, ierr)
  2459. end if
  2460. #endif
  2461. END SUBROUTINE SCATTER_IBAND_1D_R
  2462. !EOC
  2463. !--------------------------------------------------------------------------
  2464. ! TM5 !
  2465. !--------------------------------------------------------------------------
  2466. !BOP
  2467. !
  2468. ! !IROUTINE: SCATTER_JBAND_1D_R
  2469. !
  2470. ! !DESCRIPTION: scatter a zonal real vector (1D) from root
  2471. !\\
  2472. !\\
  2473. ! !INTERFACE:
  2474. !
  2475. SUBROUTINE SCATTER_JBAND_1D_R( DistGrid, dist_array, glbl_array, status, jref )
  2476. !
  2477. ! !INPUT PARAMETERS:
  2478. !
  2479. type(dist_grid), intent(in) :: DistGrid
  2480. real, intent(in) :: glbl_array(:)
  2481. integer, optional, intent(in) :: jref ! to find targets, default=1
  2482. !
  2483. ! !OUTPUT PARAMETERS:
  2484. !
  2485. real, intent(out) :: dist_array(DistGrid%i_strt:)
  2486. integer, intent(out) :: status
  2487. !
  2488. ! !REVISION HISTORY:
  2489. ! 01 Nov 2011 - P. Le Sager - v0
  2490. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2491. !
  2492. ! !REMARKS: 1D version, along I index, of scatter_2d_r
  2493. !
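!
! Usage sketch (illustrative only; same pattern as the commented-out tests in
! TESTCOMM below, through the generic name scatter_j_band):
!
!    real, allocatable :: src(:), tgt(:)
!    if (isRoot) then
!       allocate( src(DistGrid%im_region) )
!    else
!       allocate( src(1) )          ! dummy on non-root PEs
!    end if
!    allocate( tgt(DistGrid%i_strt:DistGrid%i_stop) )
!    ! only PEs whose J-range contains jref receive data
!    call scatter_j_band( DistGrid, tgt, src, status, jref=DistGrid%jm_region )
!    IF_NOTOK_RETURN(status=1)
!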
  2494. !EOP
  2495. !------------------------------------------------------------------------
  2496. !BOC
  2497. character(len=*), parameter :: rname = mname//'scatter_jband_1d_r'
  2498. #ifndef MPI
  2499. dist_array( DistGrid%i_strt:DistGrid%i_stop ) = glbl_array
  2500. status = 0
  2501. #else
  2502. integer :: stat(MPI_STATUS_SIZE)
  2503. integer :: x_jref, n, klm, i0, j0, i1, j1, sz(1), i0t, j0t, i1t, j1t
  2504. status=0
  2505. ! ------- Check inputs
  2506. x_jref=1
  2507. if(present(jref)) x_jref=jref
  2508. sz = shape(dist_array)
  2509. if(okdebug)then
  2510. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, shape(glbl_array), jband=.true.)
  2511. IF_NOTOK_RETURN(status=1)
  2512. end if
  2513. i0 = DistGrid%i_strt
  2514. i1 = DistGrid%i_stop
  2515. j0 = DistGrid%j_strt
  2516. j1 = DistGrid%j_stop
  2517. ! ------- SEND/RECV -------------
  2518. if ( isRoot ) then
  2519. ! local case
  2520. if((x_jref>=j0).and.(x_jref<=j1)) dist_array(i0:i1) = glbl_array(i0:i1)
  2521. ! send remaining chunks to other pes
  2522. do klm=1,npes-1
  2523. i0t = DistGrid%bounds(1,klm)
  2524. i1t = DistGrid%bounds(2,klm)
  2525. j0t = DistGrid%bounds(3,klm)
  2526. j1t = DistGrid%bounds(4,klm)
  2527. ! is klm a target processor?
  2528. if((x_jref<j0t).or.(x_jref>j1t))cycle
  2529. n=i1t-i0t+1
  2530. call MPI_SSEND( glbl_array(i0t), n, my_real, klm, 1, localComm, ierr)
  2531. end do
  2532. else
  2533. ! are we on a target processor?
  2534. if((x_jref<j0).or.(x_jref>j1))return
  2535. n=i1-i0+1
  2536. call MPI_RECV( dist_array(i0), n, my_real, root, 1, localComm, stat, ierr)
  2537. end if
  2538. #endif
  2539. END SUBROUTINE SCATTER_JBAND_1D_R
  2540. !EOC
  2541. !--------------------------------------------------------------------------
  2542. ! TM5 !
  2543. !--------------------------------------------------------------------------
  2544. !BOP
  2545. !
  2546. ! !IROUTINE: SCATTER_JBAND_2D_R
  2547. !
  2548. ! !DESCRIPTION: scatter a zonal slice (2D) from root
  2549. !\\
  2550. !\\
  2551. ! !INTERFACE:
  2552. !
  2553. SUBROUTINE SCATTER_JBAND_2D_R( DistGrid, dist_array, glbl_array, status, jref )
  2554. !
  2555. ! !INPUT PARAMETERS:
  2556. !
  2557. type(dist_grid), intent(in) :: DistGrid
  2558. real, intent(in) :: glbl_array(:,:)
  2559. integer, optional, intent(in) :: jref ! to find targets, default=1
  2560. !
  2561. ! !OUTPUT PARAMETERS:
  2562. !
  2563. real, intent(out) :: dist_array(DistGrid%i_strt:,:)
  2564. integer, intent(out) :: status
  2565. !
  2566. ! !REVISION HISTORY:
  2567. ! 01 Nov 2011 - P. Le Sager - v0
  2568. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2569. !
  2570. ! !REMARKS: 2D version, along I index, of scatter_3d_r
  2571. !
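!
! Usage sketch (illustrative only; "lm" is a made-up size of the undistributed
! second dimension, and src/tgt are allocated as in the 1D sketch above but
! with that extra dimension):
!
!    allocate( src(DistGrid%im_region, lm) )                 ! on root; src(1,1) elsewhere
!    allocate( tgt(DistGrid%i_strt:DistGrid%i_stop, lm) )
!    call scatter_j_band( DistGrid, tgt, src, status, jref=1 )
!    IF_NOTOK_RETURN(status=1)
!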
  2572. !EOP
  2573. !------------------------------------------------------------------------
  2574. !BOC
  2575. character(len=*), parameter :: rname = mname//'scatter_jband_2d_r'
  2576. #ifndef MPI
  2577. dist_array( DistGrid%i_strt:DistGrid%i_stop,: ) = glbl_array
  2578. status = 0
  2579. #else
  2580. integer :: stat(MPI_STATUS_SIZE), subarr
  2581. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  2582. integer :: x_jref, n, klm, sz(2), gsz(2)
  2583. status=0
  2584. ! ------- Check inputs
  2585. x_jref=1
  2586. if(present(jref)) x_jref=jref
  2587. sz = shape(dist_array)
  2588. gsz = shape(glbl_array)
  2589. if(okdebug)then
  2590. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, jband=.true.)
  2591. IF_NOTOK_RETURN(status=1)
  2592. end if
  2593. i0 = DistGrid%i_strt
  2594. i1 = DistGrid%i_stop
  2595. j0 = DistGrid%j_strt
  2596. j1 = DistGrid%j_stop
  2597. ! ------- SEND/RECV -------------
  2598. if ( isRoot ) then
  2599. !local case
  2600. if((x_jref>=j0).and.(x_jref<=j1)) dist_array(i0:i1,:) = glbl_array(i0:i1,:)
  2601. ! send remaining chunks to other pes
  2602. do klm=1,npes-1
  2603. i0t = DistGrid%bounds(1,klm)
  2604. i1t = DistGrid%bounds(2,klm)
  2605. j0t = DistGrid%bounds(3,klm)
  2606. j1t = DistGrid%bounds(4,klm)
  2607. ! if klm is a target processor, pack and send
  2608. if((x_jref<j0t).or.(x_jref>j1t))cycle
  2609. n=i1t-i0t+1
  2610. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, subarr, ierr)
  2611. call MPI_TYPE_COMMIT (subarr, ierr)
  2612. call MPI_SSEND( glbl_array(i0t,1), 1, subarr, klm, 1, localComm, ierr)
  2613. call MPI_TYPE_FREE (subarr, ierr)
  2614. end do
  2615. else
  2616. ! are we on a target processor?
  2617. if((x_jref<j0).or.(x_jref>j1))return
  2618. n=i1-i0+1
  2619. call MPI_RECV( dist_array(i0,1), n*sz(2), my_real, root, 1, localComm, stat, ierr)
  2620. end if
  2621. #endif
  2622. END SUBROUTINE SCATTER_JBAND_2D_R
  2623. !EOC
  2624. !--------------------------------------------------------------------------
  2625. ! TM5 !
  2626. !--------------------------------------------------------------------------
  2627. !BOP
  2628. !
  2629. ! !IROUTINE: SCATTER_IBAND_2D_R
  2630. !
  2631. ! !DESCRIPTION: scatter a meridional real array (2D) from root
  2632. !\\
  2633. !\\
  2634. ! !INTERFACE:
  2635. !
  2636. SUBROUTINE SCATTER_IBAND_2D_R( DistGrid, dist_array, glbl_array, status, iref )
  2637. !
  2638. ! !INPUT PARAMETERS:
  2639. !
  2640. type(dist_grid), intent(in) :: DistGrid
  2641. real, intent(in) :: glbl_array(:,:)
  2642. integer, optional, intent(in) :: iref ! to find targets, default=1
  2643. !
  2644. ! !OUTPUT PARAMETERS:
  2645. !
  2646. real, intent(out) :: dist_array(DistGrid%j_strt:,:)
  2647. integer, intent(out) :: status
  2648. !
  2649. ! !REVISION HISTORY:
  2650. ! 01 Nov 2011 - P. Le Sager - v0
  2651. ! 21 Jun 2013 - P. Le Sager - MPI_SEND -> MPI_SSEND to avoid buffering
  2652. !
  2653. ! !REMARKS: 2D version, along J index, of scatter_3d_r
  2654. !
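!
! Usage sketch (illustrative only; "lm" is a made-up size of the undistributed
! second dimension):
!
!    allocate( src(DistGrid%jm_region, lm) )                 ! on root; src(1,1) elsewhere
!    allocate( tgt(DistGrid%j_strt:DistGrid%j_stop, lm) )
!    ! only PEs whose I-range contains iref receive data
!    call scatter_i_band( DistGrid, tgt, src, status, iref=DistGrid%im_region )
!    IF_NOTOK_RETURN(status=1)
!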
  2655. !EOP
  2656. !------------------------------------------------------------------------
  2657. !BOC
  2658. character(len=*), parameter :: rname = mname//'scatter_iband_2d_r'
  2659. #ifndef MPI
  2660. dist_array( DistGrid%j_strt:DistGrid%j_stop,: ) = glbl_array
  2661. status = 0
  2662. #else
  2663. integer :: stat(MPI_STATUS_SIZE), subarr
  2664. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  2665. integer :: x_iref, n, klm, sz(2), gsz(2)
  2666. status=0
  2667. ! ------- Check inputs
  2668. x_iref=1
  2669. if(present(iref)) x_iref=iref
  2670. sz = shape(dist_array)
  2671. gsz = shape(glbl_array)
  2672. if(okdebug)then
  2673. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, iband=.true.)
  2674. IF_NOTOK_RETURN(status=1)
  2675. end if
  2676. i0 = DistGrid%i_strt
  2677. i1 = DistGrid%i_stop
  2678. j0 = DistGrid%j_strt
  2679. j1 = DistGrid%j_stop
  2680. ! ------- SEND/RECV -------------
  2681. if ( isRoot ) then
  2682. ! local case
  2683. if((x_iref>=i0).and.(x_iref<=i1)) dist_array(j0:j1,:) = glbl_array(j0:j1,:)
  2684. ! send remaining chunks to other pes
  2685. do klm=1,npes-1
  2686. i0t = DistGrid%bounds(1,klm)
  2687. i1t = DistGrid%bounds(2,klm)
  2688. j0t = DistGrid%bounds(3,klm)
  2689. j1t = DistGrid%bounds(4,klm)
  2690. ! is klm a target processor?
  2691. if((x_iref<i0t).or.(x_iref>i1t))cycle
  2692. n=j1t-j0t+1
  2693. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, subarr, ierr)
  2694. call MPI_TYPE_COMMIT (subarr, ierr)
  2695. call MPI_SSEND( glbl_array(j0t,1), 1, subarr, klm, 1, localComm, ierr)
  2696. call MPI_TYPE_FREE (subarr, ierr)
  2697. end do
  2698. else
  2699. ! are we on a target processor?
  2700. if((x_iref<i0).or.(x_iref>i1))return
  2701. n=j1-j0+1
  2702. call MPI_RECV( dist_array(j0,1), n*sz(2), my_real, root, 1, localComm, stat, ierr)
  2703. end if
  2704. #endif
  2705. END SUBROUTINE SCATTER_IBAND_2D_R
  2706. !EOC
  2707. !--------------------------------------------------------------------------
  2708. ! TM5 !
  2709. !--------------------------------------------------------------------------
  2710. !BOP
  2711. !
  2712. ! !IROUTINE: SCATTER_JBAND_3D_R
  2713. !
2714. ! !DESCRIPTION: scatter a zonal slab (3D) from root
  2715. !\\
  2716. !\\
  2717. ! !INTERFACE:
  2718. !
  2719. SUBROUTINE SCATTER_JBAND_3D_R( DistGrid, dist_array, glbl_array, status, jref, rowcom )
  2720. !
  2721. ! !INPUT PARAMETERS:
  2722. !
  2723. type(dist_grid), intent(in) :: DistGrid
  2724. real, intent(in) :: glbl_array(:,:,:)
  2725. integer, optional, intent(in) :: jref ! to find targets, default=1
  2726. logical, optional, intent(in) :: rowcom ! to scatter from row_root instead of global root
  2727. !
  2728. ! !OUTPUT PARAMETERS:
  2729. !
  2730. real, intent(out) :: dist_array(DistGrid%i_strt:,:,:)
  2731. integer, intent(out) :: status
  2732. !
  2733. ! !REVISION HISTORY:
  2734. !
2735. ! !REMARKS: 3D version of scatter_jband_2d_r, i.e. only the first (I) index is distributed
  2736. !
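!
! Usage sketch (illustrative only; the specific routine name is used here since
! no generic binding for the 3D band version is shown in this file, and
! "lm"/"nt" are made-up sizes of the undistributed dimensions):
!
!    allocate( src(DistGrid%im_region, lm, nt) )             ! on the sending root; src(1,1,1) elsewhere
!    allocate( tgt(DistGrid%i_strt:DistGrid%i_stop, lm, nt) )
!    call SCATTER_JBAND_3D_R( DistGrid, tgt, src, status, jref=1, rowcom=.false. )
!    IF_NOTOK_RETURN(status=1)
!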
  2737. !EOP
  2738. !------------------------------------------------------------------------
  2739. !BOC
  2740. character(len=*), parameter :: rname = mname//'scatter_jband_3d_r'
  2741. #ifndef MPI
  2742. dist_array( DistGrid%i_strt:DistGrid%i_stop,:,: ) = glbl_array
  2743. status = 0
  2744. #else
  2745. integer :: stat(MPI_STATUS_SIZE), subarr
  2746. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  2747. integer :: x_jref, n, klm, sz(3), gsz(3), slab, tgt_root
  2748. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, stride
  2749. logical :: selectRoot
  2750. status=0
  2751. ! ------- Check inputs
  2752. x_jref=1
  2753. if(present(jref)) x_jref=jref
  2754. i0 = DistGrid%i_strt
  2755. i1 = DistGrid%i_stop
  2756. j0 = DistGrid%j_strt
  2757. j1 = DistGrid%j_stop
  2758. ! by default scatter from global root
  2759. selectRoot = isRoot
  2760. tgt_root = root
  2761. if (present(rowcom)) then
  2762. if (rowcom) then
  2763. selectRoot = isRowRoot.and.(x_jref>=j0).and.(x_jref<=j1)
  2764. tgt_root = RowRootId
  2765. endif
  2766. endif
  2767. sz = shape(dist_array)
  2768. gsz = shape(glbl_array)
  2769. if(okdebug)then
  2770. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, jband=.true., has_global=selectRoot)
  2771. IF_NOTOK_RETURN(status=1)
  2772. end if
  2773. ! ------- SEND/RECV -------------
  2774. if ( selectRoot ) then
  2775. !local case
  2776. if((x_jref>=j0).and.(x_jref<=j1)) dist_array(i0:i1,:,:) = glbl_array(i0:i1,:,:)
  2777. ! send remaining chunks to other pes
  2778. do klm=0,npes-1
  2779. if (klm==myid) cycle ! skip local case
  2780. i0t = DistGrid%bounds(1,klm)
  2781. i1t = DistGrid%bounds(2,klm)
  2782. j0t = DistGrid%bounds(3,klm)
  2783. j1t = DistGrid%bounds(4,klm)
  2784. ! if klm is a target processor, pack and send
  2785. if((x_jref<j0t).or.(x_jref>j1t))cycle
  2786. n=i1t-i0t+1
  2787. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, slab, ierr)
  2788. CALL MPI_TYPE_GET_EXTENT( my_real, lb, sizeoftype, ierr)
  2789. stride = gsz(1)*gsz(2)*sizeoftype
  2790. call MPI_TYPE_CREATE_HVECTOR (sz(3), 1, stride, slab, subarr, ierr)
  2791. call MPI_TYPE_FREE (slab, ierr)
  2792. call MPI_TYPE_COMMIT (subarr, ierr)
  2793. call MPI_SSEND( glbl_array(i0t,1,1), 1, subarr, klm, 1, localComm, ierr)
  2794. call MPI_TYPE_FREE (subarr, ierr)
  2795. end do
  2796. else
  2797. ! are we on a target processor?
  2798. if((x_jref<j0).or.(x_jref>j1))return
  2799. n=i1-i0+1
  2800. call MPI_RECV( dist_array(i0,1,1), n*sz(2)*sz(3), my_real, tgt_root, 1, localComm, stat, ierr)
  2801. end if
  2802. #endif
  2803. END SUBROUTINE SCATTER_JBAND_3D_R
  2804. !EOC
  2805. !--------------------------------------------------------------------------
  2806. ! TM5 !
  2807. !--------------------------------------------------------------------------
  2808. !BOP
  2809. !
  2810. ! !IROUTINE: SCATTER_JBAND_4D_R
  2811. !
  2812. ! !DESCRIPTION: scatter a zonal slice (4D) from root
  2813. !\\
  2814. !\\
  2815. ! !INTERFACE:
  2816. !
  2817. SUBROUTINE SCATTER_JBAND_4D_R( DistGrid, dist_array, glbl_array, status, jref, rowcom )
  2818. !
  2819. ! !INPUT PARAMETERS:
  2820. !
  2821. type(dist_grid), intent(in) :: DistGrid
  2822. real, intent(in) :: glbl_array(:,:,:,:)
  2823. integer, optional, intent(in) :: jref ! to find targets, default=1
  2824. logical, optional, intent(in) :: rowcom ! to scatter from row_root instead of global root
  2825. !
  2826. ! !OUTPUT PARAMETERS:
  2827. !
  2828. real, intent(out) :: dist_array(DistGrid%i_strt:,:,:,:)
  2829. integer, intent(out) :: status
  2830. !
  2831. ! !REVISION HISTORY:
  2832. ! 17 Feb 2015 - Ph. Le Sager - v0
  2833. !
2834. ! !REMARKS: 4D version of scatter_jband_3d_r, i.e. only the first (I) index is distributed
  2835. !
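!
! Usage sketch (illustrative only; same calling pattern as SCATTER_JBAND_3D_R
! above, with one extra undistributed dimension on both arrays):
!
!    call SCATTER_JBAND_4D_R( DistGrid, tgt4, src4, status, jref=1 )
!    IF_NOTOK_RETURN(status=1)
!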
  2836. !EOP
  2837. !------------------------------------------------------------------------
  2838. !BOC
  2839. character(len=*), parameter :: rname = mname//'scatter_jband_4d_r'
  2840. #ifndef MPI
  2841. dist_array( DistGrid%i_strt:DistGrid%i_stop,:,:,: ) = glbl_array
  2842. status = 0
  2843. #else
  2844. integer :: stat(MPI_STATUS_SIZE), subarr
  2845. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  2846. integer :: x_jref, n, klm, sz(4), gsz(4), slab, tgt_root
  2847. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, stride
  2848. logical :: selectRoot
  2849. status=0
  2850. ! ------- Check inputs
  2851. x_jref=1
  2852. if(present(jref)) x_jref=jref
  2853. i0 = DistGrid%i_strt
  2854. i1 = DistGrid%i_stop
  2855. j0 = DistGrid%j_strt
  2856. j1 = DistGrid%j_stop
  2857. ! by default scatter from global root
  2858. selectRoot = isRoot
  2859. tgt_root = root
  2860. if (present(rowcom)) then
  2861. if (rowcom) then
  2862. selectRoot = isRowRoot.and.(x_jref>=j0).and.(x_jref<=j1)
  2863. tgt_root = RowRootId
  2864. endif
  2865. endif
  2866. sz = shape(dist_array)
  2867. gsz = shape(glbl_array)
  2868. if(okdebug)then
  2869. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, jband=.true., has_global=selectRoot)
  2870. IF_NOTOK_RETURN(status=1)
  2871. end if
  2872. ! ------- SEND/RECV -------------
  2873. if ( selectRoot ) then
  2874. !local case
  2875. if((x_jref>=j0).and.(x_jref<=j1)) dist_array(i0:i1,:,:,:) = glbl_array(i0:i1,:,:,:)
  2876. ! send remaining chunks to other pes
  2877. do klm=0,npes-1
  2878. if (klm==myid) cycle ! skip local case
  2879. i0t = DistGrid%bounds(1,klm)
  2880. i1t = DistGrid%bounds(2,klm)
  2881. j0t = DistGrid%bounds(3,klm)
  2882. j1t = DistGrid%bounds(4,klm)
  2883. ! if klm is a target processor, pack and send
  2884. if((x_jref<j0t).or.(x_jref>j1t))cycle
  2885. n=i1t-i0t+1
  2886. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, slab, ierr)
  2887. CALL MPI_TYPE_GET_EXTENT( my_real, lb, sizeoftype, ierr)
  2888. stride = gsz(1)*gsz(2)*sizeoftype
  2889. call MPI_TYPE_CREATE_HVECTOR (sz(3)*sz(4), 1, stride, slab, subarr, ierr)
  2890. call MPI_TYPE_FREE (slab, ierr)
  2891. call MPI_TYPE_COMMIT (subarr, ierr)
  2892. call MPI_SSEND( glbl_array(i0t,1,1,1), 1, subarr, klm, 1, localComm, ierr)
  2893. call MPI_TYPE_FREE (subarr, ierr)
  2894. end do
  2895. else
  2896. ! are we on a target processor?
  2897. if((x_jref<j0).or.(x_jref>j1))return
  2898. n=i1-i0+1
  2899. call MPI_RECV( dist_array(i0,1,1,1), n*sz(2)*sz(3)*sz(4), my_real, tgt_root, 1, localComm, stat, ierr)
  2900. end if
  2901. #endif
  2902. END SUBROUTINE SCATTER_JBAND_4D_R
  2903. !EOC
  2904. !--------------------------------------------------------------------------
  2905. ! TM5 !
  2906. !--------------------------------------------------------------------------
  2907. !BOP
  2908. !
  2909. ! !IROUTINE: GATHER_JBAND_2D_R
  2910. !
2911. ! !DESCRIPTION: gather a zonal slice (2D) on root, i.e. a 2D array whose first
2912. ! dimension is distributed along I (making it a J-band) and whose
2913. ! second dimension is not distributed at all. For example:
2914. ! [i1:i2, lev] or [i1:i2, trac]
  2915. !\\
  2916. !\\
  2917. ! !INTERFACE:
  2918. !
  2919. SUBROUTINE GATHER_JBAND_2D_R( DistGrid, dist_array, glbl_array, status, jref )
  2920. !
  2921. ! !INPUT PARAMETERS:
  2922. !
  2923. type(dist_grid), intent(in) :: DistGrid
  2924. real, intent(in) :: dist_array(DistGrid%i_strt:,:)
  2925. integer, intent(in) :: jref ! to find sources
  2926. !
  2927. ! !OUTPUT PARAMETERS:
  2928. !
  2929. real, intent(out) :: glbl_array(:,:)
  2930. integer, intent(out) :: status
  2931. !
  2932. ! !REVISION HISTORY:
  2933. ! 01 Nov 2011 - P. Le Sager - v0
  2934. !
  2935. ! !REMARKS:
  2936. !
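!
! Usage sketch (illustrative only; "lm" is a made-up size of the undistributed
! second dimension; only PEs whose J-range contains jref, plus root, take part):
!
!    real, allocatable :: loc(:,:), glb(:,:)
!    allocate( loc(DistGrid%i_strt:DistGrid%i_stop, lm) )
!    if (isRoot) then
!       allocate( glb(DistGrid%im_region, lm) )
!    else
!       allocate( glb(1,1) )        ! dummy on non-root PEs
!    end if
!    call GATHER_JBAND_2D_R( DistGrid, loc, glb, status, jref=1 )
!    IF_NOTOK_RETURN(status=1)
!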
  2937. !EOP
  2938. !------------------------------------------------------------------------
  2939. !BOC
  2940. character(len=*), parameter :: rname = mname//'gather_jband_2d_r'
  2941. #ifndef MPI
  2942. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop,: )
  2943. status = 0
  2944. #else
  2945. integer :: stat(MPI_STATUS_SIZE), subarr
  2946. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  2947. integer :: n, klm, sz(2), gsz(2)
  2948. status=0
  2949. ! ------- Check inputs
  2950. sz = shape(dist_array)
  2951. gsz = shape(glbl_array)
  2952. if(okdebug)then
  2953. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, jband=.true.)
  2954. IF_NOTOK_RETURN(status=1)
  2955. end if
  2956. i0 = DistGrid%i_strt
  2957. i1 = DistGrid%i_stop
  2958. j0 = DistGrid%j_strt
  2959. j1 = DistGrid%j_stop
  2960. ! ------- SEND/RECV -------------
  2961. if ( isRoot ) then
  2962. ! local case
  2963. if((jref>=j0).and.(jref<=j1)) glbl_array(i0:i1,:) = dist_array(i0:i1,:)
  2964. ! receive remaining chunks from other pes
  2965. do klm=1, npes-1
  2966. i0t = DistGrid%bounds(1,klm)
  2967. i1t = DistGrid%bounds(2,klm)
  2968. j0t = DistGrid%bounds(3,klm)
  2969. j1t = DistGrid%bounds(4,klm)
  2970. ! if klm is a source processor, receive from it
  2971. if((jref<j0t).or.(jref>j1t))cycle
  2972. n=i1t-i0t+1
  2973. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, subarr, ierr)
  2974. call MPI_TYPE_COMMIT (subarr, ierr)
  2975. call MPI_RECV( glbl_array(i0t,1), 1, subarr, klm, jref, localComm, stat, ierr)
  2976. call MPI_TYPE_FREE (subarr, ierr)
  2977. end do
  2978. else
  2979. ! are we on a src processor?
  2980. if((jref<j0).or.(jref>j1))return
  2981. n=i1-i0+1
  2982. call MPI_SEND( dist_array(i0,1), n*sz(2), my_real, root, jref, localComm, ierr)
  2983. end if
  2984. #endif
  2985. END SUBROUTINE GATHER_JBAND_2D_R
  2986. !EOC
  2987. !--------------------------------------------------------------------------
  2988. ! TM5 !
  2989. !--------------------------------------------------------------------------
  2990. !BOP
  2991. !
  2992. ! !IROUTINE: GATHER_JBAND_3D_R
  2993. !
  2994. ! !DESCRIPTION: Gather a zonal slab (3D) on root or rowroot(jref) [i.e. the
  2995. ! root of the row of procs].
  2996. ! Local arrays [i1:i2, a:b, c:d] are gathered into the root
  2997. ! proc as [1:im, 1:b-a+1, 1:d-c+1]. Caller has to ensure that at least
  2998. ! **ALL** the ROW procs call this routine, plus root if needed.
  2999. ! No halo possible yet.
  3000. !\\
  3001. !\\
  3002. ! !INTERFACE:
  3003. !
  3004. SUBROUTINE GATHER_JBAND_3D_R( DistGrid, dist_array, glbl_array, status, jref, rowcom)
  3005. !
  3006. ! !INPUT PARAMETERS:
  3007. !
  3008. type(dist_grid), intent(in) :: DistGrid
  3009. real, intent(in) :: dist_array(DistGrid%i_strt:,:,:)
  3010. integer, intent(in) :: jref ! to find sources (defines the row we want)
  3011. logical, optional, intent(in) :: rowcom ! to gather on row_root instead of global root
  3012. !
  3013. ! !OUTPUT PARAMETERS:
  3014. !
  3015. real, intent(out) :: glbl_array(:,:,:)
  3016. integer, intent(out) :: status
  3017. !
  3018. ! !REVISION HISTORY:
  3019. ! 01 Nov 2011 - P. Le Sager - v0
  3020. !
  3021. ! !REMARKS: use in budget for gathering fluxes, advect_cfl
  3022. !
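!
! Usage sketch (illustrative only; "lm"/"nt" are made-up sizes; with
! rowcom=.true. the slab is gathered on the row-root of the row that owns
! J=jref instead of the global root):
!
!    allocate( loc(DistGrid%i_strt:DistGrid%i_stop, lm, nt) )
!    allocate( glb(DistGrid%im_region, lm, nt) )    ! significant on the receiving root only
!    call GATHER_JBAND_3D_R( DistGrid, loc, glb, status, jref=1, rowcom=.true. )
!    IF_NOTOK_RETURN(status=1)
!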
  3023. !EOP
  3024. !------------------------------------------------------------------------
  3025. !BOC
  3026. character(len=*), parameter :: rname = mname//'gather_jband_3d_r'
  3027. #ifndef MPI
  3028. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop,:,:)
  3029. status = 0
  3030. #else
  3031. integer :: stat(MPI_STATUS_SIZE), subarr
  3032. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  3033. integer :: n, klm, sz(3), gsz(3), slab, tgt_root
  3034. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, stride
  3035. logical :: selectRoot
  3036. status=0
  3037. ! ------- Check inputs
  3038. i0 = DistGrid%i_strt
  3039. i1 = DistGrid%i_stop
  3040. j0 = DistGrid%j_strt
  3041. j1 = DistGrid%j_stop
  3042. ! by default gather into global root
  3043. selectRoot = isRoot
  3044. tgt_root = root
  3045. if (present(rowcom)) then
  3046. if (rowcom) then
  3047. selectRoot = isRowRoot.and.(jref>=j0).and.(jref<=j1)
  3048. tgt_root = RowRootId
  3049. endif
  3050. endif
  3051. sz = shape(dist_array)
  3052. gsz = shape(glbl_array)
  3053. if(okdebug)then
  3054. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, jband=.true., has_global=selectRoot)
  3055. IF_NOTOK_RETURN(status=1)
  3056. end if
  3057. ! ------- SEND/RECV -------------
  3058. if ( selectRoot ) then
  3059. ! local case
  3060. if((jref>=j0).and.(jref<=j1)) glbl_array(i0:i1,:,:) = dist_array(i0:i1,:,:)
  3061. ! receive remaining chunks from other pes
  3062. do klm=0,npes-1
  3063. if (klm==myid) cycle ! skip local case
  3064. i0t = DistGrid%bounds(1,klm)
  3065. i1t = DistGrid%bounds(2,klm)
  3066. j0t = DistGrid%bounds(3,klm)
  3067. j1t = DistGrid%bounds(4,klm)
  3068. ! if klm is a source processor, receive from it
  3069. if((jref<j0t).or.(jref>j1t))cycle
  3070. n=i1t-i0t+1
  3071. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, slab, ierr)
  3072. CALL MPI_TYPE_GET_EXTENT( my_real, lb, sizeoftype, ierr)
  3073. stride = gsz(1)*gsz(2)*sizeoftype
  3074. call MPI_TYPE_CREATE_HVECTOR (sz(3), 1, stride, slab, subarr, ierr)
  3075. call MPI_TYPE_FREE (slab, ierr)
  3076. call MPI_TYPE_COMMIT (subarr, ierr)
  3077. call MPI_RECV( glbl_array(i0t,1,1), 1, subarr, klm, jref, localComm, stat, ierr)
  3078. call MPI_TYPE_FREE (subarr, ierr)
  3079. end do
  3080. else
  3081. ! are we on a src processor?
  3082. if((jref<j0).or.(jref>j1))return
  3083. n=i1-i0+1
  3084. call MPI_SEND( dist_array(i0,1,1), n*sz(2)*sz(3), my_real, tgt_root, jref, localComm, ierr)
  3085. end if
  3086. #endif
  3087. END SUBROUTINE GATHER_JBAND_3D_R
  3088. !EOC
  3089. !--------------------------------------------------------------------------
  3090. ! TM5 !
  3091. !--------------------------------------------------------------------------
  3092. !BOP
  3093. !
  3094. ! !IROUTINE: GATHER_JBAND_4D_R
  3095. !
  3096. ! !DESCRIPTION: Gather a zonal slab (4D) on root or rowroot(jref) [i.e. the
  3097. ! root of the row of procs].
  3098. ! Local arrays [i1:i2, a:b, c:d, e:f] are gathered into the root
  3099. ! proc as [1:im, 1:b-a+1, 1:d-c+1, 1:f-e+1]. Caller has to
  3100. ! ensure that at least **ALL** the ROW procs call this routine,
  3101. ! plus root if needed.
  3102. ! No halo possible yet.
  3103. !\\
  3104. !\\
  3105. ! !INTERFACE:
  3106. !
  3107. SUBROUTINE GATHER_JBAND_4D_R( DistGrid, dist_array, glbl_array, status, jref, rowcom)
  3108. !
  3109. ! !INPUT PARAMETERS:
  3110. !
  3111. type(dist_grid), intent(in) :: DistGrid
  3112. real, intent(in) :: dist_array(DistGrid%i_strt:,:,:,:)
  3113. integer, intent(in) :: jref ! to find sources (defines the row we want)
  3114. logical, optional, intent(in) :: rowcom ! to gather on row_root instead of global root
  3115. !
  3116. ! !OUTPUT PARAMETERS:
  3117. !
  3118. real, intent(out) :: glbl_array(:,:,:,:)
  3119. integer, intent(out) :: status
  3120. !
  3121. ! !REVISION HISTORY:
  3122. ! 17 Feb 2015 - Ph. Le Sager - v0
  3123. !
  3124. ! !REMARKS: use in advectx
  3125. !
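!
! Usage sketch (illustrative only; same calling pattern as GATHER_JBAND_3D_R
! above, with one extra undistributed dimension on both arrays):
!
!    call GATHER_JBAND_4D_R( DistGrid, loc4, glb4, status, jref=1, rowcom=.false. )
!    IF_NOTOK_RETURN(status=1)
!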
  3126. !EOP
  3127. !------------------------------------------------------------------------
  3128. !BOC
  3129. character(len=*), parameter :: rname = mname//'gather_jband_4d_r'
  3130. #ifndef MPI
  3131. glbl_array = dist_array( DistGrid%i_strt:DistGrid%i_stop,:,:,:)
  3132. status = 0
  3133. #else
  3134. integer :: stat(MPI_STATUS_SIZE), subarr
  3135. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  3136. integer :: n, klm, sz(4), gsz(4), slab, tgt_root, stack
  3137. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, stride
  3138. logical :: selectRoot
  3139. status=0
  3140. ! ------- Check inputs
  3141. i0 = DistGrid%i_strt
  3142. i1 = DistGrid%i_stop
  3143. j0 = DistGrid%j_strt
  3144. j1 = DistGrid%j_stop
  3145. ! by default gather into global root
  3146. selectRoot = isRoot
  3147. tgt_root = root
  3148. if (present(rowcom)) then
  3149. if (rowcom) then
  3150. selectRoot = isRowRoot.and.(jref>=j0).and.(jref<=j1)
  3151. tgt_root = RowRootId
  3152. endif
  3153. endif
  3154. sz = shape(dist_array)
  3155. gsz = shape(glbl_array)
  3156. stack = sz(3)*sz(4)
  3157. if(okdebug)then
  3158. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, jband=.true., has_global=selectRoot)
  3159. IF_NOTOK_RETURN(status=1)
  3160. end if
  3161. ! ------- SEND/RECV -------------
  3162. if ( selectRoot ) then
  3163. ! local case
  3164. if((jref>=j0).and.(jref<=j1)) glbl_array(i0:i1,:,:,:) = dist_array(i0:i1,:,:,:)
  3165. ! receive remaining chunks from other pes
  3166. do klm=0,npes-1
  3167. if (klm==myid) cycle ! skip local case
  3168. i0t = DistGrid%bounds(1,klm)
  3169. i1t = DistGrid%bounds(2,klm)
  3170. j0t = DistGrid%bounds(3,klm)
  3171. j1t = DistGrid%bounds(4,klm)
  3172. ! if klm is a source processor, receive from it
  3173. if((jref<j0t).or.(jref>j1t))cycle
  3174. n=i1t-i0t+1
  3175. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, slab, ierr)
  3176. CALL MPI_TYPE_GET_EXTENT( my_real, lb, sizeoftype, ierr)
  3177. stride = gsz(1)*gsz(2)*sizeoftype
  3178. call MPI_TYPE_CREATE_HVECTOR (stack, 1, stride, slab, subarr, ierr)
  3179. call MPI_TYPE_FREE (slab, ierr)
  3180. call MPI_TYPE_COMMIT (subarr, ierr)
  3181. call MPI_RECV( glbl_array(i0t,1,1,1), 1, subarr, klm, jref, localComm, stat, ierr)
  3182. call MPI_TYPE_FREE (subarr, ierr)
  3183. end do
  3184. else
  3185. ! are we on a src processor?
  3186. if((jref<j0).or.(jref>j1))return
  3187. n=i1-i0+1
  3188. call MPI_SEND( dist_array(i0,1,1,1), n*sz(2)*sz(3)*sz(4), my_real, tgt_root, jref, localComm, ierr)
  3189. end if
  3190. #endif
  3191. END SUBROUTINE GATHER_JBAND_4D_R
  3192. !EOC
  3193. !--------------------------------------------------------------------------
  3194. ! TM5 !
  3195. !--------------------------------------------------------------------------
  3196. !BOP
  3197. !
  3198. ! !IROUTINE: GATHER_IBAND_1D_R
  3199. !
3200. ! !DESCRIPTION: gather a meridional vector (its only dimension distributed
3201. ! along J) on root. For example: [j1:j2]
  3202. !\\
  3203. !\\
  3204. ! !INTERFACE:
  3205. !
  3206. SUBROUTINE GATHER_IBAND_1D_R( DistGrid, dist_array, glbl_array, status, iref )
  3207. !
  3208. ! !INPUT PARAMETERS:
  3209. !
  3210. type(dist_grid), intent(in) :: DistGrid
  3211. real, intent(in) :: dist_array(DistGrid%j_strt:)
  3212. integer, intent(in) :: iref ! to define source processors
  3213. !
  3214. ! !OUTPUT PARAMETERS:
  3215. !
  3216. real, intent(out) :: glbl_array(:)
  3217. integer, intent(out) :: status
  3218. !
  3219. ! !REVISION HISTORY:
  3220. ! 01 Nov 2011 - P. Le Sager - v0
  3221. !
  3222. ! !REMARKS:
  3223. ! - all processors with an i-range containing IREF are used.
  3224. !
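!
! Usage sketch (illustrative only; gathers on root the meridional vector held
! by the PEs whose I-range contains iref):
!
!    real, allocatable :: loc(:), glb(:)
!    allocate( loc(DistGrid%j_strt:DistGrid%j_stop) )
!    if (isRoot) then
!       allocate( glb(DistGrid%jm_region) )
!    else
!       allocate( glb(1) )          ! dummy on non-root PEs
!    end if
!    call GATHER_IBAND_1D_R( DistGrid, loc, glb, status, iref=1 )
!    IF_NOTOK_RETURN(status=1)
!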
  3225. !EOP
  3226. !------------------------------------------------------------------------
  3227. !BOC
  3228. character(len=*), parameter :: rname = mname//'gather_iband_1d_r'
  3229. #ifndef MPI
  3230. glbl_array = dist_array( DistGrid%j_strt:DistGrid%j_stop )
  3231. status = 0
  3232. #else
  3233. integer :: stat(MPI_STATUS_SIZE), subarr
  3234. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  3235. integer :: n, klm, sz(1), gsz(1)
  3236. status=0
  3237. ! ------- Check inputs
  3238. sz = shape(dist_array)
  3239. gsz = shape(glbl_array)
  3240. if(okdebug)then
  3241. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, iband=.true.)
  3242. ! write(gol,*)"lbound m",lbound(dist_array); call goPr
  3243. ! write(gol,*)"ubound m",ubound(dist_array); call goPr
  3244. IF_NOTOK_RETURN(status=1)
  3245. end if
  3246. i0 = DistGrid%i_strt
  3247. i1 = DistGrid%i_stop
  3248. j0 = DistGrid%j_strt
  3249. j1 = DistGrid%j_stop
  3250. ! ------- SEND/RECV -------------
  3251. if ( isRoot ) then
  3252. ! local case
  3253. if((iref>=i0).and.(iref<=i1)) glbl_array(j0:j1) = dist_array(j0:j1)
  3254. ! receive remaining chunks from other pes
  3255. do klm=1, npes-1
  3256. i0t = DistGrid%bounds(1,klm)
  3257. i1t = DistGrid%bounds(2,klm)
  3258. j0t = DistGrid%bounds(3,klm)
  3259. j1t = DistGrid%bounds(4,klm)
  3260. ! if klm is a source processor, receive from it
  3261. if((iref<i0t).or.(iref>i1t))cycle
  3262. n=j1t-j0t+1
  3263. call MPI_RECV( glbl_array(j0t), n, my_real, klm, iref, localComm, stat, ierr)
  3264. end do
  3265. else
  3266. ! are we on a src processor?
  3267. if((iref<i0).or.(iref>i1)) return
  3268. n=j1-j0+1
  3269. call MPI_SEND( dist_array(j0), n, my_real, root, iref, localComm, ierr)
  3270. end if
  3271. #endif
  3272. END SUBROUTINE GATHER_IBAND_1D_R
  3273. !EOC
  3274. !--------------------------------------------------------------------------
  3275. ! TM5 !
  3276. !--------------------------------------------------------------------------
  3277. !BOP
  3278. !
  3279. ! !IROUTINE: GATHER_IBAND_3D_R
  3280. !
3281. ! !DESCRIPTION: gather a meridional slice (3D) on root, for arrays like
3282. ! [j1:j2, lev, trac], i.e. without a distributed I dimension.
  3283. !\\
  3284. !\\
  3285. ! !INTERFACE:
  3286. !
  3287. SUBROUTINE GATHER_IBAND_3D_R( DistGrid, dist_array, glbl_array, status, iref )
  3288. !
  3289. ! !INPUT PARAMETERS:
  3290. !
  3291. type(dist_grid), intent(in) :: DistGrid
  3292. real, intent(in) :: dist_array(DistGrid%j_strt:,:,:)
  3293. integer, intent(in) :: iref ! to find sources
  3294. !
  3295. ! !OUTPUT PARAMETERS:
  3296. !
  3297. real, intent(out) :: glbl_array(:,:,:)
  3298. integer, intent(out) :: status
  3299. !
  3300. ! !REVISION HISTORY:
  3301. ! 01 Nov 2011 - P. Le Sager - v0
  3302. !
  3303. ! !REMARKS: use in budget for gathering fluxes
  3304. !
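!
! Usage sketch (illustrative only; "lm"/"nt" are made-up sizes of the
! undistributed dimensions; allocation follows the 1D sketch above):
!
!    allocate( loc(DistGrid%j_strt:DistGrid%j_stop, lm, nt) )
!    allocate( glb(DistGrid%jm_region, lm, nt) )    ! on root; glb(1,1,1) elsewhere
!    call GATHER_IBAND_3D_R( DistGrid, loc, glb, status, iref=1 )
!    IF_NOTOK_RETURN(status=1)
!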
  3305. !EOP
  3306. !------------------------------------------------------------------------
  3307. !BOC
  3308. character(len=*), parameter :: rname = mname//'gather_iband_3d_r'
  3309. #ifndef MPI
  3310. glbl_array = dist_array( DistGrid%j_strt:DistGrid%j_stop,:,:)
  3311. status = 0
  3312. #else
  3313. integer :: stat(MPI_STATUS_SIZE), subarr
  3314. integer :: i0, j0, i1, j1, i0t, j0t, i1t, j1t
  3315. integer :: n, klm, sz(3), gsz(3), slab
  3316. integer(kind=MPI_ADDRESS_KIND) :: sizeoftype, lb, stride
  3317. status=0
  3318. ! ------- Check inputs
  3319. sz = shape(dist_array)
  3320. gsz = shape(glbl_array)
  3321. if(okdebug)then
  3322. CALL CHECK_DIST_ARR( DistGrid, sz, 0, status, gsz, iband=.true.)
  3323. IF_NOTOK_RETURN(status=1)
  3324. end if
  3325. i0 = DistGrid%i_strt
  3326. i1 = DistGrid%i_stop
  3327. j0 = DistGrid%j_strt
  3328. j1 = DistGrid%j_stop
  3329. ! ------- SEND/RECV -------------
  3330. if ( isRoot ) then
  3331. ! local case
  3332. if((iref>=i0).and.(iref<=i1)) glbl_array(j0:j1,:,:) = dist_array(j0:j1,:,:)
  3333. ! receive remaining chunks from other pes
  3334. do klm=1,npes-1
  3335. i0t = DistGrid%bounds(1,klm)
  3336. i1t = DistGrid%bounds(2,klm)
  3337. j0t = DistGrid%bounds(3,klm)
  3338. j1t = DistGrid%bounds(4,klm)
  3339. ! if klm is a source processor, receive from it
  3340. if((iref<i0t).or.(iref>i1t))cycle
  3341. n=j1t-j0t+1
  3342. call MPI_TYPE_VECTOR (sz(2), n, gsz(1), my_real, slab, ierr)
  3343. CALL MPI_TYPE_GET_EXTENT( my_real, lb, sizeoftype, ierr)
  3344. stride = gsz(1)*gsz(2)*sizeoftype
  3345. call MPI_TYPE_CREATE_HVECTOR (sz(3), 1, stride, slab, subarr, ierr)
  3346. call MPI_TYPE_FREE (slab, ierr)
  3347. call MPI_TYPE_COMMIT (subarr, ierr)
  3348. call MPI_RECV( glbl_array(j0t,1,1), 1, subarr, klm, iref, localComm, stat, ierr)
  3349. call MPI_TYPE_FREE (subarr, ierr)
  3350. end do
  3351. else
  3352. ! are we on a src processor?
  3353. if((iref<i0).or.(iref>i1))return
  3354. n=j1-j0+1
  3355. call MPI_SEND( dist_array(j0,1,1), n*sz(2)*sz(3), my_real, root, iref, localComm, ierr)
  3356. end if
  3357. #endif
  3358. END SUBROUTINE GATHER_IBAND_3D_R
  3359. !EOC
  3360. !--------------------------------------------------------------------------
  3361. ! TM5 !
  3362. !--------------------------------------------------------------------------
  3363. !BOP
  3364. !
  3365. ! !IROUTINE: REDUCE_2D_R
  3366. !
3367. ! !DESCRIPTION: Global reduction. Data are gathered on root, where OP is
3368. ! then applied, instead of applying OP on each proc and then
3369. ! calling MPI_REDUCE. This ensures bitwise agreement with the
3370. ! serial-code result in the case of SUM.
  3371. !\\
  3372. !\\
  3373. ! !INTERFACE:
  3374. !
  3375. SUBROUTINE REDUCE_2D_R( DistGrid, dist_array, halo, op, resultat, status, all, debug)
  3376. !
  3377. ! !INPUT PARAMETERS:
  3378. !
  3379. type(dist_grid), intent(in) :: DistGrid
  3380. integer, intent(in) :: halo
  3381. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  3382. character(len=3), intent(in) :: op ! 'MAX', 'MIN' or 'SUM'
  3383. logical, intent(in), optional :: all ! mimic mpi_allreduce instead of mpi_reduce
  3384. logical, intent(in), optional :: debug ! print additional information: location of Min/Max
  3385. !
  3386. ! !OUTPUT PARAMETERS:
  3387. !
  3388. real, intent(out) :: resultat
  3389. integer, intent(out) :: status
  3390. !
  3391. ! !REVISION HISTORY:
  3392. ! 01 Nov 2011 - P. Le Sager - v0
  3393. !
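!
! Usage sketch (illustrative only; "field" is a hypothetical distributed 2D
! array with halo "halo"; DIST_ARR_STAT_2D_R below uses the generic name
! REDUCE in the same way):
!
!    real :: gmax
!    call reduce( DistGrid, field, halo, 'MAX', gmax, status, all=.true. )
!    IF_NOTOK_RETURN(status=1)     ! with all=.true., gmax is valid on every PE
!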
  3394. !EOP
  3395. !------------------------------------------------------------------------
  3396. !BOC
  3397. character(len=*), parameter :: rname = mname//'REDUCE_2D_R'
  3398. logical :: x_debug
  3399. integer :: shp(2)
  3400. real, allocatable :: glbl_array(:,:)
  3401. x_debug=.false.
  3402. if(present(debug)) x_debug=debug
  3403. #ifdef MPI
  3404. if (isRoot) then
  3405. allocate( glbl_array( DistGrid%im_region, DistGrid%jm_region ))
  3406. else
  3407. allocate( glbl_array(1,1) )
  3408. end if
  3409. call gather(DistGrid, dist_array, glbl_array, halo, status)
  3410. IF_NOTOK_RETURN(status=1)
  3411. if (isRoot) then
  3412. select case( op )
  3413. case('sum', 'Sum', 'SUM')
  3414. resultat = sum(glbl_array)
  3415. case('max', 'Max', 'MAX')
  3416. resultat = maxval(glbl_array)
  3417. if(x_debug) then
  3418. shp=maxloc(glbl_array)
  3419. write(gol,*) rname //": Max location =", shp; call goPr
  3420. end if
  3421. case('min', 'Min', 'MIN')
  3422. resultat = minval(glbl_array)
  3423. if(x_debug) then
  3424. shp=minloc(glbl_array)
  3425. write(gol,*) rname //": Min location =", shp; call goPr
  3426. end if
  3427. case default
  3428. write(gol,*) 'UNSUPPORTED OPERATION'; call goPr; status=1
  3429. IF_NOTOK_RETURN(status=1)
  3430. end select
  3431. end if
  3432. if (present(all)) then
  3433. if (all) call MPI_bcast(resultat, 1, my_real, root, localComm, ierr)
  3434. end if
  3435. deallocate(glbl_array)
  3436. #else
  3437. select case( op )
  3438. case('sum', 'Sum', 'SUM')
  3439. resultat = sum(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region))
  3440. case('max', 'Max', 'MAX')
  3441. resultat = maxval(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region))
  3442. if(x_debug) then
  3443. shp=maxloc(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region))
  3444. write(gol,*) rname //": Max location =", shp; call goPr
  3445. end if
  3446. case('min', 'Min', 'MIN')
  3447. resultat = minval(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region))
  3448. if(x_debug) then
  3449. shp=minloc(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region))
  3450. write(gol,*) rname //": Min location =", shp; call goPr
  3451. end if
  3452. case default
  3453. write(gol,*) 'UNSUPPORTED OPERATION'; call goPr; status=1
  3454. IF_NOTOK_RETURN(status=1)
  3455. end select
  3456. #endif
  3457. status=0
  3458. END SUBROUTINE REDUCE_2D_R
  3459. !EOC
  3460. !--------------------------------------------------------------------------
  3461. ! TM5 !
  3462. !--------------------------------------------------------------------------
  3463. !BOP
  3464. !
  3465. ! !IROUTINE: REDUCE_3D_R
  3466. !
  3467. ! !DESCRIPTION: same as REDUCE_2D_R, for 3D arrays.
  3468. !\\
  3469. !\\
  3470. ! !INTERFACE:
  3471. !
  3472. SUBROUTINE REDUCE_3D_R( DistGrid, dist_array, halo, op, resultat, status, all, debug)
  3473. !
  3474. ! !INPUT PARAMETERS:
  3475. !
  3476. type(dist_grid), intent(in) :: DistGrid
  3477. integer, intent(in) :: halo
  3478. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:)
  3479. character(len=3), intent(in) :: op ! 'MAX', 'MIN' or 'SUM'
  3480. logical, intent(in), optional :: all ! mimic MPI_ALLREDUCE instead of MPI_REDUCE
  3481. logical, intent(in), optional :: debug ! print additional information: location of Min/Max
  3482. !
  3483. ! !OUTPUT PARAMETERS:
  3484. !
  3485. real, intent(out) :: resultat
  3486. integer, intent(out) :: status
  3487. !
  3488. ! !REVISION HISTORY:
  3489. ! 01 Nov 2011 - P. Le Sager - v0
  3490. !
  3491. !EOP
  3492. !------------------------------------------------------------------------
  3493. !BOC
  3494. character(len=*), parameter :: rname = mname//'REDUCE_3D_R'
  3495. integer :: shp(3)
  3496. logical :: x_debug
  3497. real, allocatable :: glbl_array(:,:,:)
  3498. x_debug=.false.
  3499. if(present(debug)) x_debug=debug
  3500. #ifdef MPI
  3501. shp = shape( dist_array )
  3502. if (isRoot) then
  3503. allocate( glbl_array( DistGrid%im_region, DistGrid%jm_region, shp(3)) )
  3504. else
  3505. allocate( glbl_array(1,1,1) )
  3506. end if
  3507. call gather(DistGrid, dist_array, glbl_array, halo, status)
  3508. IF_NOTOK_RETURN(status=1)
  3509. if (isRoot) then
  3510. select case( op )
  3511. case('sum', 'Sum', 'SUM')
  3512. resultat = sum(glbl_array)
  3513. case('max', 'Max', 'MAX')
  3514. resultat = maxval(glbl_array)
  3515. if(x_debug) then
  3516. shp=maxloc(glbl_array)
  3517. write(gol,*) rname //": Max location =", shp; call goPr
  3518. end if
  3519. case('min', 'Min', 'MIN')
  3520. resultat = minval(glbl_array)
  3521. if(x_debug) then
  3522. shp=minloc(glbl_array)
  3523. write(gol,*) rname //": Min location =", shp; call goPr
  3524. end if
  3525. case default
  3526. write(gol,*) 'UNSUPPORTED OPERATION'; call goPr; status=1
  3527. IF_NOTOK_RETURN(status=1)
  3528. end select
  3529. end if
  3530. if (present(all)) then
  3531. if (all) call MPI_bcast(resultat, 1, my_real, root, localComm, ierr)
  3532. end if
  3533. #else
  3534. select case( op )
  3535. case('sum', 'Sum', 'SUM')
  3536. resultat = sum(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:))
  3537. case('max', 'Max', 'MAX')
  3538. resultat = maxval(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:))
  3539. if(x_debug) then
  3540. shp=maxloc(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:))
  3541. write(gol,*) rname //": Max location =", shp; call goPr
  3542. end if
  3543. case('min', 'Min', 'MIN')
  3544. resultat = minval(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:))
  3545. if(x_debug) then
  3546. shp=minloc(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:))
  3547. write(gol,*) rname //": Min location =", shp; call goPr
  3548. end if
  3549. case default
  3550. write(gol,*) 'UNSUPPORTED OPERATION'; call goPr; status=1
  3551. IF_NOTOK_RETURN(status=1)
  3552. end select
  3553. #endif
  3554. status=0
  3555. END SUBROUTINE REDUCE_3D_R
  3556. !EOC
  3557. !--------------------------------------------------------------------------
  3558. ! TM5 !
  3559. !--------------------------------------------------------------------------
  3560. !BOP
  3561. !
  3562. ! !IROUTINE: REDUCE_4D_R
  3563. !
  3564. ! !DESCRIPTION: same as REDUCE_2D_R, for 4D arrays.
  3565. !\\
  3566. !\\
  3567. ! !INTERFACE:
  3568. !
  3569. SUBROUTINE REDUCE_4D_R( DistGrid, dist_array, halo, op, resultat, status, all, debug)
  3570. !
  3571. ! !INPUT PARAMETERS:
  3572. !
  3573. type(dist_grid), intent(in) :: DistGrid
  3574. integer, intent(in) :: halo
  3575. real, intent(in) :: dist_array(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:,:,:)
  3576. character(len=3), intent(in) :: op ! 'MAX', 'MIN' or 'SUM'
  3577. logical, intent(in), optional :: all ! mimic MPI_ALLREDUCE instead of MPI_REDUCE
  3578. logical, intent(in), optional :: debug ! print additional information: location of Min/Max
  3579. !
  3580. ! !OUTPUT PARAMETERS:
  3581. !
  3582. real, intent(out) :: resultat
  3583. integer, intent(out) :: status
  3584. !
  3585. ! !REVISION HISTORY:
  3586. ! 01 Nov 2011 - P. Le Sager - v0
  3587. !
  3588. !EOP
  3589. !------------------------------------------------------------------------
  3590. !BOC
  3591. character(len=*), parameter :: rname = mname//'reduce_4d_r'
  3592. integer :: shp(4)
  3593. logical :: x_debug
  3594. real, allocatable :: glbl_array(:,:,:,:)
  3595. x_debug=.false.
  3596. if(present(debug)) x_debug=debug
  3597. #ifdef MPI
  3598. shp = shape( dist_array )
  3599. if (isRoot) then
  3600. allocate( glbl_array( DistGrid%im_region, DistGrid%jm_region, shp(3), shp(4)) )
  3601. else
  3602. allocate( glbl_array(1,1,1,1) )
  3603. end if
  3604. call gather(DistGrid, dist_array, glbl_array, halo, status)
  3605. IF_NOTOK_RETURN(status=1)
  3606. if (isRoot) then
  3607. select case( op )
  3608. case('sum', 'Sum', 'SUM')
  3609. resultat = sum(glbl_array)
  3610. case('max', 'Max', 'MAX')
  3611. resultat = maxval(glbl_array)
  3612. if(x_debug) then
  3613. shp=maxloc(glbl_array)
  3614. write(gol,*) rname //": Max location =", shp; call goPr
  3615. end if
  3616. case('min', 'Min', 'MIN')
  3617. resultat = minval(glbl_array)
  3618. if(x_debug) then
  3619. shp=minloc(glbl_array)
  3620. write(gol,*) rname //": Min location =", shp; call goPr
  3621. end if
  3622. case default
  3623. write(gol,*) 'UNSUPPORTED OPERATION'; call goPr; status=1
  3624. IF_NOTOK_RETURN(status=1)
  3625. end select
  3626. end if
  3627. if (present(all)) then
  3628. if (all) call MPI_bcast(resultat, 1, my_real, root, localComm, ierr)
  3629. end if
  3630. #else
  3631. select case( op )
  3632. case('sum', 'Sum', 'SUM')
  3633. resultat = sum(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:,:))
  3634. case('max', 'Max', 'MAX')
  3635. resultat = maxval(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:,:))
  3636. if(x_debug) then
  3637. shp=maxloc(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:,:))
  3638. write(gol,*) rname //": Max location =", shp; call goPr
  3639. end if
  3640. case('min', 'Min', 'MIN')
  3641. resultat = minval(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:,:))
  3642. if(x_debug) then
  3643. shp=minloc(dist_array(1:DistGrid%im_region, 1:DistGrid%jm_region,:,:))
  3644. write(gol,*) rname //": Min location =", shp; call goPr
  3645. end if
  3646. case default
  3647. write(gol,*) 'UNSUPPORTED OPERATION'; call goPr; status=1
  3648. IF_NOTOK_RETURN(status=1)
  3649. end select
  3650. #endif
  3651. status=0
  3652. END SUBROUTINE REDUCE_4D_R
  3653. !EOC
  3654. !--------------------------------------------------------------------------
  3655. ! TM5 !
  3656. !--------------------------------------------------------------------------
  3657. !BOP
  3658. !
  3659. ! !IROUTINE: DIST_ARR_STAT_2D_R
  3660. !
  3661. ! !DESCRIPTION: calculate and print the MIN, MEAN, MAX of a 2D distributed
  3662. ! real field. This is basically a wrapper around several calls
  3663. ! to REDUCE.
  3664. !
  3665. ! *** SHOULD BE CALLED ONLY FOR DEBUGGING PURPOSES ***
  3666. !\\
  3667. !\\
  3668. ! !INTERFACE:
  3669. !
  3670. SUBROUTINE DIST_ARR_STAT_2D_R( DistGrid, name, arr, halo, status)
  3671. !
  3672. ! !INPUT PARAMETERS:
  3673. !
  3674. type(dist_grid), intent(in) :: DistGrid
  3675. character(len=*), intent(in) :: name ! verbose helper
  3676. integer, intent(in) :: halo
  3677. real, intent(in) :: arr(DistGrid%i_strt-halo:,DistGrid%j_strt-halo:)
  3678. !
  3679. ! !OUTPUT PARAMETERS:
  3680. !
  3681. integer, intent(out):: status
  3682. !
  3683. ! !REVISION HISTORY:
  3684. ! 7 Mar 2012 - P. Le Sager - adapted for lon-lat MPI domain
  3685. ! decomposition, from DD_FIELD_STATISTICS in
  3686. ! both DIFFUSION.F90 and DRY_DEPOSITION.F90
  3687. ! !REMARKS:
  3688. ! (1) FIXME : does not compute the mean of only non-zero data anymore
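!
! Usage sketch (illustrative only; "emis" is a hypothetical distributed 2D
! field with a halo of 2):
!
!    call DIST_ARR_STAT_2D_R( DistGrid, 'emis', emis, 2, status )
!    IF_NOTOK_RETURN(status=1)
!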
  3689. !EOP
  3690. !------------------------------------------------------------------------
  3691. !BOC
  3692. character(len=*), parameter :: rname=mname//'dist_arr_stat_2d_r'
  3693. integer :: ntel_non_zero, nx, ny
  3694. real :: maxf, minf, meanf, mean_non_zero
  3695. call reduce( DistGrid, arr, halo, 'MAX', maxf, status)
  3696. IF_NOTOK_RETURN(status=1)
  3697. call reduce( DistGrid, arr, halo, 'MIN', minf, status)
  3698. IF_NOTOK_RETURN(status=1)
  3699. call reduce( DistGrid, arr, halo, 'SUM', meanf, status)
  3700. IF_NOTOK_RETURN(status=1)
  3701. if(isRoot) then
  3702. nx = DistGrid%im_region
  3703. ny = DistGrid%jm_region
  3704. meanf = meanf / ( nx*ny )
  3705. write(gol,'(a10,3(a5,1x,1pe10.3))') name,' max=', maxf,' min=',minf,'mean=',meanf!,'mn0',mean_non_zero
  3706. call goPr
  3707. end if
  3708. status=0
  3709. END SUBROUTINE DIST_ARR_STAT_2D_R
  3710. !EOC
  3711. !--------------------------------------------------------------------------
  3712. ! TM5 !
  3713. !--------------------------------------------------------------------------
  3714. !BOP
  3715. !
  3716. ! !IROUTINE: PRINT_DISTGRID
  3717. !
  3718. ! !DESCRIPTION: utility that prints information about a distributed grid
  3719. !\\
  3720. !\\
  3721. ! !INTERFACE:
  3722. !
  3723. SUBROUTINE PRINT_DISTGRID ( DistGrid )
  3724. !
  3725. ! !INPUT PARAMETERS:
  3726. !
  3727. type(dist_grid), intent(in) :: DistGrid
  3728. !
  3729. ! !REVISION HISTORY:
  3730. ! 01 Nov 2011 - P. Le Sager - v0
  3731. !
  3732. !EOP
  3733. !------------------------------------------------------------------------
  3734. !BOC
  3735. integer, parameter :: maxrow=5
  3736. integer, parameter :: maxcol=5
  3737. integer :: sz1(1), i1
  3738. ! header
  3739. write(gol,*) "========== Start Distributed Grid ==================="; call goPr
  3740. ! dist_grid
  3741. write(gol,*) "im_region :" , DistGrid%im_region ; call goPr
  3742. write(gol,*) "jm_region :" , DistGrid%jm_region ; call goPr
  3743. write(gol,*) "i_strt :" , DistGrid%i_strt ; call goPr
  3744. write(gol,*) "i_stop :" , DistGrid%i_stop ; call goPr
  3745. write(gol,*) "i_strt_halo :" , DistGrid%i_strt_halo ; call goPr
  3746. write(gol,*) "i_stop_halo :" , DistGrid%i_stop_halo ; call goPr
  3747. write(gol,*) "j_strt :" , DistGrid%j_strt ; call goPr
  3748. write(gol,*) "j_stop :" , DistGrid%j_stop ; call goPr
  3749. write(gol,*) "j_strt_halo :" , DistGrid%j_strt_halo ; call goPr
  3750. write(gol,*) "j_stop_halo :" , DistGrid%j_stop_halo ; call goPr
  3751. write(gol,*) "has_north_pole:" , DistGrid%has_north_pole ; call goPr
  3752. write(gol,*) "has_south_pole:" , DistGrid%has_south_pole ; call goPr
  3753. ! physical grid
  3754. write(gol,*) '------------- physical grid -------------------' ; call goPr
  3755. write(gol,*) "llgrid name:", trim(DistGrid%lli%name) ; call goPr
  3756. write(gol,*) "R[m] :", DistGrid%lli%R ; call goPr
  3757. write(gol,*) "dlon[deg] :", DistGrid%lli%dlon_deg ; call goPr
  3758. write(gol,*) "dlat[deg] :", DistGrid%lli%dlat_deg ; call goPr
  3759. write(gol,*) "im :", DistGrid%lli%im ; call goPr
  3760. write(gol,*) "jm :", DistGrid%lli%jm ; call goPr
  3761. if (associated(DistGrid%lli%lon_deg)) then
  3762. sz1 = shape(DistGrid%lli%lon_deg)
  3763. i1 = min(maxcol,sz1(1))
  3764. write(gol,*) "lon = ",DistGrid%lli%lon_deg(1:i1); call goPr
  3765. endif
  3766. if (associated(DistGrid%lli%lat_deg)) then
  3767. sz1=shape(DistGrid%lli%lat_deg)
  3768. i1=min(maxrow,sz1(1))
  3769. write(gol,*) "lat = ",DistGrid%lli%lat_deg(1:i1); call goPr
  3770. endif
  3771. ! footer
  3772. write(gol,*) "========== End Distributed Grid ===================" ; call goPr
  3773. END SUBROUTINE PRINT_DISTGRID
  3774. !EOC
  3775. !--------------------------------------------------------------------------
  3776. ! TM5 !
  3777. !--------------------------------------------------------------------------
  3778. !BOP
  3779. !
  3780. ! !IROUTINE: TESTCOMM
  3781. !
  3782. ! !DESCRIPTION: check if the communications are working as expected.
  3783. ! Currently checked:
  3784. ! - GATHER, SCATTER, UPDATE_HALO (2D, 3D, 4D)
  3785. ! - SCATTER_I_BAND, SCATTER_J_BAND (1D, 2D)
  3786. !\\
  3787. !\\
  3788. ! !INTERFACE:
  3789. !
  3790. SUBROUTINE TESTCOMM( DistGrid, halo, status )
  3791. !
  3792. ! !INPUT PARAMETERS:
  3793. !
  3794. type(dist_grid), intent(in) :: DistGrid
  3795. integer, intent(in) :: halo
  3796. !
  3797. ! !OUTPUT PARAMETERS:
  3798. !
  3799. integer, intent(out) :: status
  3800. !
  3801. ! !REVISION HISTORY:
  3802. ! 01 Nov 2011 - P. Le Sager - v0
  3803. !
  3804. ! !REMARKS:
  3805. ! (1) to run with different halo sizes
3806. ! (2) note that it will not catch some errors in halo_update if too few processors are used
  3807. !
  3808. !EOP
  3809. !------------------------------------------------------------------------
  3810. !BOC
  3811. character(len=*), parameter :: rname = mname//'testcomm'
  3812. ! real, allocatable :: src1(:), tgt1(:), res1(:)
  3813. ! real, allocatable :: src2(:,:), tgt2(:,:), res2(:,:)
  3814. ! real, allocatable :: larray2a(:,:), larray2b(:,:), glb2a(:,:), glb2b(:,:)
  3815. ! real, allocatable :: larray3a(:,:,:), larray3b(:,:,:), glb3a(:,:,:), glb3b(:,:,:)
  3816. ! real, allocatable :: larray4a(:,:,:,:), larray4b(:,:,:,:)
  3817. ! real, allocatable :: glb4a(:,:,:,:), glb4b(:,:,:,:)
  3818. ! integer :: i0, j0, i1, j1, x_halo, k, levels, l, trac, iref(2), jref(2)
  3819. ! logical :: south, north, west, east, test
  3820. ! character(len=*), parameter :: form='(f4.0)'
  3821. ! levels=17
  3822. ! trac=5
  3823. ! ! General
  3824. ! call Get_DistGrid( DistGrid, I_STRT=i0, I_STOP=i1, J_STRT=j0, J_STOP=j1, &
  3825. ! hasEastBorder=east, hasWestBorder=west, &
  3826. ! hasSouthBorder=south, hasNorthBorder=north )
  3827. ! x_halo=halo
  3828. ! status=0
  3829. ! if(isRoot) print*, "========= TESTING COMM FOR HALO=",x_HALO
  3830. ! call par_barrier()
  3831. ! ! *************************** SCATTER BAND ***************************
  3832. ! iref=(/ 1, DistGrid%im_region/) ! to test i_band along west and east border
  3833. ! jref=(/ 1, DistGrid%jm_region/) ! to test j_band along south and north border
  3834. ! if(isRoot) then
  3835. ! allocate(src1(DistGrid%im_region))
  3836. ! else
  3837. ! allocate(src1(1))
  3838. ! end if
  3839. ! allocate(tgt1(i0:i1), res1(i0:i1))
  3840. ! if (isRoot) src1 = (/(k, k=1,DistGrid%im_region)/)
  3841. ! res1 = (/(k, k=i0,i1)/)
  3842. ! do trac=1,2
  3843. ! tgt1=0
  3844. ! call scatter_j_band(distgrid, tgt1, src1, status, jref=jref(trac))
  3845. ! IF_NOTOK_RETURN(status=1)
  3846. ! test=((trac==1).and.south).or.((trac==2).and.north)
  3847. ! ! diff should be 0 along borders only
  3848. ! if (maxval(abs(res1-tgt1)) /= 0.) then
  3849. ! if(test) then
  3850. ! print*, "test scatter_J_band 1D FAILED at border:"
  3851. ! !print*, i0,i1,tgt1(i0), tgt1(i1), res1(i0), res1(i1)
  3852. ! status=1
  3853. ! !else ! Expected only if tgt1 has inout attribute in scatter routine
  3854. ! ! print*, "test scatter_J_band 1D PASSED inside:"
  3855. ! ! print*, i0,i1,tgt1(i0), tgt1(i1), res1(i0), res1(i1)
  3856. ! end if
  3857. ! else
  3858. ! if(test) then
  3859. ! print*, "test scatter_J_band 1D PASSED at border"
  3860. ! !print*, i0,i1,tgt1(i0), tgt1(i1), res1(i0), res1(i1)
  3861. ! !else ! Expected only if tgt1 has inout attribute in scatter routine
  3862. ! ! print*, "test scatter_J_band 1D FAILED inside"
  3863. ! ! print*, i0,i1,tgt1(i0), tgt1(i1), res1(i0), res1(i1)
  3864. ! ! status=1
  3865. ! end if
  3866. ! end if
  3867. ! IF_NOTOK_RETURN(status=1)
  3868. ! end do
  3869. ! deallocate(src1, tgt1, res1)
  3870. ! if(isRoot) then
  3871. ! allocate(src1(DistGrid%jm_region))
  3872. ! else
  3873. ! allocate(src1(1))
  3874. ! end if
  3875. ! allocate(tgt1(j0:j1), res1(j0:j1))
  3876. ! if (isRoot) src1 = (/(k, k=1,DistGrid%jm_region)/)
  3877. ! res1 = (/(k, k=j0,j1)/)
  3878. ! do trac=1,2
  3879. ! tgt1=0
  3880. ! call scatter_i_band(distgrid, tgt1, src1, status, iref=iref(trac))
  3881. ! IF_NOTOK_RETURN(status=1)
  3882. ! test=((trac==1).and.west).or.((trac==2).and.east)
  3883. ! ! diff should be 0 along borders only
  3884. ! if (maxval(abs(res1-tgt1)) /= 0.) then
  3885. ! if(test) then
  3886. ! print*, "test scatter_I_band 1D FAILED at border"
  3887. ! status=1
  3888. ! !else
  3889. ! ! print*, "test scatter_I_band 1D PASSED inside"
  3890. ! end if
  3891. ! else
  3892. ! if(test) then
  3893. ! print*, "test scatter_I_band 1D PASSED at border"
  3894. ! !else
  3895. ! ! print*, "test scatter_I_band 1D FAILED inside"
  3896. ! ! status=1
  3897. ! end if
  3898. ! end if
  3899. ! IF_NOTOK_RETURN(status=1)
  3900. ! end do
  3901. ! deallocate(src1, tgt1, res1)
  3902. ! ! ---------------- 2D
!    if (isRoot) then
!       allocate(src2(DistGrid%im_region, levels))
!    else
!       allocate(src2(1,1))
!    end if
!    allocate(tgt2(i0:i1,levels), res2(i0:i1,levels))
!    do l=1,levels
!       if (isRoot) src2(:,l) = (/(k, k=1,DistGrid%im_region)/) * l
!       res2(:,l) = (/(k, k=i0,i1)/) * l
!    end do
!
!    do trac=1,2
!       tgt2=0
!       call scatter_j_band(distgrid, tgt2, src2, status, jref=jref(trac))
!       IF_NOTOK_RETURN(status=1)
!
!       test=((trac==1).and.south).or.((trac==2).and.north)
!
!       ! diff should be 0 along borders only
!       if (maxval(abs(res2-tgt2)) /= 0.) then
!          if (test) then
!             print*, "test scatter_J_band 2D FAILED at border"
!             status=1
!          !else
!          !   print*, "test scatter_J_band 2D PASSED inside"
!          end if
!       else
!          if (test) then
!             print*, "test scatter_J_band 2D PASSED at border"
!          !else
!          !   print*, "test scatter_J_band 2D FAILED inside"
!          !   status=1
!          end if
!       end if
!       IF_NOTOK_RETURN(status=1)
!    end do
!    deallocate(src2, tgt2, res2)
!
!    if (isRoot) then
!       allocate(src2(DistGrid%jm_region,levels))
!    else
!       allocate(src2(1,1))
!    end if
!    allocate(tgt2(j0:j1,levels), res2(j0:j1,levels))
!    do l=1,levels
!       if (isRoot) src2(:,l) = (/(k, k=1,DistGrid%jm_region)/)*l
!       res2(:,l) = (/(k, k=j0,j1)/)*l
!    end do
!
!    do trac=1,2
!       tgt2=0
!       call scatter_i_band(distgrid, tgt2, src2, status, iref=iref(trac))
!       IF_NOTOK_RETURN(status=1)
!
!       test=((trac==1).and.west).or.((trac==2).and.east)
!
!       ! diff should be 0 along borders only
!       if (maxval(abs(res2-tgt2)) /= 0.) then
!          if (test) then
!             print*, "test scatter_I_band 2D FAILED at border"
!             status=1
!          !else
!          !   print*, "test scatter_I_band 2D PASSED inside"
!          end if
!       else
!          if (test) then
!             print*, "test scatter_I_band 2D PASSED at border"
!          !else
!          !   print*, "test scatter_I_band 2D FAILED inside"
!          !   status=1
!          end if
!       end if
!       IF_NOTOK_RETURN(status=1)
!    end do
!    deallocate(src2, tgt2, res2)
!
!    ! *************************** GATHER/SCATTER ***************************
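!    ! Round-trip pattern for the tests below: fill the local interior with
!    ! myid (times level/tracer factors), GATHER into the global array on
!    ! root, broadcast it, and compare the interior slice; then SCATTER the
!    ! gathered global array and check that the local interior is recovered.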
!    !----------------------
!    ! Allocate 2D
!    !----------------------
!    allocate( larray2a( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo) )
!    allocate( larray2b( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo) )
!    allocate( glb2a(DistGrid%im_region, DistGrid%jm_region) )
!
!    ! in halo, 0, elsewhere myid
!    larray2a=0.
!    larray2a(i0:i1,j0:j1)=real(myid)
!
!    ! glb2b is global array, used in root only
!    if (isRoot) then
!       allocate( glb2b( DistGrid%im_region, DistGrid%jm_region) )
!    else
!       allocate( glb2b(1,1) )
!    end if
!
!    !----------------------
!    ! test GATHER_2D_R
!    !----------------------
!    glb2b=0.
!    larray2b=0.
!
!    ! gather
!    call gather( DistGrid, larray2a, glb2b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!
!    ! broadcast result
!    if (isRoot) glb2a = glb2b
!#ifdef MPI
!    call MPI_bcast(glb2a, size(glb2a), my_real, root, localComm, ierr)
!#endif
!    larray2b(i0:i1,j0:j1) = glb2a(i0:i1,j0:j1)
!    larray2b = larray2a-larray2b
!
!    ! diff should be 0
!    if (maxval(abs(larray2b)) /= 0.) then
!       print*, "test gather 2d FAILED"
!       status=1
!    else
!       print*, "test gather 2d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    call par_barrier()
!
!    !----------------------
!    ! test SCATTER_2D_R
!    !----------------------
!    larray2b=0.
!    call scatter( DistGrid, larray2b, glb2b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!    larray2b=larray2a-larray2b
!
!    ! diff should be 0
!    if (maxval(abs(larray2b)) /= 0.) then
!       print*, "test scatter 2d FAILED"
!       status=1
!    else
!       print*, "test scatter 2d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    ! CLEANUP
!    deallocate(larray2a,larray2b,glb2a,glb2b)
!
!    call par_barrier()
!
!    !----------------------
!    ! Allocate 3D
!    !----------------------
!    allocate( larray3a( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels) )
!    allocate( larray3b( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels) )
!    allocate( glb3a( DistGrid%im_region, DistGrid%jm_region, levels) )
!
!    ! in halo, 0, elsewhere myid*level
!    larray3a=0.
!    do k=1,levels
!       larray3a(i0:i1,j0:j1,k)=real(myid*k)
!    end do
!    ! glb3b is the global array, used on root only
!    if (isRoot) then
!       allocate( glb3b( DistGrid%im_region, DistGrid%jm_region, levels) )
!    else
!       allocate( glb3b(1,1,1) )
!    end if
!
!    !----------------------
!    ! test GATHER_3D_R
!    !----------------------
!    glb3b=0.
!    larray3b=0.
!
!    ! gather
!    call gather( DistGrid, larray3a, glb3b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!
!    ! broadcast result
!    if (isRoot) glb3a = glb3b
!#ifdef MPI
!    call MPI_bcast(glb3a, size(glb3a), my_real, root, localComm, ierr)
!#endif
!    larray3b(i0:i1,j0:j1,:) = glb3a(i0:i1,j0:j1,:)
!    larray3b = larray3a-larray3b
!
!    ! diff should be 0
!    if (maxval(abs(larray3b)) /= 0.) then
!       print*, "test gather 3d FAILED"
!       status=1
!    else
!       print*, "test gather 3d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    call par_barrier()
!
!    !----------------------
!    ! test SCATTER_3D_R
!    !----------------------
!    larray3b=0.
!    call scatter( DistGrid, larray3b, glb3b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!    larray3b=larray3a-larray3b
!
!    ! diff should be 0
!    if (maxval(abs(larray3b)) /= 0.) then
!       print*, "test scatter 3d FAILED"
!       status=1
!    else
!       print*, "test scatter 3d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    ! CLEANUP
!    deallocate(larray3a,larray3b,glb3a,glb3b)
!
!    call par_barrier()
!
!    !----------------------
!    ! Allocate 4D
!    !----------------------
!    allocate( larray4a( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels, trac) )
!    allocate( larray4b( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels, trac) )
!    allocate( glb4a( DistGrid%im_region, DistGrid%jm_region, levels, trac) )
!
!    ! in halo, 0, elsewhere (myid+1)*level*trac
!    larray4a=0.
!    do l=1,trac
!       do k=1,levels
!          larray4a(i0:i1,j0:j1,k,l)=real((myid+1)*k*l)
!       end do
!    end do
!
!    ! glb4b is global array, used in root only
!    if (isRoot) then
!       allocate( glb4b( DistGrid%im_region, DistGrid%jm_region, levels, trac) )
!    else
!       allocate( glb4b(1,1,1,1) )
!    end if
!
!    !----------------------
!    ! test GATHER_4D_R
!    !----------------------
!    glb4b=0.
!    larray4b=0.
!
!    ! gather
!    call gather( DistGrid, larray4a, glb4b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!
!    ! broadcast result
!    if (isRoot) glb4a = glb4b
!#ifdef MPI
!    call MPI_bcast(glb4a, size(glb4a), my_real, root, localComm, ierr)
!#endif
!    larray4b(i0:i1,j0:j1,:,:) = glb4a(i0:i1,j0:j1,:,:)
!    larray4b = larray4a-larray4b
!
!    ! diff should be 0
!    if (maxval(abs(larray4b)) /= 0.) then
!       print*, "test gather 4d FAILED"
!       status=1
!    else
!       print*, "test gather 4d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    call par_barrier()
!
!    !----------------------
!    ! test SCATTER_4D_R
!    !----------------------
!    larray4b=0.
!    call scatter( DistGrid, larray4b, glb4b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!    larray4b=larray4a-larray4b
!
!    ! diff should be 0
!    if (maxval(abs(larray4b)) /= 0.) then
!       print*, "test scatter 4d FAILED"
!       status=1
!    else
!       print*, "test scatter 4d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    ! CLEANUP
!    deallocate(larray4a,larray4b,glb4a,glb4b)
!
!    call par_barrier()
!
!    ! *************************************** HALO ***************************
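!    ! Halo exchange tests: the reference array gets each neighbor's rank
!    ! written into the corresponding halo strip (entries left at 0 where the
!    ! neighbor is MPI_PROC_NULL), while the array to update starts with
!    ! zeroed halos. After UPDATE_HALO both arrays must be identical.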
!    !----------------------
!    ! test UPDATE_HALO_2D_R
!    !----------------------
!    ! Allocate 2D
!    allocate( larray2a( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo) )
!    allocate( larray2b( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo) )
!
!    ! array to update : in halo set to 0, elsewhere to myid
!    larray2b=0.
!    larray2b(i0:i1,j0:j1)=real(myid)
!
!    ! test array : fill halo with neighbors rank
!    larray2a=0.
!    larray2a( i0-x_halo:i0-1, j0:j1 ) = DistGrid%neighbors(1)  ! west halo
!    larray2a( i1+1:i1+x_halo, j0:j1 ) = DistGrid%neighbors(3)  ! east halo
!    larray2a( i0:i1, j1+1:j1+x_halo ) = DistGrid%neighbors(2)  ! north halo
!    larray2a( i0:i1, j0-x_halo:j0-1 ) = DistGrid%neighbors(4)  ! south halo
!    larray2a(i0:i1,j0:j1)=real(myid)                           ! interior
!    where (larray2a == MPI_PROC_NULL) larray2a=0.              ! if no neighbor
!
!    ! update
!    CALL UPDATE_HALO( DISTGRID, larray2b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!
!    if (isRoot.and.(x_halo==1)) then  ! a width of 32 prints nicely for 2x2 processes and halo=1
!       print*, "----------------------------"
!       print '(32F4.0)', larray2a
!       print*, "----------------------------"
!       print '(32F4.0)', larray2b
!       print*, "----------------------------"
!    end if
!
!    ! compare (diff should be 0)
!    larray2b=larray2a-larray2b
!    if (maxval(abs(larray2b)) /= 0.) then
!       print*, "test update_halo 2d FAILED"
!       status=1
!    else
!       print*, "test update_halo 2d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    ! CLEANUP
!    deallocate(larray2a,larray2b)
!
!    call par_barrier()
!
!    !----------------------
!    ! test UPDATE_HALO_3D_R
!    !----------------------
!    ! Allocate 3D
!    allocate( larray3a( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels) )
!    allocate( larray3b( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels) )
!
!    ! array to update : in halo set to 0, elsewhere to myid
!    larray3b=0.
!    larray3b(i0:i1,j0:j1,:)=real(myid)
!
!    ! test array : fill halo with neighbors rank
!    larray3a=0.
!    larray3a( i0-x_halo:i0-1, j0:j1, : ) = DistGrid%neighbors(1)  ! west halo
!    larray3a( i1+1:i1+x_halo, j0:j1, : ) = DistGrid%neighbors(3)  ! east halo
!    larray3a( i0:i1, j1+1:j1+x_halo, : ) = DistGrid%neighbors(2)  ! north halo
!    larray3a( i0:i1, j0-x_halo:j0-1, : ) = DistGrid%neighbors(4)  ! south halo
!    larray3a(i0:i1,j0:j1,:)=real(myid)                            ! interior
!    where (larray3a == MPI_PROC_NULL) larray3a=0.                 ! if no neighbor
!
!    ! update
!    CALL UPDATE_HALO( DISTGRID, larray3b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!
!    ! compare (diff should be 0)
!    larray3b=larray3a-larray3b
!    if (maxval(abs(larray3b)) /= 0.) then
!       print*, "test update_halo 3d FAILED"
!       status=1
!    else
!       print*, "test update_halo 3d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    ! CLEANUP
!    deallocate(larray3a,larray3b)
!
!    call par_barrier()
!
!    !----------------------
!    ! test UPDATE_HALO_4D_R
!    !----------------------
!    ! Allocate 4D
!    allocate( larray4a( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels, trac) )
!    allocate( larray4b( i0-x_halo:i1+x_halo, j0-x_halo:j1+x_halo, levels, trac) )
!
!    ! array to update : in halo set to 0, elsewhere to myid
!    larray4b=0.
!    larray4b(i0:i1,j0:j1,:,:)=real(myid)
!
!    ! test array : fill halo with neighbors rank
!    larray4a=0.
!    larray4a( i0-x_halo:i0-1, j0:j1, :,: ) = DistGrid%neighbors(1)  ! west halo
!    larray4a( i1+1:i1+x_halo, j0:j1, :,: ) = DistGrid%neighbors(3)  ! east halo
!    larray4a( i0:i1, j1+1:j1+x_halo, :,: ) = DistGrid%neighbors(2)  ! north halo
!    larray4a( i0:i1, j0-x_halo:j0-1, :,: ) = DistGrid%neighbors(4)  ! south halo
!    larray4a(i0:i1,j0:j1,:,:)=real(myid)                            ! interior
!    where (larray4a == MPI_PROC_NULL) larray4a=0.                   ! if no neighbor
!
!    ! update
!    CALL UPDATE_HALO( DISTGRID, larray4b, x_halo, status)
!    IF_NOTOK_RETURN(status=1)
!
!    ! compare (diff should be 0)
!    larray4b=larray4a-larray4b
!    if (maxval(abs(larray4b)) /= 0.) then
!       print*, "test update_halo 4d FAILED"
!       status=1
!    else
!       print*, "test update_halo 4d PASSED"
!    end if
!    IF_NOTOK_RETURN(status=1)
!
!    ! CLEANUP
!    deallocate(larray4a,larray4b)
!
!    call par_barrier()
    status = 0

  END SUBROUTINE TESTCOMM
!EOC

END MODULE DOMAIN_DECOMP