# icb_combrest.py
  1. import os
  2. from netCDF4 import Dataset
  3. from argparse import ArgumentParser
  4. import numpy as np
  5. import sys
  6. #
  7. # Basic iceberg trajectory restart post-processing python script.
  8. # This script collects iceberg information from the distributed restarts written
  9. # out by each processing region and writes the information into a global restart file.
  10. # The global restart file must already exist and contain the collated 2D spatial fields
  11. # as prepared by utilities such as rebuild_nemo. This python script simply adds the
  12. # iceberg position and state data that is held using the unlimited dimension 'n' which
  13. # has been ignored by rebuild_nemo. Each processing region that contains icebergs will
  14. # (probably) have a different number of icebergs (indicated by differing values for the
  15. # current size of the unlimited dimension). This script collects all icebergs into a
  16. # single unordered list.
  17. #
  18. parser = ArgumentParser(description='produce a global trajectory restart file from distributed output\
  19. files, e.g. \n python ./icb_pp.py \
  20. -f icebergs_00692992_restart_ -n 480 -o icebergs_restart.nc [-O]')
  21. parser.add_argument('-f',dest='froot',help='fileroot_of_distrbuted_data; root name of \
  22. distributed trajectory restart file (usually completed with XXXX.nc, where \
  23. XXXX is the 4 digit processor number)',
  24. default='icebergs_00692992_restart_')
  25. parser.add_argument('-n',dest='fnum',help='number of distributed files to process',
  26. type=int, default=None)
  27. parser.add_argument('-o',dest='fout',help='global_iceberg_restart; file name to append the \
  28. global iceberg restart data to.', default='icebergs_restart.nc')
  29. parser.add_argument('-O',dest='fcre',help='Create the output file from scratch rather than \
  30. append to an existing file.', \
  31. action='store_true', default=False)
  32. args = parser.parse_args()
  33. default_used = 0
  34. if args.froot is None:
  35. pathstart = 'icebergs_00692992_restart_'
  36. default_used = 1
  37. else:
  38. pathstart = args.froot
  39. if args.fnum is None:
  40. procnum = 0
  41. default_used = 1
  42. else:
  43. procnum = args.fnum
  44. if args.fout is None:
  45. pathout = 'icebergs_restart.nc'
  46. default_used = 1
  47. else:
  48. pathout = args.fout
  49. if default_used == 1:
  50. print('At least one default value will be used; command executing is:')
  51. print('icb_combrest.py -f ',pathstart,' -n ',procnum,' -o ',pathout)
  52. if procnum < 1:
  53. print('Need some files to collate! procnum = ',procnum)
  54. sys.exit(11)
  55. icu = []
  56. times = []
  57. ntraj = 0
  58. nk = 0
  59. #
  60. # Loop through all distributed datasets to obtain the total number
  61. # of icebergs to transfer
  62. #
  63. for n in range(procnum):
  64. nn = '%4.4d' % n
  65. try:
  66. fw = Dataset(pathstart+nn+'.nc')
  67. except:
  68. print 'Error: unable to open input file: ' + pathstart+nn+'.nc'
  69. sys.exit(12)
  70. for d in fw.dimensions :
  71. if d == 'n' :
  72. if len(fw.dimensions['n']) > 0:
  73. # print 'icebergs found in: ' + pathstart+nn+'.nc'
  74. if len(fw.dimensions['k']) > nk :
  75. nk = len(fw.dimensions['k'])
  76. ntraj = ntraj + len(fw.dimensions['n'])
  77. fw.close()
  78. #
  79. print(ntraj, ' icebergs found across all datasets')
  80. #
  81. # Declare 2-D arrays to receive the data from all files
  82. #
  83. lons = np.zeros(ntraj)
  84. lats = np.zeros(ntraj)
  85. xis = np.zeros(ntraj)
  86. yjs = np.zeros(ntraj)
  87. uvs = np.zeros(ntraj)
  88. vvs = np.zeros(ntraj)
  89. mas = np.zeros(ntraj)
  90. ths = np.zeros(ntraj)
  91. wis = np.zeros(ntraj)
  92. les = np.zeros(ntraj)
  93. dys = np.zeros(ntraj)
  94. mss = np.zeros(ntraj)
  95. msb = np.zeros(ntraj)
  96. hds = np.zeros(ntraj)
  97. yrs = np.zeros(ntraj , dtype=int)
  98. num = np.zeros((ntraj, nk), dtype=int)
  99. #
  100. # loop through distributed datasets again, this time
  101. # collecting all trajectory data
  102. #
  103. nt = 0
  104. for n in range(procnum):
  105. nn = '%4.4d' % n
  106. fw = Dataset(pathstart+nn+'.nc')
  107. for d in fw.dimensions :
  108. if d == 'n' :
  109. # Note many distributed datafiles will contain no iceberg data
  110. # so skip quickly over these
  111. m = len(fw.dimensions['n'])
  112. if m > 0:
  113. # print pathstart+nn+'.nc'
  114. lons[nt:nt+m] = fw.variables['lon'][:]
  115. lats[nt:nt+m] = fw.variables['lat'][:]
  116. xis[nt:nt+m] = fw.variables['xi'][:]
  117. yjs[nt:nt+m] = fw.variables['yj'][:]
  118. uvs[nt:nt+m] = fw.variables['uvel'][:]
  119. vvs[nt:nt+m] = fw.variables['vvel'][:]
  120. mas[nt:nt+m] = fw.variables['mass'][:]
  121. ths[nt:nt+m] = fw.variables['thickness'][:]
  122. wis[nt:nt+m] = fw.variables['width'][:]
  123. les[nt:nt+m] = fw.variables['length'][:]
  124. dys[nt:nt+m] = fw.variables['day'][:]
  125. mss[nt:nt+m] = fw.variables['mass_scaling'][:]
  126. msb[nt:nt+m] = fw.variables['mass_of_bits'][:]
  127. hds[nt:nt+m] = fw.variables['heat_density'][:]
  128. yrs[nt:nt+m] = fw.variables['year'][:]
  129. num[nt:nt+m,:] = fw.variables['number'][:,:]
  130. nt = nt + m
  131. fw.close()
  132. # Finally create the output file and write out the collated sets
  133. #
  134. if args.fcre :
  135. try:
  136. fo = Dataset(pathout, 'w', format='NETCDF4')
  137. except:
  138. print 'Error accessing output file: ' + pathout
  139. print 'Check it is a writable location.'
  140. sys.exit(13)
  141. else :
  142. # Copy 2D variables across to output file from input file. This step avoids problems if rebuild_nemo
  143. # has created an "n" dimension in the prototype rebuilt file (ie. if there are icebergs on the zeroth
  144. # processor).
  145. try:
  146. os.rename(pathout,pathout.replace('.nc','_WORK.nc'))
  147. except OSError:
  148. print 'Error: unable to move icebergs restart file: '+pathout
  149. sys.exit(14)
  150. #
  151. try:
  152. fi = Dataset(pathout.replace('.nc','_WORK.nc'), 'r')
  153. except:
  154. print 'Error: unable to open icebergs restart file: '+pathout.replace('.nc','_WORK.nc')
  155. sys.exit(15)
  156. fo = Dataset(pathout, 'w')
  157. for dim in ['x','y','c','k']:
  158. indim = fi.dimensions[dim]
  159. fo.createDimension(dim, len(indim))
  160. for var in ['kount','calving','calving_hflx','stored_ice','stored_heat']:
  161. invar = fi.variables[var]
  162. fo.createVariable(var, invar.datatype, invar.dimensions)
  163. fo.variables[var][:] = invar[:]
  164. if "long_name" in invar.ncattrs():
  165. fo.variables[var].long_name = invar.long_name
  166. if "units" in invar.ncattrs():
  167. fo.variables[var].units = invar.units
  168. os.remove(pathout.replace('.nc','_WORK.nc'))
  169. #
  170. add_k = 1
  171. for d in fo.dimensions :
  172. if d == 'n' :
  173. print 'Error: dimension n already exists in output file'
  174. sys.exit(16)
  175. if d == 'k' :
  176. add_k = 0
  177. onn = fo.createDimension('n', None)
  178. if add_k > 0 :
  179. onk = fo.createDimension('k', nk)
  180. olon = fo.createVariable('lon', 'f8',('n'))
  181. olat = fo.createVariable('lat', 'f8',('n'))
  182. oxis = fo.createVariable('xi', 'f8',('n'))
  183. oyjs = fo.createVariable('yj', 'f8',('n'))
  184. ouvs = fo.createVariable('uvel', 'f8',('n'))
  185. ovvs = fo.createVariable('vvel', 'f8',('n'))
  186. omas = fo.createVariable('mass', 'f8',('n'))
  187. oths = fo.createVariable('thickness', 'f8',('n'))
  188. owis = fo.createVariable('width', 'f8',('n'))
  189. oles = fo.createVariable('length', 'f8',('n'))
  190. odys = fo.createVariable('day', 'f8',('n'))
  191. omss = fo.createVariable('mass_scaling', 'f8',('n'))
  192. omsb = fo.createVariable('mass_of_bits', 'f8',('n'))
  193. ohds = fo.createVariable('heat_density', 'f8',('n'))
  194. oyrs = fo.createVariable('year', 'i4',('n'))
  195. onum = fo.createVariable('number', 'i4',('n','k'))
  196. #
  197. olon[:] = lons
  198. olon.long_name = "longitude"
  199. olon.units = "degrees_E"
  200. #
  201. olat[:] = lats
  202. olat.long_name = "latitude"
  203. olat.units = "degrees_N"
  204. #
  205. oxis[:] = xis
  206. oxis.long_name = "x grid box position"
  207. oxis.units = "fractional"
  208. #
  209. oyjs[:] = yjs
  210. oyjs.long_name = "y grid box position"
  211. oyjs.units = "fractional"
  212. #
  213. ouvs[:] = uvs
  214. ouvs.long_name = "zonal velocity"
  215. ouvs.units = "m/s"
  216. #
  217. ovvs[:] = vvs
  218. ovvs.long_name = "meridional velocity"
  219. ovvs.units = "m/s"
  220. #
  221. omas[:] = mas
  222. omas.long_name = "mass"
  223. omas.units = "kg"
  224. #
  225. oths[:] = ths
  226. oths.long_name = "thickness"
  227. oths.units = "m"
  228. #
  229. owis[:] = wis
  230. owis.long_name = "width"
  231. owis.units = "m"
  232. #
  233. oles[:] = les
  234. oles.long_name = "length"
  235. oles.units = "m"
  236. #
  237. odys[:] = dys
  238. odys.long_name = "year day of calving event"
  239. odys.units = "days"
  240. #
  241. omss[:] = mss
  242. omss.long_name = "scaling factor for mass of calving berg"
  243. omss.units = "none"
  244. #
  245. omsb[:] = msb
  246. omsb.long_name = "mass of bergy bits"
  247. omsb.units = "kg"
  248. #
  249. ohds[:] = hds
  250. ohds.long_name = "heat density"
  251. ohds.units = "J/kg"
  252. #
  253. oyrs[:] = yrs
  254. oyrs.long_name = "calendar year of calving event"
  255. oyrs.units = "years"
  256. #
  257. onum[:,:] = num
  258. onum.long_name = "iceberg number on this processor"
  259. onum.units = "count"
  260. #
  261. fo.close()