# generate_new_bdy_dta_onefile.py
  1. from netCDF4 import Dataset
  2. import sys
  3. import time
  4. import numpy as np
  5. from collections import OrderedDict
  6. from nn_interp import nn_interp_1d
  7. # Script for generating NEMO BDY data files from pre-interpolated data, and a NEMO BDY geometry file.
  8. # Author: C. Pelletier
  9. # Call:
  10. # python generate_new_bdy_dta_onefile.py <input_file> <grid> <output_file>
  11. # where:
  12. # <input_file>: is an input dataset which has been preinterpolated to the NEMO grid, and contains all the fields you want to generate data to.
  13. # This script will "detect" any data fields in <input_file>.
  14. # <grid> is the type of NEMO grid points (T, U or V) that your input data is defined on.
  15. # All fields within one <input_data> file MUST share the same type of points (e.g., don't mix velocities and tracers).
  16. # If the input file has 3D fields, then it should contain a depth variable AND a depth dimension, both called "depth<grid>" (e.g. "depthv" for V nodes)
  17. # <output_file> is the path to an output file.
  18. # BDY geometry file, coming from generate_new_bdy_geom.py
  19. geom_file = "/storepelican/pelletie/nemo_bdy/grids/eORCA025_SO_BDY-COORDS_rim1.nc"
  20. if(len(sys.argv) == 4):
  21. input_file = sys.argv[1]
  22. grid = sys.argv[2]
  23. output_file = sys.argv[3]
  24. else:
  25. print('Please provide three input arguments: python generate_new_bdy_dta_onefile.py <input_file> <grid> <output_file>')
  26. sys.exit()
  27. if(grid not in ['t', 'u', 'v']):
  28. print('grid should be t, u or v')
  29. sys.exit()
  30. ### END OF INPUT SPECIFICATION.
  31. geom_nc = Dataset(geom_file, mode='r')
  32. nbis = geom_nc.variables['nbi'+grid][0,:] - 1
  33. nbjs = geom_nc.variables['nbj'+grid][0,:] - 1
  34. nbrs = geom_nc.variables['nbr'+grid][0,:]
  35. nbdys = len(geom_nc.dimensions['xb'+grid.upper()])
  36. lons = geom_nc.variables['glam'+grid][0,:]
  37. lats = geom_nc.variables['gphi'+grid][0,:]
  38. geom_nc.close()
  39. in_nc = Dataset(input_file, mode='r')
  40. out_nc = Dataset(output_file, mode='w')
  41. nt = len(in_nc.dimensions['time_counter'])
  42. ny = len(in_nc.dimensions['y'])
  43. nx = len(in_nc.dimensions['x'])
  44. has_3d = False
  45. nxb = nbdys
  46. remap_vars = []
  47. for name, var in in_nc.variables.items():
  48. if(( 'time_counter' in var.dimensions) \
  49. and ('x' in var.dimensions)):
  50. remap_vars.append(name)
  51. if('depth'+grid in var.dimensions):
  52. has_3d = True
  53. nz = len( in_nc.dimensions['depth'+grid] )
  54. out_nc.createDimension('yb', 1)
  55. out_nc.createDimension('xb'+grid.upper(), nxb)
  56. out_nc.createDimension('time_counter', 0)
  57. if(has_3d):
  58. out_nc.createDimension('depth'+grid, nz)
  59. idx_flat_2d = np.repeat( np.arange(nt) * ny*nx, nxb ) \
  60. + np.tile(nbjs * nx, nt) \
  61. + np.tile(nbis, nt)
  62. time_in = in_nc.variables['time_counter']
  63. time_out = out_nc.createVariable(varname = 'time_counter', datatype = time_in.datatype, dimensions = time_in.dimensions)
  64. time_out.setncatts(time_in.__dict__)
  65. time_out[:] = time_in[:]
  66. nbi_out = out_nc.createVariable(varname='nbi'+grid, datatype='i4', dimensions=['yb', 'xb'+grid.upper()])
  67. nbi_out[0,:] = nbis[:] + 1
  68. nbj_out = out_nc.createVariable(varname='nbj'+grid, datatype='i4', dimensions=['yb', 'xb'+grid.upper()])
  69. nbj_out[0,:] = nbjs[:] + 1
  70. nbr_out = out_nc.createVariable(varname='nbr'+grid, datatype='i4', dimensions=['yb', 'xb'+grid.upper()])
  71. nbr_out[0,:] = nbrs[:]
  72. if(has_3d):
  73. depth_in = in_nc.variables['depth'+grid]
  74. depth_out = out_nc.createVariable(varname = 'depth'+grid, datatype = depth_in.datatype, dimensions = depth_in.dimensions)
  75. depth_out.setncatts(depth_in.__dict__)
  76. depth_out[:] = depth_in[:]
  77. idx_flat_3d = np.repeat( np.arange(nt*nz) * ny*nx, nxb ) \
  78. + np.tile(nbjs * nx, nt*nz) \
  79. + np.tile(nbis, nt*nz)
  80. lon_out = out_nc.createVariable(varname='nav_lon', datatype='f8', dimensions=['yb', 'xb'+grid.upper()])
  81. lon_out.standard_name = "longitude"
  82. lon_out.units = "degrees East"
  83. lon_out[0,:] = lons[:]
  84. lat_out = out_nc.createVariable(varname='nav_lat', datatype='f8', dimensions=['yb', 'xb'+grid.upper()])
  85. lat_out.standard_name = "latitude"
  86. lat_out.units = "degrees North"
  87. lat_out[0,:] = lats[:]
  88. for var in remap_vars:
  89. in_var = in_nc.variables[var]
  90. if( 'depth'+grid in in_var.dimensions ):
  91. out_var = out_nc.createVariable(var, datatype='f4', \
  92. dimensions=['time_counter', 'depth'+grid, 'yb', 'xb'+grid.upper()], fill_value = 1.e20)
  93. out_var[:,:,0,:] = np.reshape( (np.ndarray.flatten(in_var[:]))[idx_flat_3d], \
  94. (nt, nz, nxb) )
  95. if(np.ma.is_masked(out_var[:])):
  96. for t in range(0,nt):
  97. for k in range(0,nz):
  98. if(np.all( out_var[:].mask[t,k,0,:] ) ):
  99. # All masked nodes: take from upper level.
  100. out_var[t,k,0,:] = out_var[t,k-1,0,:]
  101. elif(np.any(out_var[:].mask[t,k,0,:])):
  102. # Some masked nodes: drown at same level.
  103. out_var[t,k,0,:] = nn_interp_1d(out_var[t,k,0,:])
  104. else:
  105. out_var = out_nc.createVariable(var, datatype='f4', \
  106. dimensions=['time_counter', 'yb', 'xb'+grid.upper()], \
  107. fill_value = 1.e20)
  108. out_var[:,0,:] = np.reshape( (np.ndarray.flatten(in_var[:]))[idx_flat_2d], \
  109. (nt, nxb) )
  110. if(np.ma.is_masked(out_var[:])):
  111. for t in range(0,nt):
  112. if(np.any(out_var[:].mask[t,0,:])):
  113. out_var[t,0,:] = nn_interp_1d(out_var[t,0,:])
  114. in_nc.close()
  115. out_nc.close()