#!/bin/bash
#SBATCH -n 1
#SBATCH -t 00:10:00
#SBATCH -J interpolate_OSISAF
#SBATCH -o slurm-%j.out
#SBATCH -e slurm-%j.err
#SBATCH --mem-per-cpu=8000
#SBATCH --partition=debug # Partition on which to run the job
#SBATCH --account=limhr   # Account to which the job is charged
# Original: Pierre Mathiot, 2011
# Update : Francois Massonnet, 2013
# Update at BSC: Francois Massonnet, 2016
# Francois Massonnet, Nov 2016 -- update to match
#                                 BSC conventions
#
# Interpolation of OSI-SAF sea ice concentration to the ORCA grid.
# Particular attention is paid to "flag" values: concentration is only
# considered in "nominal" regions, i.e. excluding coastal areas and the
# gap-filling area around the North Pole.
#
# Questions: francois.massonnet@uclouvain.be
#            francois.massonnet@bsc.es
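#
# Usage note (illustrative): submit the script through SLURM, e.g.
#   sbatch interpolate_OSISAF.sh
# The namelist_OSISAF-* template files are expected to be present in the
# directory from which the job is submitted; they are copied into the
# temporary working directory below.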
#module load netCDF-Fortran/4.2-foss-2015a
set -x
set -o nounset
set -o errexit
yearb=1983 # Years to process
yeare=1983
grid=eORCA1 # Grid type (e.g. eORCA1 or ORCA25)
nbsmooth=2  # This is important. We need to interpolate the observations
            # onto the model grid, but the model grid is coarser.
            # Therefore, before interpolation, we smooth the input
            # observational data. This variable tells how many times to
            # apply the "smooth9" function of cdo, which performs a 2-D
            # smoothing of each grid point by weighting it with itself
            # and its neighbours.
            # Roughly speaking, nbsmooth should be the ratio of model
            # resolution to observational resolution.
            # At ORCA1 the resolution is ~50 km at the poles and the
            # OSI-SAF input data is at 10 km, hence a ratio of 5.
            # At ORCA25 the resolution is ~15 km, hence a ratio close to 1.
            # Set nbsmooth to 0 if you don't want smoothing.
            # AD: set to 2 for OSI SAF 450 => 25 km and eORCA1 ~50 km.
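# Illustrative only: nbsmooth can be derived from the approximate resolutions
# quoted above (the values here are assumptions, not read from the files):
#   model_res_km=50   # ~eORCA1 near the poles
#   obs_res_km=25     # OSI SAF 450 input used here
#   nbsmooth=$(( model_res_km / obs_res_km ))   # -> 2, as set above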
sosiedir=$HOME/tools/sosie-master/bin
mask=/gpfs/home/acad/ucl-elic/adelhass/git/EnKF/conversion_uf/mask-${grid}.nc
sourcedir=/gpfs/home/acad/ucl-elic/adelhass/git/EnKF/conversion_uf/OSI-SAF-450
ioutdir=/gpfs/home/acad/ucl-elic/adelhass/git/EnKF/conversion_uf/OSI-SAF-450-on-nemo
#mask=/esnas/autosubmit/con_files/mesh_mask_nemo.Ec3.2_O1L75.nc
#sourcedir=/esnas/obs/eumetsat/osisaf/original_files # Where original files are located
#ioutdir=/esnas/obs/eumetsat/osisaf/daily_mean/
scratchdir=/gpfs/home/acad/ucl-elic/$USER/scratch_limhr/sosie
# Directory where weights are available (leave blank to recompute the weights)
#weightdir=/esnas/scratch/Earth/nfuckar/EnKF/
weightdir=
# Create a directory to work in
# -----------------------------
tmpdir=$scratchdir/TMP_${RANDOM}
echo "TMPDIR IS >>>>>> $tmpdir <<<<<<<"
mkdir -p $tmpdir
cp namelist* $tmpdir
# Copy the interpolation weights if they already exist
[[ -f ${weightdir}/sosie_mapping_OSISAF-nh-${grid}.nc ]] && cp ${weightdir}/sosie_mapping_OSISAF-nh-${grid}.nc $tmpdir
[[ -f ${weightdir}/sosie_mapping_OSISAF-sh-${grid}.nc ]] && cp ${weightdir}/sosie_mapping_OSISAF-sh-${grid}.nc $tmpdir
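# If no pre-computed weights were copied, sosie3.x will typically generate the
# sosie_mapping_OSISAF-*.nc files on its first call (slower) and reuse them for
# the remaining dates, since all work happens inside the same $tmpdir.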
cd $tmpdir
# 1. Get the model grid and the mask
if [ ! -f ${mask} ]
then
  echo "${mask} does not exist."
  exit 1
fi
ln -sf ${mask} mask_out.nc
# Link sosie
ln -sf $sosiedir/sosie3.x .
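# Note: sosie3.x is invoked below without arguments, so it is assumed to read
# its configuration from the file named "namelist" in the current directory;
# each sed call below regenerates that file before the corresponding run.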
for year in `seq ${yearb} ${yeare}`
do
  echo $year
  rm -f list_files.txt
  ls $sourcedir/ice_conc_nh_ease2-250_cdr-v2p0_${year}????1200.nc > list_files.txt

  for file in `cat list_files.txt`
  do
    tag=`basename $file | sed -e "s/ice_conc_nh_ease2-250_cdr-v2p0_//" -e "s/1200.nc//"`
    echo $tag
    # Check that the file is also available for the other hemisphere
    nfile=`ls $sourcedir/ice_conc_?h_ease2-250_cdr-v2p0_${tag}1200.nc | wc -l`
    if [[ $nfile == 2 ]]
    then
      echo "Two files were found for the time stamp ${tag}!"

      for hemi in nh sh
      do
        filein=$sourcedir/ice_conc_${hemi}_ease2-250_cdr-v2p0_${tag}1200.nc
        cp -f $filein data_in_${hemi}-${tag}.nc

        # We only keep points that have nominal quality (status_flag = 0)
        cdo chname,status_flag,mask -eqc,0 \
            -selvar,status_flag data_in_${hemi}-${tag}.nc mask_in_${hemi}-${tag}.nc
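        # Illustrative check (not required by the workflow): inspect the range
        # of status_flag values present in an input file with, e.g.
        #   cdo infon -selvar,status_flag data_in_${hemi}-${tag}.nc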
        # Smooth the input data
        if [[ $nbsmooth == 0 ]]
        then
          echo "WARNING - YOU DID NOT SMOOTH INPUT DATA"
          echo "!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!!"
          mv data_in_${hemi}-${tag}.nc data_in_smooth_${hemi}-${tag}.nc
        else
          mv data_in_${hemi}-${tag}.nc data_in_smooth0_${hemi}-${tag}.nc
          for jsmooth in `seq 1 $nbsmooth`
          do
            cdo smooth9 data_in_smooth$(( $jsmooth - 1 ))_${hemi}-${tag}.nc \
                        data_in_smooth${jsmooth}_${hemi}-${tag}.nc
          done
          mv data_in_smooth${nbsmooth}_${hemi}-${tag}.nc data_in_smooth_${hemi}-${tag}.nc
        fi # if smoothing is necessary
        # Interpolation of the land-sea mask
        sed -e "s/TTAARRGGEETT/${grid}/" \
            -e "s/HHEEMMII/${hemi}/" \
            -e "s/TTAAGG/${tag}/" \
            namelist_OSISAF-mask > namelist
        ./sosie3.x
        # Since we just did an interpolation, the mask will not be sharp:
        # there will be values not exactly equal to 1. Let's remask the result.
        # The threshold is pretty arbitrary here, but to be conservative
        # we simply ignore points different from one.
        cdo eqc,1 tmask_OSISAF-${hemi}-${grid}_${tag}.nc OSISAF-${hemi}_mask_on_${grid}_${tag}.nc
        # 2. Interpolation of concentration
        sed -e "s/TTAARRGGEETT/${grid}/" \
            -e "s/HHEEMMII/${hemi}/" \
            -e "s/TTAAGG/${tag}/" \
            namelist_OSISAF-conc > namelist

        ./sosie3.x
        # Mask the interpolated ice concentration using the interpolated mask.
        # Also, put missing values returned by sosie (<0) to zero, so that
        # we'll be able to add northern and southern hemispheres later.
        cdo setmisstoc,0 \
            -mul -selvar,at_i at_i_OSISAF-${hemi}-${grid}_${tag}.nc \
                 -selvar,tmask OSISAF-${hemi}_mask_on_${grid}_${tag}.nc \
            at_i_OSISAF-${hemi}-${grid}_${tag}_masked.nc
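        # Reading the cdo chain above from right to left: the two -selvar
        # operators extract at_i and tmask, -mul multiplies them (zeroing
        # non-nominal points), and setmisstoc,0 turns any remaining missing
        # values into zeros.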
        # 3. Interpolation of the standard deviation
        #
        # This is a point to worry about, although there is not enough
        # information at this stage to do better. Interpolation of
        # second-order moments should account for the fact that errors are
        # correlated, but we don't have this information from the OSI-SAF
        # product. The best we can do is to assume that these errors are not
        # correlated and can be interpolated as is.
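        # For reference (a simplification, not used by the code): with
        # interpolation weights w_i and per-cell standard deviations sigma_i,
        # uncorrelated errors would combine as sqrt( sum_i w_i^2 sigma_i^2 ),
        # while fully correlated errors would give sum_i w_i sigma_i. Lacking
        # the error covariances from the product, the error field is simply
        # interpolated below like any other variable.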
        sed -e "s/TTAARRGGEETT/${grid}/" \
            -e "s/HHEEMMII/${hemi}/" \
            -e "s/TTAAGG/${tag}/" \
            namelist_OSISAF-error > namelist
        ./sosie3.x
        # Mask the interpolated error using the interpolated mask
        cdo setmisstoc,0 \
            -mul -selvar,error_at_i error_at_i_OSISAF-${hemi}-${grid}_${tag}.nc \
                 -selvar,tmask OSISAF-${hemi}_mask_on_${grid}_${tag}.nc \
            error_at_i_OSISAF-${hemi}-${grid}_${tag}_masked.nc
      done # for each hemisphere

      # Merge North and South data into one file
      for var in at_i error_at_i
      do
        ncbo -O -F -v ${var} --op_typ=add ${var}_OSISAF-nh-${grid}_${tag}_masked.nc \
                                          ${var}_OSISAF-sh-${grid}_${tag}_masked.nc \
                                          ${var}_OSISAF-${grid}_${tag}.nc
      done
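      # Adding the two hemispheric fields works here because each masked field
      # was set to zero (setmisstoc,0 above) wherever its hemisphere has no
      # data, so the sum simply juxtaposes north and south on the global grid.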
      # Append the errors to the concentration files
      ncks -F -A -v error_at_i error_at_i_OSISAF-${grid}_${tag}.nc at_i_OSISAF-${grid}_${tag}.nc
      ncrename -v at_i,sic -v error_at_i,error_sic at_i_OSISAF-${grid}_${tag}.nc
      mv at_i_OSISAF-${grid}_${tag}.nc ${ioutdir}/sic-${grid}/
      # Do a bit of cleaning here!
      # rm -f at_i_OSISAF-?h-${grid}_${tag}_masked.nc \
      #       error_at_i_OSISAF-?h-${grid}_${tag}_masked.nc \
      #       error_at_i_OSISAF-${grid}_${tag}.nc \
      #       OSISAF-?h_mask_on_${grid}_${tag}.nc \
      #       error_at_i_OSISAF-?h-${grid}_${tag}.nc \
      #       at_i_OSISAF-?h-${grid}_${tag}.nc \
      #       tmask_OSISAF-?h-${grid}_${tag}.nc \
      #       data_in_smooth*${tag}.nc \
      #       mask_in_?h-${tag}.nc
    else
      echo "Only one file was found for the time stamp $tag"
      echo "This is not coded yet"
    fi # if there are two files
  done # for each file of the year
done # for each year