# liblsm.sh is a library of shell script functions required for the osm and lpjg-offline runscripts
#
# Usage: source ./liblsm.sh

# Function to create an ICMCL* file with daily veg from an IFS experiment
# Usage: osm_gen_icmcl_from_ifs <year>
function osm_gen_icmcl_from_ifs()
{
    ## For stand-alone testing only: variables required
    # export GRIB_BIN_PATH=/usr/local/apps/grib_api/1.12.3/INTEL/140/bin
    # grib_copy=${GRIB_BIN_PATH}${GRIB_BIN_PATH:+/}grib_copy
    # run_start_date="1990-01-01"
    # osm_ifs_output_dir="${SCRATCH}/ECEARTH-RUNS/esma/output/ifs/"

    FYEAR=$1

    #grib codes 66/67/27/28/29/30
    snameGG="lai_lv/lai_hv/cvl/cvh/tvl/tvh"

    if [ $FYEAR = $(date -u -d "$run_start_date" +%Y) ]
    then
        ## first year, we need the +000000 file
        ff0=$(find ${osm_ifs_output_dir} | grep "ICMGG....+000000")
    else
        ## do we really need the entire previous month? the last day or timestep should be enough, can add a (-w dataDate) condition
        ff0=$(find ${osm_ifs_output_dir} | grep "ICMGG....+$(( $FYEAR -1 ))12")
    fi
    [[ "$ff0" == "" ]] && error "Cannot find forcing for year $FYEAR!"

    rm -f osm_veg.grb
    ${grib_copy} -w shortName=$snameGG,dataTime=0000,day=1 $ff0 osm_veg.grb

    ffs=$(find ${osm_ifs_output_dir} | grep "ICMGG....+${FYEAR}.." | sort)
    [[ "$ffs" == "" ]] && error "Cannot find forcing for year $FYEAR!"
    for ff in $ffs
    do
        ${grib_copy} -w shortName=$snameGG,dataTime=0000 $ff osm_tmp.grb
        cat osm_tmp.grb >> osm_veg.grb
        # break
    done
    rm -f osm_tmp.grb

    if [[ -r ICMCL${exp_name}INIT ]]
    then
        mv ICMCL${exp_name}INIT ICMCL${exp_name}INIT_ORI
    fi
    mv osm_veg.grb ICMCL${exp_name}INIT
}

# Function to create time varying vegetation for OSM run
# Usage: osm_gen_veg
function osm_gen_veg()
{
    cp ${ecearth_src_dir}/ifs-${ifs_version}/src/surf/offline/scripts/init_clim.py .
    cp ${ecearth_src_dir}/ifs-${ifs_version}/src/surf/offline/scripts/pyfunc.py .
    cat > pyrun.py << EOF
from init_clim import *
from pyfunc import *
FVEGgrb='ICMCL${exp_name}INIT'
CFSURF='osm/init/surfclim'
## Generate netcdf files (empty)
ginfo = get_grib_grid(FVEGgrb,retbounds=True)
gen_file(FOUT='surfveg_all',FTYPE='veg',ginfo=ginfo)
## Convert grib to netcdf
grib2nc(FNC='surfveg_all',FGRB=FVEGgrb,gb2NC=gb2NCveg,FTYPE='veg')
## get mask
ncCLM = Dataset(CFSURF,'r')
JPOINTS = ncCLM.variables['x'][:]
ncCLM.close()
Xmask = np.zeros((ginfo['nptot']))
Xmask[JPOINTS-1]=1
mask = Xmask == 1
## Create final files cutting down mask
cut_mask(mask=mask,FIN='surfveg_all',FOUT='surfveg')
EOF
    # Run python in a sub-shell to handle different environment
    ( configure_python ; python pyrun.py )

    rm -f pyfunc.p* init_clim.p* pyrun.py surfveg_all
}
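## Example (stand-alone sketch; besides the test variables listed above, exp_name, ecearth_src_dir and
## ifs_version must be set, and osm/init/surfclim must already exist before calling osm_gen_veg):
# osm_gen_icmcl_from_ifs 1991   # year is illustrative; writes ICMCL${exp_name}INIT
# osm_gen_veg                   # converts ICMCL${exp_name}INIT into the netcdf file 'surfveg'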
# Function to create initial and climatological files for OSM run
# Usage: osm_gen_init_clim
function osm_gen_init_clim()
{
    if ! [ -r osm/init/surfclim ]
    then
        # assumes that it is running in the run directory where previous data has already been linked
        # make sure we remove existing files
        rm -f osm_init.grb osm_clim.grb

        #--------------------------------------------
        # 1. Initial conditions
        #--------------------------------------------
        ${grib_copy} -w shortName=swvl1/swvl2/swvl3/swvl4/stl1/stl2/stl3/stl4/skt/asn/rsn/sd/tsn ICMGG${exp_name}INIT osm_init.grb

        #--------------------------------------------
        # 2. Climatological conditions
        #--------------------------------------------
        ${grib_copy} -w shortName=lsm/sdor/z/slt/cvl/cvh/tvl/tvh ICMGG${exp_name}INIT osm_clim.grb
        ${grib_copy} -w parameter=117/118/119/120 ICMGG${exp_name}INIT osm_tmp.grb
        cat osm_tmp.grb >> osm_clim.grb ; rm -f osm_tmp.grb

        ## No need for this if we have LRDVEG=TRUE and LRDALB=FALSE
        # monthly albedo, lai
        rm -f osm_mon_alb osm_mon_alnid osm_mon_aluvd osm_mon_lai_lv osm_mon_lai_hv osm_tmp.grb
        for im in 01 02 03 04 05 06 07 08 09 10 11 12
        do
            for sn in lai_lv lai_hv
            do
                ${grib_copy} -w shortName=$sn,month=$im,year=$leg_start_date_yyyy ICMCL${exp_name}INIT osm_tmp.grb
                cat osm_tmp.grb >> osm_mon_${sn}
                rm -f osm_tmp.grb
            done
            for sn in alnid aluvd
            do
                ${grib_copy} -w shortName=$sn ${ini_data_dir}/ifs/${ifs_grid}/climate/ICMCL-$im osm_tmp.grb
                cat osm_tmp.grb >> osm_mon_${sn}
                rm -f osm_tmp.grb
            done
        done
        cdo -add -mulc,0.45976 osm_mon_aluvd -mulc,0.54024 osm_mon_alnid osm_mon_alb
        ${grib_set} -s shortName=al osm_mon_alb osm_tmp.grb ; mv osm_tmp.grb osm_mon_alb
        cat osm_mon_alb >> osm_clim.grb
        cat osm_mon_lai_hv >> osm_clim.grb
        cat osm_mon_lai_lv >> osm_clim.grb

        case "${ifs_grid}" in
            T159L*) ifs_gauss_grid=80 ;;
            T255L*) ifs_gauss_grid=128 ;;
            T511L*) ifs_gauss_grid=256 ;;
            *) error "Can't set gauss grid for unknown horizontal grid: ${ifs_grid}" ;;
        esac

        ## For model orography we need to interpolate the spectral Z to grid-point space
        # if emos_tool is not available on the platform, use existing files
        if [ ! -z "${emos_tool-}" ]; then
            ${grib_copy} -w shortName=z ICMSH${exp_name}INIT z_spec.grb
            ( configure_python ; ${emos_tool} --reduced=${ifs_gauss_grid} z_spec.grb z_gp.grb )
            cat z_gp.grb >> osm_clim.grb
            rm -f z_spec.grb z_gp.grb CF_????_????
        else
            cat ${ini_data_dir}/osm/z_n${ifs_gauss_grid}.grb >> osm_clim.grb
        fi

        rm -f osm_mon_alb osm_mon_alnid osm_mon_aluvd osm_mon_lai_lv osm_mon_lai_hv osm_tmp.grb

        #--------------------------------------------
        # 3. Generate netcdf files
        #--------------------------------------------
        cp ${ecearth_src_dir}/ifs-${ifs_version}/src/surf/offline/scripts/init_clim.py .
        cp ${ecearth_src_dir}/ifs-${ifs_version}/src/surf/offline/scripts/pyfunc.py .
        cat > pyrun.py << EOF
from init_clim import *
from pyfunc import *
FCLIMgrb='osm_clim.grb'
FINITgrb='osm_init.grb'
## Generate netcdf files (empty)
ginfo = get_grib_grid(FCLIMgrb,retbounds=True)
gen_file(FOUT='surfclim_all',FTYPE='clm',ginfo=ginfo,nlevs=${NCSS:-4})
gen_file(FOUT='soilinit_all',FTYPE='ini',ginfo=ginfo,nlevs=${NCSS:-4})
## Convert grib to netcdf
grib2nc(FNC='soilinit_all',FGRB=FINITgrb)
grib2nc(FNC='surfclim_all',FGRB=FCLIMgrb)
### Defaults defining points
LLAND=True
LLAKE=False
LOCEAN=False
BBOX=[-190,190,-91,91]
## find mask, i.e. points where we'll run the model
mask = get_mask('surfclim_all',LLAND,LLAKE,LOCEAN,BBOX)
## Create final files cutting down mask
cut_mask(mask=mask,FIN='surfclim_all',FOUT='surfclim')
cut_mask(mask=mask,FIN='soilinit_all',FOUT='soilinit')
## finally set Mask to 1
nc = Dataset('surfclim','r+')
nc.variables['Mask'][:] = 1
nc.close()
EOF
        # Run python in a sub-shell to handle different environment
        ( configure_python ; python pyrun.py )

        mkdir -p osm/init
        mv osm_init.grb osm_clim.grb surfclim_all soilinit_all osm/init
        cp surfclim soilinit osm/init
        rm -f pyfunc.p* init_clim.p* pyrun.py
    else
        # osm/init/surfclim exists
        cp osm/init/surfclim .
        cp osm/init/soilinit .
    fi
}
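## Example (sketch; run from the run directory with ICMGG${exp_name}INIT, ICMSH${exp_name}INIT and
## ICMCL${exp_name}INIT already linked, and exp_name, ini_data_dir, ifs_grid, grib_copy, grib_set and cdo set up):
# osm_gen_init_clim   # populates osm/init/ with osm_init.grb, osm_clim.grb, surfclim and soilinit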
function osm_gen_forcing1()
{
    # generic function to create/prepare forcing
    FYEAR=$1 # year to process forcing
    # ${osm_forcing_dir} : must be defined as where the forcing is present or created
    # ${osm_ifs_output_dir} : must be defined, in case of forcing from IFS, location of raw output
    # ${osm_forcing_type} : must be defined, only 'ifs' is supported, in the future could take other values like gswp3, etc...

    if ! [ -r ${osm_forcing_dir}/$FYEAR/Rainf.nc ]
    then
        # file not available, will need to be created
        if [ $osm_forcing_type = 'ifs' ]
        then
            # forcing from an IFS experiment
            osm_gen_forcing_ifs $FYEAR
        else
            error "Forcing year $FYEAR is unavailable, only osm_forcing_type=ifs is supported in osm_gen_forcing (using $osm_forcing_type)"
        fi
    fi
    # end of forcing processing
}

function osm_gen_forcing()
{
    ## generic function to create/prepare forcing
    FYEAR1=$1 # first year to process forcing
    FYEAR2=$2 # last year to process forcing
    osm_forcing_dir_merged=${FYEAR1}-${FYEAR2}

    # if only 1 year is required, generate forcing for that year
    if [ $FYEAR1 == $FYEAR2 ]
    then
        osm_gen_forcing1 $FYEAR1
        return
    fi

    ## skip if data already there
    [[ -r ${osm_forcing_dir}/${osm_forcing_dir_merged}/Rainf.nc ]] && return 0

    ## generate forcing for each year
    for (( yr=FYEAR1 ; yr<=FYEAR2 ; yr+=1 ))
    do
        osm_gen_forcing1 $yr
    done

    ## merge yearly files into multi-year files
    for f in LWdown.nc PSurf.nc Qair.nc Rainf.nc Snowf.nc SWdown.nc Tair.nc Wind.nc
    do
        cdo_str="cdo -O -f nc4c -z zip_2 mergetime"
        for (( yr=FYEAR1; yr<=FYEAR2 ; yr+=1 ))
        do
            cdo_str+=" ${yr}/$f "
        done
        cdo_str+=" ${osm_forcing_dir_merged}/$f "
        mkdir -p ${osm_forcing_dir}/${osm_forcing_dir_merged}
        ( cd ${osm_forcing_dir} && export SKIP_SAME_TIME=1 && $cdo_str )
    done
}
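## Example (sketch; years are illustrative, and osm_forcing_dir, osm_ifs_output_dir and osm_forcing_type=ifs must be set):
# osm_gen_forcing 1990 1999   # yearly forcing plus merged files under ${osm_forcing_dir}/1990-1999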
function osm_gen_forcing_ifs()
{
    FYEAR=$1

    #grib codes 134/167/168/165/166/169/175/142/143/144/228
    snameGG="sp/2t/2d/10u/10v/ssrd/strd/lsp/cp/sf/tp"

    if [ $FYEAR = $(date -u -d "$run_start_date" +%Y) ]
    then
        ## first year, we need the +000000 file
        ff0=$(find ${osm_ifs_output_dir} -name "ICMGG????+000000.grb")
        [[ "$ff0" == "" ]] && ff0=$(find ${osm_ifs_output_dir} -name "ICMGG????+000000")
        ## we might have output from a run which started earlier
        [[ "$ff0" == "" ]] && ff0=$(find ${osm_ifs_output_dir} -name "ICMGG????+$(( $FYEAR -1 ))12.grb")
        [[ "$ff0" == "" ]] && ff0=$(find ${osm_ifs_output_dir} -name "ICMGG????+$(( $FYEAR -1 ))12")
    else
        ## do we really need the entire previous month? the last day or timestep should be enough, can add a (-w dataDate) condition
        ff0=$(find ${osm_ifs_output_dir} -name "ICMGG????+$(( $FYEAR -1 ))12.grb")
        [[ "$ff0" == "" ]] && ff0=$(find ${osm_ifs_output_dir} -name "ICMGG????+$(( $FYEAR -1 ))12")
    fi
    [[ "$ff0" == "" ]] && error "Cannot find forcing for year $FYEAR!"

    rm -f osm_for.grb
    ${grib_copy} -w shortName=$snameGG $ff0 osm_for.grb

    ffs=$(find ${osm_ifs_output_dir} -name "ICMGG????+${FYEAR}??.grb" | sort)
    [[ "$ffs" == "" ]] && ffs=$(find ${osm_ifs_output_dir} -name "ICMGG????+${FYEAR}??" | sort)
    [[ "$ffs" == "" ]] && error "Cannot find forcing for year $FYEAR!"
    for ff in $ffs
    do
        ${grib_copy} -w shortName=$snameGG $ff osm_tmp.grb
        cat osm_tmp.grb >> osm_for.grb
    done

    ${grib_copy} osm_for.grb osm_for_[shortName].grb
    rm -f osm_for.grb

    cp ${ecearth_src_dir}/ifs-${ifs_version}/src/surf/offline/scripts/create_forcing.py .
    cat > pyrun.py << EOF
from create_forcing import *
SURFCLIM="surfclim"
INPUTGRB="osm_for_[shortName].grb"
FTYPE='SFC'
tunits="seconds since $FYEAR-01-01 00:00:00"
process(SURFCLIM,INPUTGRB,FTYPE,tunits)
EOF
    ( configure_python ; python pyrun.py )

    # TODO check if we have to re-enable Ctpf, it was disabled some time ago
    #cdo -L -merge -selvar,Ctpf Ctpf.nc Rainf.nc tmp.nc; mv tmp.nc Rainf.nc
    #rm -f Ctpf.nc

    mkdir -p ${osm_forcing_dir}/$FYEAR/
    mv Tair.nc Qair.nc PSurf.nc Wind.nc SWdown.nc LWdown.nc Snowf.nc Rainf.nc ${osm_forcing_dir}/$FYEAR/
    rm -f osm_tmp.grb osm_for_*.grb create_forcing.p* pyrun.py
}

function osm_post_all()
{
    # do basic OSM output post-processing
    osm_post_output $1

    # save icmcl files
    osm_post_icmcl $1

    # save era-land surface fields
    mkdir -p $2
    osm_post_land_param $1 $2

    # generate lpjg_forcing
    mkdir -p $3
    config=lpjg_forcing lpjg_gen_forcing $leg_start_date_yyyy $leg_end_date_yyyy_full
}
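## Example (sketch; normally invoked from the script written by osm_post_gen_script below, which
## defines run_dir, exp_name, the leg_* dates, ifs_* settings, land_param_dir and lpjg_forcing_dir):
# osm_post_all output/osm/001 $run_dir/output/osm/land_param $run_dir/output/osm/lpjg_forcing   # paths are illustrative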
function osm_post_output()
{
    DATADIR=$1 # location where the output was saved

    local t1=$(date +%s)

    ofreqh=$(( ($ifs_output_freq * $ifs_time_step_sec) / 3600 ))

    #cd ${run_dir}/$DATADIR
    mkdir -p $TMPDIR/$$/osm/post_$$
    cd $TMPDIR/$$/osm/post_$$

    # convert the netcdf files to one big grib file - or use the one generated previously if found
    if [ -f ${run_dir}/$DATADIR/out.grb1 ]
    then
        cp ${run_dir}/$DATADIR/out.grb1 .
    else
        cp ${ecearth_src_dir}/ifs-${ifs_version}/src/surf/offline/scripts/convNc2Grb.py .
        ${grib_copy} -w shortName=skt ${run_dir}/osm/init/osm_init.grb grib_template.grb

        cat > pyrun.py << EOFPY
from convNc2Grb import *
pp_gg=['39.128','40.128','41.128','42.128','139.128','170.128','183.128','236.128','235.128','238.128','141.128','33.128','32.128','198.128']
pp_d2m=['167.128','168.128']
pp_sus=['243.128','66.128','67.128','27.128','28.128','29.128','30.128']
pp_efl=['176.128','177.128','147.128','146.128','169.128','175.128']
pp_wat=['182.128','205.128','144.128','45.128','228.128','8.128','9.128']
pp_cld=['260038','3066']
pp_eva=['228100','228101','228102','44.128']
pp=pp_gg+pp_d2m+pp_sus+pp_efl+pp_wat
# TODO add a flag for this, when LPROD2=.TRUE. in osm namelist
#pp=pp+pp_cld+pp_eva
print(str(pp))
write2grb(CFSURF="${run_dir}/osm/init/surfclim",
#write2grb(CFSURF="osm/init/surfclim",
          CFTEMP="grib_template.grb",
          CFBASE="${run_dir}/${DATADIR}/",
#          CFBASE="./",
#          CFBASE="$DATADIR",
          ZMISS=9999,
          FCTYPE='AN',
          FGOUT='out.grb',
#          FGOUT='$DATADIR/out.grb',
          PARAMS=pp,
          LACUM=False,LRESET=-1,DATE0=None,TIME0=0)
EOFPY
        ( configure_python ; python -u pyrun.py )

        ## split grib1 from grib2 (otherwise cdo gets a bit confused) and order by date/time
        ${grib_copy} -B "date:i asc, time asc, paramId:i asc" out.grb 'out.grb[editionNumber]'
        rm -f out.grb convNc2Grb.p* pyrun.py grib_template.grb
        cp out.grb? ${run_dir}/$DATADIR
    fi

    ## Now split the files into monthly chunks like the IFS output
    mdate=$leg_start_date
    while [[ $mdate != $leg_end_date ]]; do
        d0=$(date -u -d "$mdate + $ofreqh hours" +"%Y-%m-%dT%H:%M:%S")
        d1=$(date -u -d "$mdate + 1 month" +"%Y-%m-%dT%H:%M:%S")
        dtag=$(date -u -d "$mdate" +"%Y%m")
        FGOUT=ICMGG${exp_name}+$dtag
        # skip this file if already generated
        if [ ! -f ${run_dir}/$DATADIR/$FGOUT ]
        then
            # grb2 output disabled, to enable set LWRWAT=.TRUE. in osm namelist
            for ff in grb1 #grb2
            do
                #(
                [ -f out.$ff ] && cdo seldate,$d0,$d1 out.$ff ICMGG${exp_name}+$dtag.$ff || echo "file out.$ff not found!"
                [ -f ICMGG${exp_name}+$dtag.$ff ] && cdo timmean ICMGG${exp_name}+$dtag.$ff MEAN_ICMGG${exp_name}+$dtag.$ff
                #) &
            done
            #wait
            mv ICMGG${exp_name}+$dtag.grb1 $FGOUT

            ## add depthBelowLandLayer to soil moisture/temp to conform to IFS standard (needed for lpjg_forcing)
            ${grib_set} -w paramId=39/139 -s indicatorOfTypeOfLevel=112,topLevel=0,bottomLevel=7 $FGOUT temp ; mv temp $FGOUT
            ${grib_set} -w paramId=40/170 -s indicatorOfTypeOfLevel=112,topLevel=7,bottomLevel=28 $FGOUT temp ; mv temp $FGOUT
            ${grib_set} -w paramId=41/183 -s indicatorOfTypeOfLevel=112,topLevel=28,bottomLevel=100 $FGOUT temp ; mv temp $FGOUT
            ${grib_set} -w paramId=42/236 -s indicatorOfTypeOfLevel=112,topLevel=100,bottomLevel=missing $FGOUT temp ; mv temp $FGOUT

            #mv MEAN_ICMGG${exp_name}+$dtag.grb1 MEAN_ICMGG${exp_name}+$dtag
            gzip -c MEAN_ICMGG${exp_name}+$dtag.grb1 > MML_${exp_name}_3h_GG_$dtag.grb.gz
            rm -f MEAN_ICMGG${exp_name}+$dtag.grb1

            ## move files from TMPDIR to outdir - comment this if running in ${run_dir}/$DATADIR
            mv ${FGOUT}* MML_${exp_name}_3h_GG_$dtag.grb.gz ${run_dir}/$DATADIR
        fi
        mdate=$(date -uR -d "$mdate + 1 month")
    done

    rm -f out.grb? ${run_dir}/$DATADIR/out.grb?

    ## compress netcdf output to netcdf 4
    ## these netcdf files are not very important, could be removed at some point
    #for ff in $( ls *.nc )
    #do
    #    (
    #    #nccopy -4 -d 6 $ff 1_$ff
    #    nccopy -k 4 -d 6 $ff 1_$ff
    #    mv 1_$ff $ff
    #    ) &
    #done
    #wait

    local t2=$(date +%s)
    local tr=$(date -d "0 -$t1 sec + $t2 sec" +%T)
    echo "Elapsed: $tr"

    cd -
}
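## Example (sketch; run_dir, exp_name, leg_start_date/leg_end_date, ifs_output_freq, ifs_time_step_sec
## and TMPDIR must be set; the data directory is illustrative):
# osm_post_output output/osm/001   # leaves monthly ICMGG${exp_name}+YYYYMM and MML_*.grb.gz files there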
# save surface fields for an era-land like reanalysis
function osm_post_land_param()
{
    DATADIR=$1 # location where the output was saved
    OUTDIR=$2  # location where to put era-land files

    land_param="39.128/40.128/41.128/42.128/139.128/170.128/183.128/236.128/235.128/238.128/141.128/33.128/32.128"
    run_start_date_yyyymm=$(date -u -d "${run_start_date}" +%Y%m)

    mkdir -p $OUTDIR
    #cd $OUTDIR
    mkdir -p $TMPDIR/$$/osm/land_param
    cd $TMPDIR/$$/osm/land_param

    #rm -f land_param_${exp_name}_*
    # save land fields for 1st day of each month
    for f in ${run_dir}/$DATADIR/ICMGG${exp_name}+??????
    do
        # hack to get first timestep of the run which is at 0300h instead of 0000h
        # remove this if the OSM post script is fixed to output the first timestep
        fname=`basename $f`
        if [ ${fname: -6:6} == $run_start_date_yyyymm ]
        then
            ${grib_copy} -w param=${land_param},dataTime=0300,day=1 $f land_param_${exp_name}_[dataDate]00
            tmpfile=land_param_${exp_name}_${run_start_date_yyyymm}0100
            if [ -f $tmpfile ]
            then
                ${grib_set} -s dataTime=0000 $tmpfile tmp1
                mv tmp1 $tmpfile
            fi
        fi
        ${grib_copy} -w param=${land_param},dataTime=0000,day=1 $f land_param_${exp_name}_[dataDate]00
    done

    for f in land_param_${exp_name}_?????????? ; do gzip -c $f > ${OUTDIR}/$f.gz ; rm $f ; done
}

# Save icmcl files containing vegetation state
# These files are similar to the "era20c vegetation from an off-line LPJ-Guess run forced with ERA20C"
# files used in the EC-Earth runscripts
# This is only useful for LPJG runs, in which vegetation is transient
# This function can also be used with IFS output, with some minor changes
function osm_post_icmcl()
{
    DATADIR=$1 # location where the output was saved

    cd ${run_dir}/$DATADIR

    vars="var66,var67,var27,var28,var29,var30"
    vars_grib="66/67/27/28/29/30"
    vars_mean="var66,var67,var27,var28"
    vars_type="var29,var30"

    for (( year=leg_start_date_yyyy ; year<=leg_end_date_yyyy_full ; year+=1 ))
    do
        # process each monthly file
        for month in `seq -w 1 12`
        do
            prefix=ICMGG${exp_name}
            rm -f icmcl_${year}${month}.grb icmcl_${year}${month}_{mean,type}.grb tmp.grb
            ${grib_copy} -w paramId=${vars_grib},dataTime=0000 ${prefix}+${year}${month} tmp.grb
            cdo -O settaxis,${year}-${month}-15,00:00:00 -timmean -selvar,${vars_mean} tmp.grb icmcl_${year}${month}_mean.grb
            cdo -O seldate,${year}-${month}-15T00:00:00 -selvar,${vars_type} tmp.grb icmcl_${year}${month}_type.grb
            cdo -O merge icmcl_${year}${month}_*.grb icmcl_${year}${month}.grb
            rm -f icmcl_${year}${month}_{mean,type}.grb tmp.grb
        done

        # merge all monthly files into one yearly file
        cdo -O mergetime icmcl_${year}??.grb icmcl_${year}.grb
        rm -f icmcl_${year}??.grb

        # re-order them in the same order as era20c files
        cdo -O splitname icmcl_${year}.grb icmcl_${year}_
        ifiles=""
        IFS=","
        for v in $vars ; do ifiles+=" icmcl_${year}_${v}.grb " ; done
        unset IFS
        cdo -O merge $ifiles icmcl_${year}.grb

        # fixes for IFS (set missing data to 0, fix wrong metadata)
        cdo setmisstoc,0. icmcl_${year}.grb tmp.grb
        ${grib_set} -s gridType=reduced_gg,timeRangeIndicator=10 tmp.grb icmcl_${year}.grb
        rm -f icmcl_${year}_* tmp.grb
    done
}
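## Example (sketch; expects monthly ICMGG${exp_name}+YYYYMM files under ${run_dir}/<datadir> and the
## leg_start_date_yyyy / leg_end_date_yyyy_full variables to be set; the data directory is illustrative):
# osm_post_icmcl output/osm/001   # leaves icmcl_<year>.grb files in that directory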
function osm_post_gen_script()
{
    DATADIR=$1 # location where the output was saved
    LEG=$2

    has_config osm:post_all    && do_post_all=true    || do_post_all=false
    has_config osm:post_spinup && do_post_spinup=true || do_post_spinup=false

    cat > ${exp_name}_osm_post_${LEG}.sh << EOF
#!/usr/bin/env bash

set -ex

# declare essential variables
exp_name="$exp_name"
member="${member:-}"
start_dir="$start_dir"
out_dir="${DATADIR}"
run_start_date="$run_start_date"
run_start_date_as="${run_start_date_as:-}"
leg_start_date="$leg_start_date"
leg_end_date="$leg_end_date"
leg_start_date_yyyy="$leg_start_date_yyyy"
leg_end_date_yyyy_full="$leg_end_date_yyyy_full"
ifs_grid=$ifs_grid
ifs_output_freq="$ifs_output_freq"
ifs_time_step_sec="$ifs_time_step_sec"
ifs_version="$ifs_version"

# setup environment
source $start_dir/ecconf.cfg
source $start_dir/librunscript.sh
source $start_dir/liblsm.sh
configure
[ -z \$TMPDIR ] && error "you must define TMPDIR for your platform in configure()!!!"

# configure_python
# NSC-Tetralith: Uncomment to get the correct GRIB_BIN_PATH for grib_set and grib_copy
grib_copy=${GRIB_BIN_PATH}${GRIB_BIN_PATH:+/}grib_copy
grib_set=${GRIB_BIN_PATH}${GRIB_BIN_PATH:+/}grib_set
#configure_python
set -u

# override some defaults set by configure() in ecconf.cfg
run_dir="$run_dir"
land_param_dir=\${run_dir}/output/osm/land_param
lpjg_ifs_output_dir="\$run_dir/\$out_dir"
lpjg_forcing_dir="\${run_dir}/output/osm/lpjg_forcing"
osm_forcing_type="${osm_forcing_type}"

if ${do_post_all}
then
    # do osm+land_param+lpjg_forcing+icmcl post-processing
    #config=lpjg_forcing
    osm_post_all \$out_dir \${land_param_dir} \${lpjg_forcing_dir}
else
    # do basic OSM output post-processing, which generates grib output files
    osm_post_output \$out_dir
fi

if ${do_post_spinup}
then
    # run lpjg_gen_forcing_spinup to get lpjg_forcing yearly files and lpjg fast files for spinup
    config_bak="\${config:-}"
    config="lpjg:spinup"
    lpjg_gen_forcing_spinup
    config="\${config_bak}"
fi
EOF

    #
    # To run sequentially just do:
    # chmod +x ${exp_name}_osm_post_${LEG}.sh
    # ./${exp_name}_osm_post_${LEG}.sh

    ## To submit a job : platform dependent...
    #sbatch -n 1 -o ${start_dir}/${exp_name}_osm_post_${LEG}.out -p np16 ${exp_name}_osm_post_${LEG}.sh
    #sbatch -n 6 -o ${start_dir}/${exp_name}_osm_post_${LEG}.out -q debug ${exp_name}_osm_post_${LEG}.sh
}

function lpjg_gen_forcing1()
{
    # generic function to create/prepare forcing
    # IFS/OSM forcing is always implied when using lpjg_forcing
    has_config lpjg_forcing && lpjg_forcing_type='ifs' || error "only IFS forcing supported for LPJG-offline"

    FYEAR=$1 # year to process forcing
    # ${lpjg_forcing_dir} : must be defined as where the forcing is present or created
    # ${lpjg_ifs_output_dir} : must be defined, in case of forcing from IFS, location of raw output

    if $(has_config lpjg_forcing:gen_forcing) || ! [ -r ${lpjg_forcing_dir}/var170_${FYEAR}_dayavg.nc ]
    then
        # file not available, will need to be created
        if [ $lpjg_forcing_type = 'ifs' ]
        then
            echo "generating LPJG forcing in ${lpjg_forcing_dir} from data in ${lpjg_ifs_output_dir}"
            # forcing from an IFS experiment
            lpjg_gen_forcing_ifs $FYEAR
        else
            error "only IFS forcing supported for LPJG-offline"
        fi
    fi
    # end of forcing processing
}

function lpjg_gen_forcing()
{
    ## generic function to create/prepare forcing
    FYEAR1=$1 # first year to process forcing
    FYEAR2=$2 # last year to process forcing

    ## generate forcing for each year
    for (( yr=$FYEAR1 ; yr<=$FYEAR2 ; yr+=1 ))
    do
        lpjg_gen_forcing1 $yr
    done
}

function lpjg_gen_forcing_ifs()
{
    # set variables required by extract_daily_forcing_ifs
    y1=$1
    y2=$1
    y0=$(date -u -d "$run_start_date" +%Y)
    case "${ifs_grid}" in
        T159L*) npts=35718 ;;
        T255L*) npts=88838 ;;
        *) error "Can't set npts for unknown horizontal grid: ${ifs_grid}" ;;
    esac
    srcdir=$lpjg_ifs_output_dir
    #srcext=${lpjg_ifs_output_ext:-}
    outdir=$lpjg_forcing_dir
    tmpdir=$TMPDIR/$$/lpjg/lpjg_gen_forcing_ifs
    #tmpdir=${lpjg_forcing_dir}/tmp
    grib_filter=${GRIB_BIN_PATH}${GRIB_BIN_PATH:+/}grib_filter

    extract_daily_forcing_ifs
}
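## Example (sketch; lpjg_forcing_dir and lpjg_ifs_output_dir must be set, e.g. as in the post script
## written by osm_post_gen_script; years are illustrative):
# config=lpjg_forcing lpjg_gen_forcing 1990 1999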
# This script creates netcdf files used by sparring to drive LPJG offline from raw IFS output.
#
# Created By Klaus Wyser, some modifications by Etienne Tourigny
# ==============================================================================
# set these variables before calling extract_daily_forcing_ifs()
#expname=a0l3; npts=88838
#expname=AHIL; npts=35718
#y1=1990 ; y2=1990 ; y0=1990
#srcdir=/gpfs/scratch/bsc32/bsc32051/lpjg/a0l3/in/output/ifs
#outdir=/gpfs/scratch/bsc32/bsc32051/lpjg/a0l3/out
#tmpdir=$SNIC_TMP
#grib_filter=grib_filter
# ==============================================================================
function extract_daily_forcing_ifs()
{
    mkdir -p $outdir
    mkdir -p $tmpdir
    pushd .
    cd $tmpdir

    # create grib_filter script
    # add 169 (ssrd) and 175 (strd) for surface downward radiation, LPJG currently uses surface net radiation
    # soil moisture (39 40 41 42) and soil temp levels 1 and 4 (139 236) are currently not used by LPJG
    # total precipitation tp (228) is output by OSM, instead of lsp and cp (142 and 143)
    # leave out 33 (snow density) and 141 (snow depth) since they are not used by LPJG anyway
    # removed 144 (sf) since snowfall is included in cp and lsp
    sstring=
    for c in 39 40 41 42 139 167 170 183 236; do
        sstring+=" || marsParam is \"$c.128\""
    done
    sstring=${sstring:4:${#sstring}}
    fstring=
    for c in 142 143 228 176 177; do
        fstring+=" || marsParam is \"$c.128\""
    done
    fstring=${fstring:4:${#fstring}}
    cat > gf << EOT
if ( $sstring ) { write "x_state" ; }
if ( $fstring ) { write "x_flux" ; }
EOT

    # first timestep
    # for decadal runs, we start LPJG on Jan 1st but IFS output is available before that,
    # so we always use the output of the last timestep of the previous month if it exists
    # first look for .grb file, if not found look for file without extension
    ff0=$(find $srcdir -name "ICMGG????+$(( y1 -1 ))12.grb")
    [[ "$ff0" == "" ]] && ff0=$(find $srcdir -name "ICMGG????+$(( y1 -1 ))12")
    [[ "$ff0" == "" ]] && ff0=$(find $srcdir -name "ICMGG????+000000.grb")
    [[ "$ff0" == "" ]] && ff0=$(find $srcdir -name "ICMGG????+000000")
    ff0=( $ff0 )
    #[[ "$ff0" == "" ]] && error "Cannot find forcing for year $y1!"
    ym=$((y1-1))12
    # if first timestep is not found, continue (this happens with OSM output)
    #if [[ "$ff0" == "" ]] ; then
    if [ ${#ff0[@]} -eq 0 ] ; then
        echo "Cannot find first timestep of forcing for year $y1!"
    else
        # exit if found multiple files
        [ ${#ff0[@]} -gt 1 ] && error "Found ${#ff0[@]} files for first timestep of forcing for year $y1!"
        ${grib_filter} gf $ff0
        mv x_state x_state_$ym
        #cdo shifttime,"-6hour" x_flux x_flux_$ym
        mv x_flux x_flux_$ym
        rm -f x_flux
    fi

    for y in $(seq $y1 $y2); do
        for m in $(seq 1 12) ; do
            ym=$((100*y+m))
            mkdir -p $tmpdir/data_$ym
            cd $tmpdir/data_$ym
            #${grib_filter} ../gf $srcdir/output/ifs/$(printf %03d $((y-$((y0-1)))))/ICMGG????+$ym
            ff0=$(find $srcdir -name "ICMGG????+${ym}.grb")
            [[ "$ff0" == "" ]] && ff0=$(find $srcdir -name "ICMGG????+${ym}")
            ff0=( $ff0 )
            [ ${#ff0[@]} -eq 0 ] && error "Cannot find forcing for $ym!"
            # exit if found multiple files
            [ ${#ff0[@]} -gt 1 ] && error "Found ${#ff0[@]} files for forcing for $ym - ${ff0[@]}"
            ${grib_filter} ../gf $ff0
            mv x_state ../x_state_$ym
            #cdo shifttime,"-6hour" x_flux ../x_flux_$ym
            mv x_flux ../x_flux_$ym
            cd -
            rm -r $tmpdir/data_$ym
        done
    done

    # compute post-processing timestep, used to compute daily fluxes
    # TODO check cmip6 output with 3h and 6h output
    pptime=$(cdo showtime -seltimestep,1,2 x_flux_$ym | \
        tr -s ' ' ':' | awk -F: '{print ($5-$2)*3600+($6-$3)*60+($7-$4)}' )
    #pptimeh=$(bc -l <<< "$pptime / 3600")
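    ## Worked example of the flux conversions applied below (assuming 3-hourly output, i.e. pptime=10800 s):
    ## accumulated radiation in J m-2 per output step becomes a daily-mean W m-2 via -divc,10800, and
    ## precipitation in m per output step becomes kg m-2 s-1 via -mulc,$(bc -l <<< "1000.0 / 10800")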
    for y in $(seq $y1 $y2); do
        #cdo cat x_state_$((y-1))12 x_state_$y* y_state_$y
        #[ ! -f x_state_$((y-1))12 ] && cdo settime,00:00:00 -seltimestep,1 x_state_${y}01 x_state_$((y-1))12
        if [ -f x_state_$((y-1))12 ]
        then
            echo "File x_state_$((y-1))12 exists"
        else
            echo "Creating file x_state_$((y-1))12 from file x_state_${y}01 ...."
            cdo -seltimestep,1 x_state_${y}01 test
            cdo settime,00:00:00 test x_state_$((y-1))12
            rm -f test
        fi
        cdo cat x_state_$((y-1))12 x_state_$y* y_state_$y
        cdo splitcode -daymean -selyear,$y y_state_$y z_${y}_
        cdo cat x_flux_$y* y_flux_$y
        #cdo splitcode -daymean -selyear,$y y_flux_$y* z_${y}_
        cdo splitcode -daymean -selyear,$y -shifttime,"-${pptime}seconds" y_flux_$y* z_${y}_
    done

    for f in $(/bin/ls z_*grb ); do
        fnew=$(basename $f .grb | cut -d _ -f 3 )
        year=$(basename $f .grb | cut -d _ -f 2 )

        # set missval to reasonable values, following ifs-36r4/src/surf/offline/driver/cnt31s.F90
        case ${fnew} in
            "039" | "040" | "041" | "042" ) missval="0.2" ;;  # SoilMVeg
            "139" | "170" | "183" | "236" ) missval="280" ;;  # SoilTVeg
            "167" ) missval="280" ;;                          # T2MVeg
            "142" | "143" | "228" | "144" ) missval="0" ;;    # TPVeg
            "176" | "177" ) missval="0" ;;                    # SSRVeg SLRVeg
            "033" ) missval="330" ;;                          # SDensVeg
            "141" ) missval="0" ;;                            # SDVeg
            * ) missval="" ;;
        esac
        [ "$missval" != "" ] && setmissval="-setmissval,$missval" || setmissval=""

        # convert fluxes to daily values as sent from IFS to LPJG
        case ${fnew} in
            # convert timestep accumulated J m-2 to daily W m-2
            # first multiply daily average of timestep accumulated (z_) by #timesteps in day,
            # then divide by sec/day to get W m-2 per day -> (86400/pptime)/86400 = 1/pptime
            "176" | "177" ) convert="-divc,${pptime}" ;;  # SSRVeg SLRVeg
            # precip - convert m to kg m-2 s-1
            "142" | "143" | "228" | "144" ) convert="-mulc,"$( bc -l <<< "1000.0 / $pptime" ) ;;  # TPVeg
            * ) convert="" ;;
        esac

        cdo -f nc4c -z zip_2 setgrid,g${npts}x1 $convert $setmissval $f $outdir/var${fnew}_${year}_dayavg.nc
    done

    # cleanup
    rm -f gf x_state_* x_flux_* y_state_* y_flux_* z_*grb

    popd
}

function lpjg_gen_forcing_spinup()
{
    if ! $(has_config lpjg:spinup)
    then
        echo "not doing anything, since lpjg:spinup is not in config"
        return
    fi

    y0=$(date -u -d "${run_start_date}" +%Y)
    y1=$(( y0 + 9 ))
    fileyears=${y0}-${y1}
    vars_depth="39 40 41 42 170 183"
    vars_nodepth="167 176 177 228"

    if [ -r ${lpjg_forcing_dir}/var170_${fileyears}_fast.nc ]
    then
        echo "skipping lpjg_gen_forcing_spinup() since forcings have been found in ${lpjg_forcing_dir}"
        return
    fi

    # first make sure we have all yearly forcings
    local config_bak="${config}"
    config=lpjg_forcing
    lpjg_gen_forcing $y0 $y1
    config="${config_bak}"

    cd $lpjg_forcing_dir

    # concatenate all files
    for var in ${vars_depth} ${vars_nodepth}
    do
        var0=$(printf %03d $var)
        cdo_str=""
        #[ -f var${var}_$fileyears.nc ] && continue
        for y in `seq $y0 $y1`
        do
            cdo_str+=var${var0}_${y}_dayavg.nc" "
        done
        cdo -O -f nc4c -z zip_2 mergetime $cdo_str var${var}_$fileyears.nc
    done

    # nco module is needed for ncpdq
    # adjust this for your platform, this works on marenostrum4
    module load gsl/2.4 udunits/2.2.25 nco/4.2.3_netcdf-4.2

    # convert to "fast" format
    for var in ${vars_depth}
    do
        #[ ! -f var$var"_"$fileyears"_fast.nc" ] && ncpdq -O -L 2 -a x,time,y,depth var$var"_"$fileyears".nc" var$var"_"$fileyears"_fast.nc"
        ncpdq -O -L 2 -a x,time,y,depth var$var"_"$fileyears".nc" var$var"_"$fileyears"_fast.nc"
        #ncpdq -O -a x,time,y,depth $var"_"$fileyears".nc" $var"_"$fileyears"_fast.nc"
    done
    for var in ${vars_nodepth}
    do
        ncpdq -O -L 2 -a x,time,y var$var"_"$fileyears".nc" var$var"_"$fileyears"_fast.nc"
    done

    cd -
}
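## Example (sketch; run_start_date, lpjg_forcing_dir and the raw forcing inputs must be available,
## and the nco module providing ncpdq must be loadable on the platform):
# config="lpjg:spinup" lpjg_gen_forcing_spinup   # builds merged <y0>-<y0+9> and "_fast" files in ${lpjg_forcing_dir}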