Browse Source

Lemaitre3 adaptations

Pierre-Yves Barriat 3 years ago
parent
commit
b7e7054f1e
39 changed files with 2200 additions and 456 deletions
  1. 6 0
      .gitignore
  2. 202 1
      README.md
  3. 7 13
      barakuda.sh
  4. 19 19
      build_clim.sh
  5. 14 0
      cdftools_light/macro/macro.easybuild
  6. 17 0
      cdftools_light/macro/macro.gfortran_datarmor
  7. 14 0
      cdftools_light/make.macro
  8. 5 1
      cdftools_light/src/cdficediags.f90
  9. 235 0
      configs/config_NANUK025_L75_datarmor.sh
  10. 239 0
      configs/config_ORCA1_L75_NEMO4_lm3.sh
  11. 238 0
      configs/config_ORCA1_L75_lemaitre3.sh
  12. 238 0
      configs/config_ORCA1_L75_lemaitre3_ec.sh
  13. BIN
      data/mesh_mask_nemo.N3.6_ORCA1L75.nc
  14. 57 40
      python/exec/convert_ps_to_SA.py
  15. 32 14
      python/exec/convert_pt_to_CT.py
  16. 5 1
      python/exec/cross_sections.py
  17. 154 0
      python/exec/field_to_mask.py
  18. 35 21
      python/exec/image_to_netcdf.py
  19. 5 1
      python/exec/mean_3d.py
  20. 1 1
      python/exec/movie_nemo_globe.py
  21. 384 0
      python/exec/movie_nemo_section.py
  22. 9 9
      python/exec/netcdf_to_image_bw.py
  23. 1 0
      python/exec/prepare_movies.py
  24. 3 3
      python/exec/show_global_orca_field.py
  25. 4 1
      python/exec/temp_sal.py
  26. 4 4
      python/exec/zgr2_slider.py
  27. 20 12
      python/modules/barakuda_colmap.py
  28. 80 53
      python/modules/barakuda_ncio.py
  29. 12 12
      python/modules/barakuda_plot.py
  30. 1 1
      python/modules/barakuda_plot_extra.py
  31. 40 207
      python/modules/barakuda_thermo.py
  32. 43 11
      python/modules/barakuda_tool.py
  33. 2 2
      python/test_a_python_diag.sh
  34. 40 0
      rebuild.sh
  35. BIN
      seaice_diags.nc
  36. 32 27
      src/bash/bash_functions.bash
  37. 1 1
      src/html/conf_end.html
  38. 1 1
      src/html/conf_start.html
  39. BIN
      tmp_ice.nc

+ 6 - 0
.gitignore

@@ -0,0 +1,6 @@
+*.pyc
+*.o
+*.mod
+*.log
+*.err
+cdftools_light/bin/

+ 202 - 1
README.md

@@ -1,3 +1,204 @@
 # barakuda
 
-Fork of: https://github.com/brodeau/barakuda
+Fork of:
+
+![Barakuda Logo](https://brodeau.github.io/barakuda/logo.svg)
+
+Example of a set of web pages generated by Barakuda:
+https://brodeau.github.io/barakuda/example/
+
+## Getting-started with Barakuda
+
+### Requirements
+
+* A FORTRAN 90 compiler
+
+* netcdf library with support for the appropriate F90 compiler
+
+* NCO
+
+* 'convert' from ImageMagick if you want to generate GIF movies
+
+* 'ffmpeg' with x264 support if you want to generate MP4 movies
+
+* For time-series and 2D plots, the following up-to-date packages:
+  => python-netcdf4 (from netCDF4 import Dataset) and Matplotlib
+  => for map projections you'll also need the Basemap package
+  
+  A good idea is to install a shiny python distribution, something like Canopy:
+  => https://www.enthought.com/products/canopy/
+
+  In any case, specify the appropriate "PYTHON_HOME" environment variable in
+  your ${BARAKUDA_ROOT}/configs/config_<MYCONF>.sh or ./config_<MYCONF>.sh file
+
+* NEMO output data! => A directory containing the MONTHLY-AVERAGED, global
+                       (rebuilt), NEMO output to analyze
+  (grid_T, grid_U, grid_V and icemod files) as "*.nc", "*.nc.gz" or ".nc4"
+
+  For NEMO 3.6 and above some appropriate sets of xml configuration files for
+  XIOS2 can be found in: src/xios2_xml/
+
+* a NEMO mesh_mask file and the corresponding basin_mask (ocean basins).
+  (variables MM_FILE and BM_FILE into the config_<MYCONF>.sh file you use).
+  
+  To create the NEMO mesh_mask.nc just launch the relevant NEMO experiment with the
+  namelist parameter nn_msh set to 1 !
+
+  For both ORCA1 and ORCA025 configs (regardless of the number of levels) it is
+  safe to use the basin_mask.nc provided in the "data/" sub-directory of Barakuda.
+  
+  If you want to create your own basin_mask.nc containing your favorite
+  seas/regions, proceed as follows:
+
+  1. use the "python/exec/orca_mesh_mask_to_bitmap.py" python script to create a
+  black-and-white bitmap image of the land-sea-mask from the mesh_mask.nc
+  generated by NEMO.
+
+  2. use your favorite raster image editor (Gimp, PhotoShop, Paint, etc) to
+  easily edit the bitmap image and create a new image of your sea/region of
+  interest. Save it as a "tiff" image!
+
+  3. then you can use "python/exec/tiff_to_orca_mask.py" python script to
+  generate the new basin_mask.nc netcdf file out of your tiff images!
+
+
+
+### I / Compile CDFTOOLS executables 
+
+ * CDFTOOLS is a set of FORTRAN executables intended to perform a multitude of
+   ocean diagnostics based on NEMO output
+   (https://github.com/meom-group/CDFTOOLS). However, this is a slightly
+   modified light version here...  SO DO NOT USE AN OFFICIAL CDFTOOLS
+   DISTRIBUTION, stick to the one that comes with Barakuda!
+
+* move to the 'barakuda/cdftools_light' directory
+
+* configure your own 'make.macro' for your system (some templates for gfortran
+  and Intel are provided...)
+    => just copy or link your own "macro.your_arch" to "make.macro" !
+    => F90 compiler and related netcdf library to use
+
+* compile with 'gmake'
+
+* if that was successful the 'barakuda/bin' directory should contain the 8
+  following executables:
+
+        * cdficediags.x
+        * cdfmaxmoc.x
+        * cdfmhst.x
+        * cdfmoc.x
+        * cdfpsi.x
+        * cdfsigtrp.x
+        * cdficeflux.x
+        * cdftransportiz.x
+        * cdfvT.x
+
+           
+
+### II / Create and configure your own "config_<MY_CONF>.sh"
+
+All setup related to your host, simulation, location of third party files is
+defined in the "config_<MY_CONF>.sh" file.
+
+You can either choose a config file located in the
+"${BARAKUDA_ROOT}/configs" directory of Barakuda:
+('${BARAKUDA_ROOT}/configs/config_<MY_CONF>.sh')
+
+Or, in case you have no write access into ${BARAKUDA_ROOT}/ and call the Barakuda
+suite of scripts from another location, hereafter "work directory", you can use
+a "config_<MY_CONF>.sh" present in the "work directory".
+
+Note: if a given "config_<MY_CONF>.sh" exists both in "${BARAKUDA_ROOT}/configs"
+and the "work directory", Barakuda will always refer to "config_<MY_CONF>.sh"
+present in the "work directory".
+
+IMPORTANT: Always refer to the most relevant
+'${BARAKUDA_ROOT}/configs/config_*_TEMPLATE.sh' file to design or re-adjust
+yours! These are symbolic links pointing to the last officially supported and
+most up-to-date config files.  It should be sufficiently well commented for you
+to be able to adjust your own config file.
+
+MY_CONF should always be of the form: "(e)ORCA<RES>_L<NLEV>_<blabla>.sh"
+        ( with NLEV being the number of z levels )
+
+NEMO output files must be monthly averages and of the following form:
+
+        <EXP NAME>_1m_<YEAR>0101_<YEAR>1231_<GRID_TYPE>.nc(.gz)
+
+        (GRID_TYPE=grid_T/grid_U/grid_V/icemod) 
+
+Gzipped or not!
+
+All files for all years must all be saved in the same directory (see
+NEMO_OUT_STRCT in the config file). Better if this directory only contains NEMO
+output files and nothing else!
+
+Alternatively NEMO files can be saved/organized in sub-directories a la
+EC-Earth: (ex: year 1995 of experiment started in 1990 is the 6th year so files for
+1995 are saved into sub-directory (of NEMO_OUT_STRCT) "006" (set 'ece_exp' to 1
+or 2 then).
+
+If you want to perform the "climatology" plots (see section IV) you will need
+monthly "observed" 2D and 3D of T and S (and sea-ice fraction) data interpolated
+on the ORCA grid you are using. Usually you should already have them since they
+are needed to initialize your simulation (initial state for T & S). These are
+the following files in your Barakuda config file: F_T_OBS_3D_12, F_S_OBS_3D_12,
+F_SST_OBS_12, F_ICE_OBS_12.
+
+Alternatively, you can download these climatologies for the ORCA1.L75 and
+ORCA025.L75 configurations here:
+http://misu228.misu.su.se/barakuda/input_barakuda/ORCA1.L75_barakuda.tar
+http://misu228.misu.su.se/barakuda/input_barakuda/ORCA025.L75_barakuda.tar
+
+The "CONF_INI_DIR" variable in your config file should point to the location of
+the directory you created by untarring one of these tar archives.
+
+
+
+### III) Create diagnostics
+
+
+Launch "barakuda.sh"
+
+       ./barakuda.sh -C <MY_CONF> -R <EXP> -f <years> -y <YYYY>
+
+       (ex: ./barakuda.sh -C ORCA1_L75_v36_triolith -R SL36C00)
+
+Use the -h switch to see available options.
+
+
+
+### IV) Create figures and browsable HTML page
+
+* Once the previous job has finished running, launch
+
+To only generate time-series plots use the "-e" switch:
+
+        ./barakuda.sh -C <MY_CONF> -R <EXP> -e
+
+        (ex: ./barakuda.sh -C ORCA1_L75_v36_triolith -R SL36C00 -e)
+
+To generate time-series + 2D climatology plots use the "-E" switch, provided you
+have built the monthly/annual climatology (based on N years of your simulation)
+out of your experiment with the "build_clim.sh" script (see next bullet point):
+     
+        ./barakuda.sh -C <MY_CONF> -R <EXP> -E
+
+* To be able to create the "climatology" plots (maps, sections, etc, based on a monthly climatology of a few years) you will have to
+
+  1. create the climatology with the "build_clim.sh" script:
+
+        ./build_clim.sh -C <MY_CONF> -R <EXP> -i <first_year> -e <last_year>
+
+    Use the -h switch to see available options.
+      
+  2. then you can tell "barakuda.sh" to create climatology-related plots by using
+     the "-E" switch instead of "-e" (see point V/A)
+
+
+* To compare time-series between at least 2 (already diagnosed) experiments:
+   
+         ./compare_time-series.sh -C <MY_CONF> -R <EXP1>,<EXP2>,...,<EXPn>
+
+         (ex: ./compare_time-series.sh -C ORCA1_L75_v36_triolith -R SL36C00,SL36EIE )
+

+ 7 - 13
barakuda.sh

@@ -57,16 +57,12 @@ else
 fi
 echo
 
-
-
-
 # If auto-submit experiment (ece_exp=10) then overides a few functions with:
 if [ ${ece_exp} -ge 10 ]; then
     echo "Sourcing ${BARAKUDA_ROOT}/src/bash/bash_functions_autosub.bash"
     . ${BARAKUDA_ROOT}/src/bash/bash_functions_autosub.bash
 fi
 
-
 # If 3D fieds are annual averaged then overides a few functions with:
 if [ ! "${ANNUAL_3D}" = "" ]; then
     . ${BARAKUDA_ROOT}/src/bash/bash_functions_1y.bash
@@ -90,7 +86,6 @@ echo "   *** IFREQ_SAV_YEARS = ${IFREQ_SAV_YEARS} "
 echo "   *** NCDF_DIR        = ${NCDF_DIR} "
 echo
 
-
 if [ ${ISTAGE} -eq 1 ]; then
     barakuda_first_last_years ; # look at NEMO files to know what are first and last years available...
     echo ${IFREQ_SAV_YEARS} > ${DIAG_D}/numb_year_per_file.info
@@ -104,13 +99,12 @@ cyear_ini=`printf "%04d" ${YEAR_INI}`
 cyear_end=`printf "%04d" ${YEAR_END}`
 
 # For proper python executables and scripts to be found:
-export PATH=${PYBRKD_EXEC_PATH}:${BARAKUDA_ROOT}/src/bash:${PYTHON_HOME}/bin:${PATH}
+#export PATH=${PYBRKD_EXEC_PATH}:${BARAKUDA_ROOT}/src/bash:${PYTHON_HOME}/bin:${PATH}
+export PATH=${PYBRKD_EXEC_PATH}:${BARAKUDA_ROOT}/src/bash:${PATH}
 
 #                                   setup over
 ######################################################################################
 
-
-
 jyear=${YEAR_INI}
 
 fcompletion=${DIAG_D}/last_year_done.info
@@ -139,12 +133,12 @@ while ${lcontinue}; do
 
     export cyear=`printf "%04d" ${jyear}`
     cpf=""
-    if [ ${ISTAGE} -eq 1 ] && [ ${ece_exp} -gt 0 ]; then
+    #if [ ${ISTAGE} -eq 1 ] && [ ${ece_exp} -gt 0 ]; then
         iy=$((${jyear}-${YEAR_INI}+1+${YEAR_INI}-${YEAR_INI_F}))
         dir_ece=`printf "%03d" ${iy}`
         echo " *** ${cyear} => dir_ece = ${dir_ece}"
         cpf="${dir_ece}/"
-    fi
+    #fi
 
     i_get_file=0
     if [ $((${jyear}%${IFREQ_SAV_YEARS})) -eq 0 ]; then
@@ -211,7 +205,7 @@ while ${lcontinue}; do
         if [ ${ece_exp} -eq 2 ] && [ ${NBL} -eq 75 ] && [ ${i_do_ifs_flx} -eq 1 ]; then
             echo; echo; echo "Fluxes of freshwater at the surface from IFS..."
             echo " *** CALLING: extract_ifs_surf_fluxes.sh &"
-            extract_ifs_surf_fluxes.sh &
+            ${BARAKUDA_ROOT}/src/bash/misc_ifs/extract_ifs_surf_fluxes.sh &
             pid_flxl=$! ; echo
         fi
 
@@ -270,7 +264,7 @@ while ${lcontinue}; do
             echo " *** CALLING: prepare_movies.py ${fj1m} ${jyear} ice &"
             prepare_movies.py ${fj1m} ${jyear} ice &
             pid_movi=$! ; echo
-        fi        
+        fi      
 
         # ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
         # Computing time-series of spatially-averaged variables
@@ -969,6 +963,6 @@ else
     echo
 fi
 
-rm -rf ${TMP_DIR} 2>/dev/null ; #debug
+#rm -rf ${TMP_DIR} 2>/dev/null ; #debug
 
 echo

+ 19 - 19
build_clim.sh

@@ -131,33 +131,33 @@ C2EU="nav_lon,nav_lat,depthu"
 C2EV="nav_lon,nav_lat,depthv"
 C2EW="nav_lon,nav_lat,depthw"
 
-GRID_IMP="grid_T"
+GRIDIMP="gridT"
 if [ ${ivt} -eq 1 ] || [ ${ibpsi} -eq 1 ] || [ ${icurl} -eq 1 ]; then
-    GRID_IMP+=" grid_U"
+    GRIDIMP+=" gridU"
 fi
 if [ ${iamoc} -eq 1 ] || [ ${ivt} -eq 1 ] || [ ${ibpsi} -eq 1 ] || [ ${icurl} -eq 1 ]; then
-    GRID_IMP+=" grid_V"
+    GRIDIMP+=" gridV"
 fi
 if [ `contains_string ${FILE_ICE_SUFFIX} ${NEMO_SAVED_FILES}` -eq 1 ]; then
-    GRID_IMP+=" ${FILE_ICE_SUFFIX}"
+    GRIDIMP+=" ${FILE_ICE_SUFFIX}"
 fi
 if [ `contains_string SBC ${NEMO_SAVED_FILES}` -eq 1 ]; then
-    GRID_IMP+=" SBC"
+    GRIDIMP+=" ${FILE_FLX_SUFFIX}"
 fi
-echo; echo " GRID_IMP = ${GRID_IMP}"; echo
+echo; echo " GRIDIMP = ${GRIDIMP}"; echo
 
 
 # Checking what files we have / plan to use:
 if [ -z "${NEMO_SAVED_FILES}" ]; then
-    echo "Please specify which NEMO files are saved (file suffixes, grid_T, ..., icemod) ?"
+    echo "Please specify which NEMO files are saved (file suffixes, gridT, ..., icemod) ?"
     echo " => set the variable NEMO_SAVED_FILES in your config_${CONFIG}.sh file!"; exit
 fi
-VAF=( "grid_T" "grid_U" "grid_V" "icemod" "SBC" )
+VAF=( "gridT" "gridU" "gridV" "${FILE_ICE_SUFFIX}" "${FILE_FLX_SUFFIX}" )
 js=0 ; gimp_new=""
 for sf in ${VAF[*]}; do
     echo "Checking ${sf}..."
     ca=`echo "${NEMO_SAVED_FILES} ${NEMO_SAVED_FILES_3D}" | grep ${sf}`
-    cb=`echo "${GRID_IMP}"         | grep ${sf}`
+    cb=`echo "${GRIDIMP}"         | grep ${sf}`
     if [ "${ca}" = "" ]; then
         if [ "${cb}" != "" ]; then
             echo "PROBLEM! The diags you specified say you need ${sf} files"
@@ -168,8 +168,8 @@ for sf in ${VAF[*]}; do
     fi
     ((js++))
 done
-GRID_IMP=${gimp_new}
-echo; echo "File types to import: ${GRID_IMP}"; echo; echo
+GRIDIMP=${gimp_new}
+echo; echo "File types to import: ${GRIDIMP}"; echo; echo
 
 
 VCM=( "01" "02" "03" "04" "05" "06" "07" "08" "09" "10" "11" "12" )
@@ -221,15 +221,15 @@ while [ ${jyear} -le ${Y2} ]; do
     barakuda_import_files
 
     # Monthly files to work with for current year:
-    ft1m=${CRT1M}_grid_T.nc
-    fu1m=${CRT1M}_grid_U.nc
-    fv1m=${CRT1M}_grid_V.nc
+    ft1m=${CRT1M}_gridT.nc
+    fu1m=${CRT1M}_gridU.nc
+    fv1m=${CRT1M}_gridV.nc
     # Annual files to work with for current year:
     CRT1Y=`echo ${CRT1M} | sed -e s/"_${TSTAMP}_"/"_${ANNUAL_3D}_"/g`
-    ft1y=${CRT1Y}_grid_T.nc
-    fu1y=${CRT1Y}_grid_U.nc
-    fv1y=${CRT1Y}_grid_V.nc
-    fj1y=${CRT1Y}_${FILE_ICE_SUFFIX}.nc ; # can be icemod or grid_T ....
+    ft1y=${CRT1Y}_gridT.nc
+    fu1y=${CRT1Y}_gridU.nc
+    fv1y=${CRT1Y}_gridV.nc
+    fj1y=${CRT1Y}_${FILE_ICE_SUFFIX}.nc ; # can be icemod or gridT ....
     CFG3D=${CRT1M}
     CPREF3D=${CPRMN}
     #
@@ -238,7 +238,7 @@ while [ ${jyear} -le ${Y2} ]; do
     fu3d=${fu1m}
     fv3d=${fv1m}
     if [ "${ANNUAL_3D}" = "1y" ]; then
-        [[ ${NEMO_SAVED_FILES_3D} =~ (^|[[:space:]])"grid_U"($|[[:space:]]) ]] \
+        [[ ${NEMO_SAVED_FILES_3D} =~ (^|[[:space:]])"gridU"($|[[:space:]]) ]] \
             && CPREF3D=${CPRAN}; CFG3D=${CRT1Y}; ft3d=${ft1y}; fu3d=${fu1y}; fv3d=${fv1y} \
             || echo "...default"
         echo ""

+ 14 - 0
cdftools_light/macro/macro.easybuild

@@ -0,0 +1,14 @@
+# Makefile for CDFTOOLS
+# --------------------------------------------------------------
+
+NCDF_DIR=$(EBROOTNETCDF)
+NCDFF_DIR=$(EBROOTNETCDFMINFORTRAN)
+
+NCDF= -I$(NCDF_DIR)/include -I$(NCDFF_DIR)/include -L$(NCDF_DIR)/lib64 -lnetcdf -L$(NCDFF_DIR)/lib -lnetcdff
+
+# -lnetcdff
+
+F90=gfortran
+
+FFLAGS= -O2 -I./mod -J./mod $(NCDF)
+

+ 17 - 0
cdftools_light/macro/macro.gfortran_datarmor

@@ -0,0 +1,17 @@
+# Makefile for CDFTOOLS
+#    $Rev: 173 $
+#    $Date: 2008-03-17 11:42:21 +0100 (Mon, 17 Mar 2008) $
+# --------------------------------------------------------------
+
+NCDF_DIR=/appli/netCDF/netcdf-4.4.1.1__gcc-6.3.0__nop
+
+NCDF= -I$(NCDF_DIR)/include -L$(NCDF_DIR)/lib -lnetcdf -lnetcdff
+
+F90=gfortran
+
+# -fdefault-real-8 -fbounds-check
+
+FFLAGS= -O2 -I./mod -J./mod $(NCDF)
+
+#INSTALL=../bin/
+

+ 14 - 0
cdftools_light/make.macro

@@ -0,0 +1,14 @@
+# Makefile for CDFTOOLS
+# --------------------------------------------------------------
+
+NCDF_DIR=$(EBROOTNETCDF)
+NCDFF_DIR=$(EBROOTNETCDFMINFORTRAN)
+
+NCDF= -I$(NCDF_DIR)/include -I$(NCDFF_DIR)/include -L$(NCDF_DIR)/lib64 -lnetcdf -L$(NCDFF_DIR)/lib -lnetcdff
+
+# -lnetcdff
+
+F90=gfortran
+
+FFLAGS= -O2 -I./mod -J./mod $(NCDF)
+

+ 5 - 1
cdftools_light/src/cdficediags.f90

@@ -55,7 +55,11 @@ PROGRAM cdficediag
   INTEGER :: id_volu_n, id_area_n, id_volu_s, id_area_s
   !! LOLO.
 
-
+  ! Sea-ice extent and volume...
+  !ncks  -A -v siconc NPRIM5_1m_19580101_19581231_icemod.nc -o tmp_ice.nc
+  !ncks  -A -v sivolu NPRIM5_1m_19580101_19581231_icemod.nc -o tmp_ice.nc
+  ! *** CALLING: ./cdficediags.x tmp_ice.nc 1958
+  ! /scratch/ucl/elic/pbarriat/nemo/archive/<EXP>/diag/ORCA1.L75-NPRIM5
 
   ! constants
 

+ 235 - 0
configs/config_NANUK025_L75_datarmor.sh

@@ -0,0 +1,235 @@
+#!/bin/bash
+
+#==========================================================
+#
+#         Configuration file for
+#
+# OCEAN MONITORING for NEMO v3.6 of NANUK
+#
+#        Machine: triolith.nsc.liu.se
+#
+#        L. Brodeau, 2017
+#
+#===========================================================
+
+export CONF=NANUK025 ; # horizontal global ORCA configuration
+export NBL=75           ; # number of levels
+
+export HOST=datarmor2 ; # this has no importance at all, it will just become an "info" on the web-page!
+export MASTERMIND="Laurent B." ; # same here, who's the person who designed/ran this simulation?
+
+export EXTRA_CONF="OPA (NEMO 3.6) -- OASIS -- neXtSIM" ;   #  // same here ...
+
+# Path / directory structure in which to find NEMO output file (you can use
+# <ORCA> and <EXP> as substitute to your ORCA grid and experiment (EXP) name):
+export NEMO_OUT_STRCT="/home3/datawork/lbrodeau/<ORCA>/<ORCA>-<EXP>-S/opa/00000001-00035040"
+
+# Path to root directory where to save the diagnostics (diagnostics for this "CONF"):
+export DIAG_DIR="/home3/scratch/lbrodeau/barakuda"
+
+# Path to directory containing some 2D and 3D climatologies on the relevant ORCA grid:
+export CONF_INI_DIR="/home3/datawork/lbrodeau/NANUK025/NANUK025-I/barakuda"
+
+# Temporary file system (scratch) on which to perform the job you can use <JOB_ID> if scratch depends on JOB ID:
+#export SCRATCH="/scratch/local/<JOB_ID>"
+export SCRATCH="/home3/scratch/lbrodeau/tmp"
+
+export PYTHON_HOME="/appli/anaconda/2.7/envs/intel-python" ; # HOME to python distribution with matplotlib and basemap !
+
+export DIR_NCVIEW_CMAP="${BARAKUDA_ROOT}/src/ncview_colormaps"
+
+# Is it an ec-earth experiment?
+export ece_exp=0 ; # 0 => not an EC-Earth experiment, it's a "pure" ocean-only NEMO experiment done from traditional NEMO setup
+#                  # 1 => it's an OCEAN-ONLY EC-Earth experiment done from a EC-Earth setup
+#                  # 2 => it's a  COUPLED  EC-Earth experiment
+#                  #      Both 1 and 2 imply that NEMO files are stored in something like
+#                  #       ${SOMEWHERE}/<EXP>/output/nemo/<YYY>
+#                  #       where YYY starts from '001' to
+#                  #      If you select '2', make sure 'cdo' is available and working!!!
+#                  # 10 => this experiment controlled by AutoSubmit (so NEMO files are tarred somewhere?)
+#
+export Y_INI_EC=1995 ;    # initial year if ece_exp /= 0 !!!
+export M_INI_EC="01" ;    # initial month, only needed if ece_exp >= 10 !!!
+export NCHNKS_Y=1    ;    # number of chunks per year if ece_exp >= 10 (only needed if NCHNKS_Y >= 2 !)
+export TRES_IFS=511  ;    # spectral resolution for IFS, ex: T255 => TRES_IFS=255
+###--- end EC-Earth IFS relate section ---
+
+export ATMO_INFO="IFS T${TRES_IFS}" ; # Name of atmospheric model or forcing used (ex: COREv2, DFS5.2, IFS T255, ect...)
+
+# List of suffix of files that have been saved by NEMO and contain MONTHLY averages:
+export NEMO_SAVED_FILES="gridT gridU gridV icemod flxT"
+
+export TSTAMP="1m"   ; # output time-frequency stamp as in NEMO output files...
+
+# In case 3D fields have been saved on an annual mean basis rather than monthly:
+export ANNUAL_3D="" ;   # leave blank "" if 3D fields are in monthly files...
+export NEMO_SAVED_FILES_3D="" ; #     ''
+
+# How does the nemo files prefix looks like
+# Everything before "<year_related_info>_grid_<X>" or "<year_related_info>_icemod"
+# use <ORCA>, <EXP> and <TSTAMP>=>  Ex: export NEMO_FILE_PREFIX="<ORCA>-<EXP>_<TSTAMP>_"
+export NEMO_FILE_PREFIX="<ORCA>-<EXP>_<TSTAMP>_"
+# => should get rid of TSTAMP actually...
+
+
+####### NEMO => what fields in what files ??? ############
+#       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+#   => depends on the XIOS *.xml setup you used...
+#   => always specify a string for the NN_* variables
+#      USE "X" if the field is not present in your NEMO output
+#
+# State variables and others in grid_T files:
+export NN_SST="tos"
+export NN_SSS="sos"
+export NN_SSH="zos"
+export NN_T="thetao"
+export NN_S="so"
+export NN_MLD="mldr10_1"
+#
+# State variables and others in grid_U files:
+export NN_U="uo"
+export NN_TAUX="tauuo"
+export NN_U_EIV="0" ; # 0 => ignore
+# State variables and others in grid_V files:
+export NN_V="vo"
+export NN_TAUY="tauvo"
+export NN_V_EIV="0" ; # 0 => ignore
+#
+# Sea-ice fields:
+export FILE_ICE_SUFFIX="icemod" ; # in what file type extension to find ice fields
+export NN_ICEF="siconc" ; # name of ice fraction in "FILE_ICE_SUFFIX" file...
+export NN_ICET="sivolu" ; # ice thickness or rather volume...
+export NN_ICEU="sivelu" ; # ice U-velocity
+export NN_ICEV="sivelv" ; # ice V-velocity
+#
+# Surface fluxes:
+export FILE_FLX_SUFFIX="flxT" ; # in what file type extension to find surface fluxes (normally: "SBC")
+####                           # => mind that $FILE_FLX_SUFFIX must be also in NEMO_SAVED_FILES (above)
+#### Note: in fields marked with *+/-* you can use a sum or subtraction of variables (no space allowed!)
+####       ex: NN_EMP="evap_ao_cea+subl_ai_cea-precip"
+####           NN_QNET="qsr+qnsol"
+# ++ Surface freshwater fluxes:
+export NN_FWF="wfo"       ; # *+/-* name of net freshwater flux (E-P-R) in "FILE_FLX_SUFFIX" file...
+export NN_EMP="evap_ao_cea+subl_ai_cea-precip" ; # *+/-* name of E-P in "FILE_FLX_SUFFIX" file...
+export NN_P="precip"      ; # name of total precipitation (solid+liquid) in "FILE_FLX_SUFFIX" file...
+export NN_RNF="runoffs"   ; # name of continental runoffs in "FILE_FLX_SUFFIX" file...
+export NN_CLV="calving"   ; # calving from icebergs in "FILE_FLX_SUFFIX" file...
+export NN_E="evap_ao_cea+subl_ai_cea" ; # *+/-* name of total evaporation in "FILE_FLX_SUFFIX" file...
+# ++ Surface heat fluxes:
+export NN_QNET="qt_oce"   ; # *+/-* name of total net surface heat flux in "FILE_FLX_SUFFIX" file...
+export NN_QSOL="rsntds"   ; # name of net surface solar flux in "FILE_FLX_SUFFIX" file...
+# ++ Wind-stress module:
+export NN_TAUM="taum"        ; # name of surface wind stress module in "FILE_FLX_SUFFIX" file...
+export NN_WNDM="windsp"      ; # name of surface wind  speed module in "FILE_FLX_SUFFIX" file...
+#
+################################################################################################
+
+# Land-sea mask and basins files:
+export MM_FILE=${CONF_INI_DIR}/mesh_mask_NANUK025_3.6.nc
+export BM_FILE=${MM_FILE}
+
+# OBSERVATIONS / REFERENCES
+# 3D monthly climatologies of potential temperature and salinity (can be those you used for the NEMO experiment):
+export NM_TS_OBS="EN4.2.0 [1990-2010]"
+export F_T_OBS_3D_12=${CONF_INI_DIR}/thetao_EN.4.2.0_ORCA025L75_mclim_1990-2010.nc4
+export F_S_OBS_3D_12=${CONF_INI_DIR}/so_EN.4.2.0_ORCA025L75_mclim_1990-2010.nc4
+export F_SST_OBS_12=${CONF_INI_DIR}/thetao_EN.4.2.0_ORCA025L75_mclim_1990-2010.nc4
+export NN_T_OBS="thetao"
+export NN_S_OBS="so"
+export NN_SST_OBS="thetao"
+#
+# Sea-ice:
+export NM_IC_OBS="Hurrell et al 2008 [1980-1999]"
+export F_ICE_OBS_12=${CONF_INI_DIR}/ice_cover_180x360-CREG025_Hurrell_monthly_mean1980-1999.nc
+export NN_ICEF_OBS="ice_cover"
+#
+# Surface Heat fluxes:
+export NM_QSOL_OBS="NOCS 2.0 [1980-2005]"
+export F_QSOL_OBS_12=${BARAKUDA_ROOT}/data/obs/radsw_monthly_clim_1980-2005_NOCS2.nc4
+export NN_QSOL_OBS="radsw"
+
+
+# A text file where the cross sections (to compute transports) are defined :
+export TRANSPORT_SECTION_FILE="${BARAKUDA_ROOT}/data/transportiz_ORCA025_y1050.dat"        ; # set i_do_trsp=1 !
+export TRANSPORT_SECTION_FILE_ICE="${BARAKUDA_ROOT}/data/transport_ice_ORCA025_y1050.dat"  ; # set i_do_trsp_ice=1 !
+
+# For transport by sigma-class:
+export DENSITY_SECTION_FILE="${BARAKUDA_ROOT}/data/dens_section_ORCA025_y1050.dat"
+
+# Files with the list of rectangular domains to "analyze" more closely:
+export FILE_DEF_BOXES="${BARAKUDA_ROOT}/data/def_boxes_convection_ORCA025_y1050.txt"
+export FILE_DMV_BOXES="${BARAKUDA_ROOT}/data/def_boxes_convection_ORCA025_y1050.txt"
+
+# In what format should figures be produced ('png' recommended, but 'svg' supported!):
+export FIG_FORM="png"
+
+# About remote HOST to send/install HTML pages to:
+export ihttp=0                ; # do we export on a remote http server (1) or keep on the local machine (0)
+export RHOST=whitehouse.gov   ; # remote host to send diagnostic page to///
+export RUSER=donald           ; # username associated to remote host (for file export)
+export RWWWD=/data/www/barakuda/ec-earth_3.2b ; # directory of the local or remote host to send the diagnostic page to
+
+
+#########################
+# Diags to be performed #
+#########################
+
+# Movies of SST and SSS compared to OBS:
+export i_do_movi=0
+export iffmpeg_x264=0 ; # is, by chance, ffmpeg with support for x264 encoding available on your system? => 1 !
+
+# Basic 3D and surface averages:
+export i_do_mean=0
+
+# IFS surface fluxes of heat and freshwater
+export i_do_ifs_flx=0 ; # only relevant when ece_exp=2...
+
+# AMOC:
+export i_do_amoc=0
+export LMOCLAT="20-23 30-33 40-43 45-48 50-53" ; # List of latitude bands to look in for max of AMOC
+
+# Sea-ice diags
+export i_do_ice=1  ; # Sea-ice diags
+
+# Transport of mass, heat and salt through specified sections (into TRANSPORT_SECTION_FILE):
+export i_do_trsp=0  ; # transport of mass, heat and salt through specified sections
+#              # i_do_trsp=2 => treat also different depths range!
+z1_trsp=100  ; # first  depth: i_do_trsp must be set to 2
+z2_trsp=1000 ; # second depth: i_do_trsp must be set to 2
+
+# Solid freshwater transport through sections due to sea-ice drift
+export i_do_trsp_ice=0 ; # must have i_do_ice=1
+
+# Meridional heat/salt transport (advective)
+export i_do_mht=0
+
+# Transport by sigma class
+export i_do_sigt=0
+
+# Budget on pre-defined (FILE_DEF_BOXES) rectangular domains:
+export i_do_bb=0   ; # Budget and other stuffs on a given rectangular box!
+#             # => needs file FILE_DEF_BOXES !!!
+# => produces time-series f(t)  (mean of 2D fields)
+
+# Vertical profiles on of box-averaged as a function of time...
+export i_do_box_TS_z=0 ; # do sigma vert. profiles on given boxes... # 1 => no figures, 2 => figures
+#                 # => needs file FILE_DEF_BOXES !!!
+# => produces time-series f(t,z)
+
+# Deep Mixed volume in prescribed boxes:
+export i_do_dmv=0
+export MLD_CRIT="1000,725,500"
+
+# User-defined meridional or zonal cross sections (for temperature and salinity)
+# => TS_SECTION_FILE must be defined!
+export i_do_sect=0
+export TS_SECTION_FILE="${BARAKUDA_ROOT}/data/TS_sections.dat"
+
+
+# BETA / TESTING / NERDY (at your own risks...):
+#
+export i_do_ssx_box=0 ; # zoom on given boxes (+spatially-averaged values) for surface properties
+#                     # boxes defined into barakuda_orca.py ...
+
+# Some nerdy stuffs about the critical depth in prescribed boxes:
+export i_do_zcrit=0

+ 239 - 0
configs/config_ORCA1_L75_NEMO4_lm3.sh

@@ -0,0 +1,239 @@
+#!/bin/bash 
+
+#==========================================================
+#
+#         Configuration file for
+#
+# OCEAN MONITORING for NEMO v4 on 75 levels
+#
+#        Machine: lemaitre3.cism.ucl.ac.be
+#
+#==========================================================
+
+module purge
+module load releases/2018b use.own
+module load netCDF-Fortran/4.4.4-foss-2018b ELIC_Python/1-foss-2018b-Python-2.7.15
+module load ImageMagick/7.0.8-11-GCCcore-7.3.0 FFmpeg/4.1-foss-2018b
+
+export CONF=ORCA1.L75 ; # horizontal global ORCA configuration
+export NBL=75         ; # number of levels
+
+export HOST=lemaitre3.cism.ucl.ac.be ; # this has no importance at all, it will just become an "info" on the web-page!
+export MASTERMIND="UCL-ELIC / PY Barriat" ; # same here, who's the person who designed/ran this simulation?
+
+export EXTRA_CONF="NEMO 4.0.6";   #  // same here ...
+
+# Path / directory structure in which to find NEMO output file (you can use
+# <ORCA> and <EXP> as substitute to your ORCA grid and experiment (EXP) name):
+export NEMO_OUT_STRCT="/scratch/ucl/elic/pbarriat/nemo/archive/$EXP/output"
+
+# Path to root directory where to save the diagnostics (diagnostics for this "CONF"):
+export DIAG_DIR="/scratch/ucl/elic/pbarriat/nemo/archive/$EXP/diag"
+
+# Path to directory containing some 2D and 3D climatologies on the relevant ORCA grid:
+export CONF_INI_DIR="/home/ucl/elic/pbarriat/modeles/barakuda/data"
+export CONF_INI_SCRATCH="/scratch/ucl/elic/pbarriat/data/nemo/CONFIG_FILES/ORCA1"
+
+# Temporary file system (scratch) on which to perform the job; you can use <JOB_ID> if scratch depends on JOB ID:
+export SCRATCH="/scratch/ucl/elic/pbarriat/barakuda"
+
+export PYTHON_HOME="${EBROOTELIC_PYTHON}" ; # HOME to python distribution with matplotlib and basemap !
+
+export DIR_NCVIEW_CMAP="${BARAKUDA_ROOT}/src/ncview_colormaps"
+
+# Is it an ec-earth experiment?
+export ece_exp=0 ; # 0 => not an EC-Earth experiment, it's a "pure" ocean-only NEMO experiment done from traditional NEMO setup
+#                  # 1 => it's an OCEAN-ONLY EC-Earth experiment done from a EC-Earth setup
+#                  # 2 => it's a  COUPLED  EC-Earth experiment
+#                  #      Both 1 and 2 imply that NEMO files are stored in something like
+#                  #       ${SOMEWHERE}/<EXP>/output/nemo/<YYY>
+#                  #       where YYY starts from '001' to
+#                  #      If you select '2', make sure 'cdo' is available and working!!!
+#                  # 10 => this experiment is controlled by AutoSubmit (so NEMO files are tarred somewhere?)
+#
+export Y_INI_EC=1960 ;    # initial year if ece_exp /= 0 !!!
+export M_INI_EC="01" ;    # initial month, only needed if ece_exp >= 10 !!!
+export NCHNKS_Y=1    ;    # number of chunks per year if ece_exp >= 10 (only needed if NCHNKS_Y >= 2 !)
+export TRES_IFS=255  ;    # spectral resolution for IFS, ex: T255 => TRES_IFS=255
+###--- end EC-Earth IFS-related section ---
+
+export ATMO_INFO="IFS T${TRES_IFS}" ; # Name of atmospheric model or forcing used (ex: COREv2, DFS5.2, IFS T255, etc...)
+
+# List of suffix of files that have been saved by NEMO and contain MONTHLY averages:
+export NEMO_SAVED_FILES="grid_T grid_U grid_V icemod SBC"
+
+export TSTAMP="1m"   ; # output time-frequency stamp as in NEMO output files...
+
+# In case 3D fields have been saved on an annual mean basis rather than monthly:
+export ANNUAL_3D="" ;   # leave blank "" if 3D fields are in monthly files...
+export NEMO_SAVED_FILES_3D="" ; #     ''
+
+# How does the nemo files prefix looks like
+# Everything before "<year_related_info>_grid_<X>" or "<year_related_info>_icemod"
+# use <ORCA>, <EXP> and <TSTAMP>=>  Ex: export NEMO_FILE_PREFIX="<ORCA>-<EXP>_<TSTAMP>_"
+export NEMO_FILE_PREFIX="<EXP>_<TSTAMP>_"
+# => should get rid of TSTAMP actually...
+
+
+####### NEMO => what fields in what files ??? ############
+#       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+#   => depends on the XIOS *.xml setup you used...
+#   => always specify a string for the NN_* variables
+#      USE "X" if the field is not present in your NEMO output
+#
+# State variables and others in grid_T files:
+export NN_SST="tos"
+export NN_SSS="sos"
+export NN_SSH="zos"
+export NN_T="thetao"
+export NN_S="so"
+export NN_MLD="mlotst"
+#
+# State variables and others in grid_U files:
+export NN_U="uo"
+export NN_TAUX="tauuo"
+export NN_U_EIV="0" ; # 0 => ignore
+# State variables and others in grid_V files:
+export NN_V="vo"
+export NN_TAUY="tauvo"
+export NN_V_EIV="0" ; # 0 => ignore
+#
+# Sea-ice fields:
+export FILE_ICE_SUFFIX="icemod" ; # in what file type extension to find ice fields
+export NN_ICEF="siconc" ; # name of ice fraction in "FILE_ICE_SUFFIX" file...
+export NN_ICET="sivolu" ; # ice thickness or rather volume...
+export NN_ICEU="sivelu" ; # ice U-velocity
+export NN_ICEV="sivelv" ; # ice V-velocity
+#
+# Surface fluxes:
+export FILE_FLX_SUFFIX="SBC" ; # in what file type extension to find surface fluxes (normally: "SBC")
+####                           # => mind that $FILE_FLX_SUFFIX must be also in NEMO_SAVED_FILES (above)
+#### Note: in fields marked with *+/-* you can use a sum or subtraction of variables (no space allowed!)
+####       ex: NN_EMP="evap_ao_cea+subl_ai_cea-precip"
+####           NN_QNET="qsr+qnsol"
+# ++ Surface freshwater fluxes:
+export NN_FWF="wfo"       ; # *+/-* name of net freshwater flux (E-P-R) in "FILE_FLX_SUFFIX" file...
+export NN_EMP="evap_ao_cea+subl_ai_cea-precip" ; # *+/-* name of E-P in "FILE_FLX_SUFFIX" file...
+#export NN_EMP="emp_oce"   ; # name of Evap minus Precip over ocean 
+export NN_P="precip"      ; # name of total precipitation (solid+liquid) in "FILE_FLX_SUFFIX" file...
+export NN_RNF="runoffs"   ; # name of continental runoffs in "FILE_FLX_SUFFIX" file...
+export NN_CLV="calving_cea"   ; # calving from icebergs in "FILE_FLX_SUFFIX" file...
+export NN_E="evap_ao_cea+subl_ai_cea" ; # *+/-* name of total evaporation in "FILE_FLX_SUFFIX" file...
+# ++ Surface heat fluxes:
+export NN_QNET="qt_oce"   ; # *+/-* name of total net surface heat flux in "FILE_FLX_SUFFIX" file...
+export NN_QSOL="rsntds"   ; # name of net surface solar flux in "FILE_FLX_SUFFIX" file...
+# ++ Wind-stress module:
+export NN_TAUM="taum"        ; # name of surface wind stress module in "FILE_FLX_SUFFIX" file...
+export NN_WNDM="windsp"      ; # name of surface wind  speed module in "FILE_FLX_SUFFIX" file...
+#
+################################################################################################
+
+# Land-sea mask and basins files:
+export MM_FILE=${CONF_INI_DIR}/mesh_mask_nemo.N3.6_ORCA1L75.nc
+export BM_FILE=${BARAKUDA_ROOT}/data/basin_mask_ORCA1_ece3.2_2017.nc4
+
+# OBSERVATIONS / REFERENCES
+# 3D monthly climatologies of potential temperature and salinity (can be those you used for the NEMO experiment):
+export NM_TS_OBS="EN4.2.0 [1990-2010]"
+export F_T_OBS_3D_12=${CONF_INI_SCRATCH}/conservative_temperature_WOA13_decav_ORCA1L75_clim.nc
+export F_S_OBS_3D_12=${CONF_INI_SCRATCH}/absolute_salinity_WOA13_decav_ORCA1L75_clim.nc
+export F_SST_OBS_12=${CONF_INI_SCRATCH}/conservative_temperature_WOA13_decav_ORCA1L75_clim.nc
+export NN_T_OBS="votemper"
+export NN_S_OBS="vosaline"
+export NN_SST_OBS="votemper"
+#
+# Sea-ice:
+export NM_IC_OBS="Hurrell et al 2008 [1980-1999]"
+export F_ICE_OBS_12=${CONF_INI_DIR}/ice_cover_180x360-ORCA1_Hurrell_monthly_mean1980-1999.nc4
+export NN_ICEF_OBS="ice_cover"
+#
+# Surface Heat fluxes:
+export NM_QSOL_OBS="NOCS 2.0 [1980-2005]"
+export F_QSOL_OBS_12=${BARAKUDA_ROOT}/data/obs/radsw_monthly_clim_1980-2005_NOCS2.nc4
+export NN_QSOL_OBS="radsw"
+
+
+# A text file where the cross sections (to compute transports) are defined :
+export TRANSPORT_SECTION_FILE="${BARAKUDA_ROOT}/data/transportiz_ORCA1.dat"        ; # set i_do_trsp=1 !
+export TRANSPORT_SECTION_FILE_ICE="${BARAKUDA_ROOT}/data/transport_ice_ORCA1.dat"  ; # set i_do_trsp_ice=1 !
+
+# For transport by sigma-class:
+export DENSITY_SECTION_FILE="${BARAKUDA_ROOT}/data/dens_section_ORCA1.dat"
+
+# Files with the list of rectangular domains to "analyze" more closely:
+export FILE_DEF_BOXES="${BARAKUDA_ROOT}/data/def_boxes_convection_ORCA1.txt"
+export FILE_DMV_BOXES="${BARAKUDA_ROOT}/data/def_boxes_convection_ORCA1.txt"
+
+# In what format should figures be produced ('png' recommended, but 'svg' supported!):
+export FIG_FORM="png"
+
+# About remote HOST to send/install HTML pages to:
+export ihttp=0                ; # do we export on a remote http server (1) or keep on the local machine (0)
+export RHOST=whitehouse.gov   ; # remote host to send diagnostic page to///
+export RUSER=donald           ; # username associated to remote host (for file export)
+export RWWWD=/data/www/barakuda/ec-earth_3.2b ; # directory of the local or remote host to send the diagnostic page to
+
+
+#########################
+# Diags to be performed #
+#########################
+
+# Movies of SST and SSS compared to OBS:
+export i_do_movi=1
+export iffmpeg_x264=0 ; # is, by chance, ffmpeg with support for x264 encoding available on your system? => 1 !
+
+# Basic 3D and surface averages:
+export i_do_mean=1
+
+# IFS surface fluxes of heat and freshwater
+export i_do_ifs_flx=1 ; # only relevant when ece_exp=2...
+
+# AMOC:
+export i_do_amoc=1
+export LMOCLAT="20-23 30-33 40-43 45-48 50-53" ; # List of latitude bands to look in for max of AMOC
+
+# Sea-ice diags
+export i_do_ice=1  ; # Sea-ice diags
+
+# Transport of mass, heat and salt through specified sections (into TRANSPORT_SECTION_FILE):
+export i_do_trsp=1  ; # transport of mass, heat and salt through specified sections
+#              # i_do_trsp=2 => treat also different depths range!
+z1_trsp=100  ; # first  depth: i_do_trsp must be set to 2
+z2_trsp=1000 ; # second depth: i_do_trsp must be set to 2
+
+# Solid freshwater transport through sections due to sea-ice drift
+export i_do_trsp_ice=1 ; # must have i_do_ice=1
+
+# Meridional heat/salt transport (advective)
+export i_do_mht=0
+
+# Transport by sigma class
+export i_do_sigt=1
+
+# Budget on pre-defined (FILE_DEF_BOXES) rectangular domains:
+export i_do_bb=0   ; # Budget and other stuffs on a given rectangular box!
+#             # => needs file FILE_DEF_BOXES !!!
+# => produces time-series f(t)  (mean of 2D fields)
+
+# Vertical profiles of box-averaged fields as a function of time...
+export i_do_box_TS_z=0 ; # do sigma vert. profiles on given boxes... # 1 => no figures, 2 => figures
+#                 # => needs file FILE_DEF_BOXES !!!
+# => produces time-series f(t,z)
+
+# Deep Mixed volume in prescribed boxes:
+export i_do_dmv=1
+export MLD_CRIT="1000,725,500"
+
+# User-defined meridional or zonal cross sections (for temperature and salinity)
+# => TS_SECTION_FILE must be defined!
+export i_do_sect=1
+export TS_SECTION_FILE="${BARAKUDA_ROOT}/data/TS_sections.dat"
+
+
+# BETA / TESTING / NERDY (at your own risks...):
+#
+export i_do_ssx_box=0 ; # zoom on given boxes (+spatially-averaged values) for surface properties
+#                     # boxes defined into barakuda_orca.py ...
+
+# Some nerdy stuffs about the critical depth in prescribed boxes:
+export i_do_zcrit=0

+ 238 - 0
configs/config_ORCA1_L75_lemaitre3.sh

@@ -0,0 +1,238 @@
+#!/bin/bash
+
+#==========================================================
+#
+#         Configuration file for
+#
+# OCEAN MONITORING for NEMO v3.6 on 75 levels
+#
+#        Machine: lemaitre3.cism.ucl.ac.be
+#
+#==========================================================
+
+module purge
+module load releases/2018b use.own
+module load netCDF-Fortran/4.4.4-foss-2018b ELIC_Python/1-foss-2018b-Python-2.7.15
+module load ImageMagick/7.0.8-11-GCCcore-7.3.0 FFmpeg/4.1-foss-2018b
+
+export CONF=ORCA1.L75 ; # horizontal global ORCA configuration
+export NBL=75         ; # number of levels
+
+export HOST=lemaitre3.cism.ucl.ac.be ; # this has no importance at all, it will just become an "info" on the web-page!
+export MASTERMIND="UCL-ELIC / PY Barriat" ; # same here, who's the person who designed/ran this simulation?
+
+export EXTRA_CONF="NEMO 3.6";   #  // same here ...
+
+# Path / directory structure in which to find NEMO output file (you can use
+# <ORCA> and <EXP> as substitute to your ORCA grid and experiment (EXP) name):
+export NEMO_OUT_STRCT="/scratch/ucl/elic/pbarriat/nemo/archive/$EXP/output"
+
+# Path to root directory where to save the diagnostics (diagnostics for this "CONF"):
+export DIAG_DIR="/scratch/ucl/elic/pbarriat/nemo/archive/$EXP/diag"
+
+# Path to directory containing some 2D and 3D climatologies on the relevant ORCA grid:
+export CONF_INI_DIR="/home/ucl/elic/pbarriat/modeles/barakuda/data"
+export CONF_INI_SCRATCH="/scratch/ucl/elic/pbarriat/data/nemo/CONFIG_FILES/ORCA1"
+
+# Temporary file system (scratch) on which to perform the job; you can use <JOB_ID> if scratch depends on JOB ID:
+export SCRATCH="/scratch/ucl/elic/pbarriat/barakuda"
+
+export PYTHON_HOME="${EBROOTELIC_PYTHON}" ; # HOME to python distribution with matplotlib and basemap !
+
+export DIR_NCVIEW_CMAP="${BARAKUDA_ROOT}/src/ncview_colormaps"
+
+# Is it an ec-earth experiment?
+export ece_exp=0 ; # 0 => not an EC-Earth experiment, it's a "pure" ocean-only NEMO experiment done from traditional NEMO setup
+#                  # 1 => it's an OCEAN-ONLY EC-Earth experiment done from a EC-Earth setup
+#                  # 2 => it's a  COUPLED  EC-Earth experiment
+#                  #      Both 1 and 2 imply that NEMO files are stored in something like
+#                  #       ${SOMEWHERE}/<EXP>/output/nemo/<YYY>
+#                  #       where YYY starts from '001' to
+#                  #      If you select '2', make sure 'cdo' is available and working!!!
+#                  # 10 => this experiment is controlled by AutoSubmit (so NEMO files are tarred somewhere?)
+#
+export Y_INI_EC=1960 ;    # initial year if ece_exp /= 0 !!!
+export M_INI_EC="01" ;    # initial month, only needed if ece_exp >= 10 !!!
+export NCHNKS_Y=1    ;    # number of chunks per year if ece_exp >= 10 (only needed if NCHNKS_Y >= 2 !)
+export TRES_IFS=255  ;    # spectral resolution for IFS, ex: T255 => TRES_IFS=255
+###--- end EC-Earth IFS-related section ---
+
+export ATMO_INFO="IFS T${TRES_IFS}" ; # Name of atmospheric model or forcing used (ex: COREv2, DFS5.2, IFS T255, etc...)
+
+# List of suffix of files that have been saved by NEMO and contain MONTHLY averages:
+export NEMO_SAVED_FILES="grid_T grid_U grid_V icemod SBC"
+
+export TSTAMP="1m"   ; # output time-frequency stamp as in NEMO output files...
+
+# In case 3D fields have been saved on an annual mean basis rather than monthly:
+export ANNUAL_3D="" ;   # leave blank "" if 3D fields are in monthly files...
+export NEMO_SAVED_FILES_3D="" ; #     ''
+
+# How does the nemo files prefix looks like
+# Everything before "<year_related_info>_grid_<X>" or "<year_related_info>_icemod"
+# use <ORCA>, <EXP> and <TSTAMP>=>  Ex: export NEMO_FILE_PREFIX="<ORCA>-<EXP>_<TSTAMP>_"
+export NEMO_FILE_PREFIX="<EXP>_<TSTAMP>_"
+# => should get rid of TSTAMP actually...
+
+
+####### NEMO => what fields in what files ??? ############
+#       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+#   => depends on the XIOS *.xml setup you used...
+#   => always specify a string for the NN_* variables
+#      USE "X" if the field is not present in your NEMO output
+#
+# State variables and others in grid_T files:
+export NN_SST="tos"
+export NN_SSS="sos"
+export NN_SSH="zos"
+export NN_T="thetao"
+export NN_S="so"
+export NN_MLD="mlotst"
+#
+# State variables and others in grid_U files:
+export NN_U="uo"
+export NN_TAUX="tauuo"
+export NN_U_EIV="0" ; # 0 => ignore
+# State variables and others in grid_V files:
+export NN_V="vo"
+export NN_TAUY="tauvo"
+export NN_V_EIV="0" ; # 0 => ignore
+#
+# Sea-ice fields:
+export FILE_ICE_SUFFIX="icemod" ; # in what file type extension to find ice fields
+export NN_ICEF="siconc" ; # name of ice fraction in "FILE_ICE_SUFFIX" file...
+export NN_ICET="sivolu" ; # ice thickness or rather volume...
+export NN_ICEU="sivelu" ; # ice U-velocity
+export NN_ICEV="sivelv" ; # ice V-velocity
+#
+# Surface fluxes:
+export FILE_FLX_SUFFIX="SBC" ; # in what file type extension to find surface fluxes (normally: "SBC")
+####                           # => mind that $FILE_FLX_SUFFIX must be also in NEMO_SAVED_FILES (above)
+#### Note: in fields marked with *+/-* you can use a sum or subtraction of variables (no space allowed!)
+####       ex: NN_EMP="evap_ao_cea+subl_ai_cea-precip"
+####           NN_QNET="qsr+qnsol"
+# ++ Surface freshwater fluxes:
+export NN_FWF="wfo"       ; # *+/-* name of net freshwater flux (E-P-R) in "FILE_FLX_SUFFIX" file...
+export NN_EMP="evap_ao_cea+subl_ai_cea-precip" ; # *+/-* name of E-P in "FILE_FLX_SUFFIX" file...
+export NN_P="precip"      ; # name of total precipitation (solid+liquid) in "FILE_FLX_SUFFIX" file...
+export NN_RNF="runoffs"   ; # name of continental runoffs in "FILE_FLX_SUFFIX" file...
+export NN_CLV="calving"   ; # calving from icebergs in "FILE_FLX_SUFFIX" file...
+export NN_E="evap_ao_cea+subl_ai_cea" ; # *+/-* name of total evaporation in "FILE_FLX_SUFFIX" file...
+# ++ Surface heat fluxes:
+export NN_QNET="qt_oce"   ; # *+/-* name of total net surface heat flux in "FILE_FLX_SUFFIX" file...
+export NN_QSOL="rsntds"   ; # name of net surface solar flux in "FILE_FLX_SUFFIX" file...
+# ++ Wind-stress module:
+export NN_TAUM="taum"        ; # name of surface wind stress module in "FILE_FLX_SUFFIX" file...
+export NN_WNDM="windsp"      ; # name of surface wind  speed module in "FILE_FLX_SUFFIX" file...
+#
+################################################################################################
+
+# Land-sea mask and basins files:
+export MM_FILE=${CONF_INI_DIR}/mesh_mask_nemo.N3.6_ORCA1L75.nc
+export BM_FILE=${BARAKUDA_ROOT}/data/basin_mask_ORCA1_ece3.2_2017.nc4
+
+# OBSERVATIONS / REFERENCES
+# 3D monthly climatologies of potential temperature and salinity (can be those you used for the NEMO experiment):
+export NM_TS_OBS="EN4.2.0 [1990-2010]"
+export F_T_OBS_3D_12=${CONF_INI_SCRATCH}/conservative_temperature_WOA13_decav_ORCA1L75_clim.nc
+export F_S_OBS_3D_12=${CONF_INI_SCRATCH}/absolute_salinity_WOA13_decav_ORCA1L75_clim.nc
+export F_SST_OBS_12=${CONF_INI_SCRATCH}/conservative_temperature_WOA13_decav_ORCA1L75_clim.nc
+export NN_T_OBS="votemper"
+export NN_S_OBS="vosaline"
+export NN_SST_OBS="votemper"
+#
+# Sea-ice:
+export NM_IC_OBS="Hurrell et al 2008 [1980-1999]"
+export F_ICE_OBS_12=${CONF_INI_DIR}/ice_cover_180x360-ORCA1_Hurrell_monthly_mean1980-1999.nc4
+export NN_ICEF_OBS="ice_cover"
+#
+# Surface Heat fluxes:
+export NM_QSOL_OBS="NOCS 2.0 [1980-2005]"
+export F_QSOL_OBS_12=${BARAKUDA_ROOT}/data/obs/radsw_monthly_clim_1980-2005_NOCS2.nc4
+export NN_QSOL_OBS="radsw"
+
+
+# A text file where the cross sections (to compute transports) are defined :
+export TRANSPORT_SECTION_FILE="${BARAKUDA_ROOT}/data/transportiz_ORCA1.dat"        ; # set i_do_trsp=1 !
+export TRANSPORT_SECTION_FILE_ICE="${BARAKUDA_ROOT}/data/transport_ice_ORCA1.dat"  ; # set i_do_trsp_ice=1 !
+
+# For transport by sigma-class:
+export DENSITY_SECTION_FILE="${BARAKUDA_ROOT}/data/dens_section_ORCA1.dat"
+
+# Files with the list of rectangular domains to "analyze" more closely:
+export FILE_DEF_BOXES="${BARAKUDA_ROOT}/data/def_boxes_convection_ORCA1.txt"
+export FILE_DMV_BOXES="${BARAKUDA_ROOT}/data/def_boxes_convection_ORCA1.txt"
+
+# In what format should figures be produced ('png' recommended, but 'svg' supported!):
+export FIG_FORM="png"
+
+# About remote HOST to send/install HTML pages to:
+export ihttp=0                ; # do we export on a remote http server (1) or keep on the local machine (0)
+export RHOST=whitehouse.gov   ; # remote host to send diagnostic page to///
+export RUSER=donald           ; # username associated to remote host (for file export)
+export RWWWD=/data/www/barakuda/ec-earth_3.2b ; # directory of the local or remote host to send the diagnostic page to
+
+
+#########################
+# Diags to be performed #
+#########################
+
+# Movies of SST and SSS compared to OBS:
+export i_do_movi=1
+export iffmpeg_x264=0 ; # is, by chance, ffmpeg with support for x264 encoding available on your system? => 1 !
+
+# Basic 3D and surface averages:
+export i_do_mean=1
+
+# IFS surface fluxes of heat and freshwater
+export i_do_ifs_flx=1 ; # only relevant when ece_exp=2...
+
+# AMOC:
+export i_do_amoc=1
+export LMOCLAT="20-23 30-33 40-43 45-48 50-53" ; # List of latitude bands to look in for max of AMOC
+
+# Sea-ice diags
+export i_do_ice=1  ; # Sea-ice diags
+
+# Transport of mass, heat and salt through specified sections (into TRANSPORT_SECTION_FILE):
+export i_do_trsp=1  ; # transport of mass, heat and salt through specified sections
+#              # i_do_trsp=2 => treat also different depths range!
+z1_trsp=100  ; # first  depth: i_do_trsp must be set to 2
+z2_trsp=1000 ; # second depth: i_do_trsp must be set to 2
+
+# Solid freshwater transport through sections due to sea-ice drift
+export i_do_trsp_ice=1 ; # must have i_do_ice=1
+
+# Meridional heat/salt transport (advective)
+export i_do_mht=1
+
+# Transport by sigma class
+export i_do_sigt=1
+
+# Budget on pre-defined (FILE_DEF_BOXES) rectangular domains:
+export i_do_bb=0   ; # Budget and other stuffs on a given rectangular box!
+#             # => needs file FILE_DEF_BOXES !!!
+# => produces time-series f(t)  (mean of 2D fields)
+
+# Vertical profiles of box-averaged fields as a function of time...
+export i_do_box_TS_z=0 ; # do sigma vert. profiles on given boxes... # 1 => no figures, 2 => figures
+#                 # => needs file FILE_DEF_BOXES !!!
+# => produces time-series f(t,z)
+
+# Deep Mixed volume in prescribed boxes:
+export i_do_dmv=1
+export MLD_CRIT="1000,725,500"
+
+# User-defined meridional or zonal cross sections (for temperature and salinity)
+# => TS_SECTION_FILE must be defined!
+export i_do_sect=1
+export TS_SECTION_FILE="${BARAKUDA_ROOT}/data/TS_sections.dat"
+
+
+# BETA / TESTING / NERDY (at your own risks...):
+#
+export i_do_ssx_box=0 ; # zoom on given boxes (+spatially-averaged values) for surface properties
+#                     # boxes defined into barakuda_orca.py ...
+
+# Some nerdy stuffs about the critical depth in prescribed boxes:
+export i_do_zcrit=0

+ 238 - 0
configs/config_ORCA1_L75_lemaitre3_ec.sh

@@ -0,0 +1,238 @@
+#!/bin/bash
+
+#==========================================================
+#
+#         Configuration file for
+#
+# OCEAN MONITORING for NEMO v3.6 on 75 levels
+#
+#        Machine: lemaitre3.cism.ucl.ac.be
+#
+#==========================================================
+
+module purge
+module load releases/2018b use.own
+module load netCDF-Fortran/4.4.4-foss-2018b ELIC_Python/1-foss-2018b-Python-2.7.15
+module load ImageMagick/7.0.8-11-GCCcore-7.3.0 FFmpeg/4.1-foss-2018b
+
+export CONF=ORCA1.L75 ; # horizontal global ORCA configuration
+export NBL=75         ; # number of levels
+
+export HOST=lemaitre3.cism.ucl.ac.be ; # this has no importance at all, it will just become an "info" on the web-page!
+export MASTERMIND="UCL-ELIC / PY Barriat" ; # same here, who's the person who designed/ran this simulation?
+
+export EXTRA_CONF="NEMO 3.6 + LIM 3 (ecearth 3.3.3.2)";   #  // same here ...
+
+# Path / directory structure in which to find NEMO output file (you can use
+# <ORCA> and <EXP> as substitute to your ORCA grid and experiment (EXP) name):
+export NEMO_OUT_STRCT="/CECI/trsf/hazevedo/ecearth/$EXP/output/nemo"
+
+# Path to root directory where to save the diagnostics (diagnostics for this "CONF"):
+export DIAG_DIR="/scratch/ucl/elic/pbarriat/nemo/archive/$EXP/diag"
+
+# Path to directory containing some 2D and 3D climatologies on the relevant ORCA grid:
+export CONF_INI_DIR="/home/ucl/elic/pbarriat/modeles/barakuda/data"
+export CONF_INI_SCRATCH="/scratch/ucl/elic/pbarriat/data/nemo/CONFIG_FILES/ORCA1"
+
+# Temporary file system (scratch) on which to perform the job; you can use <JOB_ID> if scratch depends on JOB ID:
+export SCRATCH="/scratch/ucl/elic/pbarriat/barakuda"
+
+export PYTHON_HOME="${EBROOTELIC_PYTHON}" ; # HOME to python distribution with matplotlib and basemap !
+
+export DIR_NCVIEW_CMAP="${BARAKUDA_ROOT}/src/ncview_colormaps"
+
+# Is it an ec-earth experiment?
+export ece_exp=2 ; # 0 => not an EC-Earth experiment, it's a "pure" ocean-only NEMO experiment done from traditional NEMO setup
+#                  # 1 => it's an OCEAN-ONLY EC-Earth experiment done from a EC-Earth setup
+#                  # 2 => it's a  COUPLED  EC-Earth experiment
+#                  #      Both 1 and 2 imply that NEMO files are stored in something like
+#                  #       ${SOMEWHERE}/<EXP>/output/nemo/<YYY>
+#                  #       where YYY starts from '001' to
+#                  #      If you select '2', make sure 'cdo' is available and working!!!
+#                  # 10 => this experiment is controlled by AutoSubmit (so NEMO files are tarred somewhere?)
+#
+export Y_INI_EC=1850 ;    # initial year if ece_exp /= 0 !!!
+export M_INI_EC="01" ;    # initial month, only needed if ece_exp >= 10 !!!
+export NCHNKS_Y=1    ;    # number of chunks per year if ece_exp >= 10 (only needed if NCHNKS_Y >= 2 !)
+export TRES_IFS=255  ;    # spectral resolution for IFS, ex: T255 => TRES_IFS=255
+###--- end EC-Earth IFS-related section ---
+
+export ATMO_INFO="IFS T${TRES_IFS}" ; # Name of atmospheric model or forcing used (ex: COREv2, DFS5.2, IFS T255, etc...)
+
+# List of suffix of files that have been saved by NEMO and contain MONTHLY averages:
+export NEMO_SAVED_FILES="grid_T grid_U grid_V icemod SBC"
+
+export TSTAMP="1m"   ; # output time-frequency stamp as in NEMO output files...
+
+# In case 3D fields have been saved on an annual mean basis rather than monthly:
+export ANNUAL_3D="" ;   # leave blank "" if 3D fields are in monthly files...
+export NEMO_SAVED_FILES_3D="" ; #     ''
+
+# How does the nemo files prefix looks like
+# Everything before "<year_related_info>_grid_<X>" or "<year_related_info>_icemod"
+# use <ORCA>, <EXP> and <TSTAMP>=>  Ex: export NEMO_FILE_PREFIX="<ORCA>-<EXP>_<TSTAMP>_"
+export NEMO_FILE_PREFIX="<EXP>_<TSTAMP>_"
+# => should get rid of TSTAMP actually...
+
+
+####### NEMO => what fields in what files ??? ############
+#       ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+#   => depends on the XIOS *.xml setup you used...
+#   => always specify a string for the NN_* variables
+#      USE "X" if the field is not present in your NEMO output
+#
+# State variables and others in grid_T files:
+export NN_SST="tos"
+export NN_SSS="sos"
+export NN_SSH="zos"
+export NN_T="thetao"
+export NN_S="so"
+export NN_MLD="mlotst"
+#
+# State variables and others in grid_U files:
+export NN_U="uo"
+export NN_TAUX="tauuo"
+export NN_U_EIV="0" ; # 0 => ignore
+# State variables and others in grid_V files:
+export NN_V="vo"
+export NN_TAUY="tauvo"
+export NN_V_EIV="0" ; # 0 => ignore
+#
+# Sea-ice fields:
+export FILE_ICE_SUFFIX="icemod" ; # in what file type extension to find ice fields
+export NN_ICEF="siconc" ; # name of ice fraction in "FILE_ICE_SUFFIX" file...
+export NN_ICET="sivolu" ; # ice thickness or rather volume...
+export NN_ICEU="siu" ; # ice U-velocity
+export NN_ICEV="siv" ; # ice V-velocity
+#
+# Surface fluxes:
+export FILE_FLX_SUFFIX="SBC" ; # in what file type extension to find surface fluxes (normally: "SBC")
+####                           # => mind that $FILE_FLX_SUFFIX must be also in NEMO_SAVED_FILES (above)
+#### Note: in fields marked with *+/-* you can use a sum or subtraction of variables (no space allowed!)
+####       ex: NN_EMP="evap_ao_cea+subl_ai_cea-precip"
+####           NN_QNET="qsr+qnsol"
+# ++ Surface freshwater fluxes:
+export NN_FWF="wfo"       ; # *+/-* name of net freshwater flux (E-P-R) in "FILE_FLX_SUFFIX" file...
+export NN_EMP="evap_ao_cea+subl_ai_cea-precip" ; # *+/-* name of E-P in "FILE_FLX_SUFFIX" file...
+export NN_P="precip"      ; # name of total precipitation (solid+liquid) in "FILE_FLX_SUFFIX" file...
+export NN_RNF="runoffs"   ; # name of continental runoffs in "FILE_FLX_SUFFIX" file...
+export NN_CLV="calving"   ; # calving from icebergs in "FILE_FLX_SUFFIX" file...
+export NN_E="evap_ao_cea+subl_ai_cea" ; # *+/-* name of total evaporation in "FILE_FLX_SUFFIX" file...
+# ++ Surface heat fluxes:
+export NN_QNET="qt_oce"   ; # *+/-* name of total net surface heat flux in "FILE_FLX_SUFFIX" file...
+export NN_QSOL="rsntds"   ; # name of net surface solar flux in "FILE_FLX_SUFFIX" file...
+# ++ Wind-stress module:
+export NN_TAUM="taum"        ; # name of surface wind stress module in "FILE_FLX_SUFFIX" file...
+export NN_WNDM="windsp"      ; # name of surface wind  speed module in "FILE_FLX_SUFFIX" file...
+#
+################################################################################################
+
+# Land-sea mask and basins files:
+export MM_FILE=${CONF_INI_DIR}/mesh_mask_nemo.N3.6_ORCA1L75.nc
+export BM_FILE=${BARAKUDA_ROOT}/data/basin_mask_ORCA1_ece3.2_2017.nc4
+
+# OBSERVATIONS / REFERENCES
+# 3D monthly climatologies of potential temperature and salinity (can be those you used for the NEMO experiment):
+export NM_TS_OBS="EN4.2.0 [1990-2010]"
+export F_T_OBS_3D_12=${CONF_INI_SCRATCH}/conservative_temperature_WOA13_decav_ORCA1L75_clim.nc
+export F_S_OBS_3D_12=${CONF_INI_SCRATCH}/absolute_salinity_WOA13_decav_ORCA1L75_clim.nc
+export F_SST_OBS_12=${CONF_INI_SCRATCH}/conservative_temperature_WOA13_decav_ORCA1L75_clim.nc
+export NN_T_OBS="votemper"
+export NN_S_OBS="vosaline"
+export NN_SST_OBS="votemper"
+#
+# Sea-ice:
+export NM_IC_OBS="Hurrell et al 2008 [1980-1999]"
+export F_ICE_OBS_12=${CONF_INI_DIR}/ice_cover_180x360-ORCA1_Hurrell_monthly_mean1980-1999.nc4
+export NN_ICEF_OBS="ice_cover"
+#
+# Surface Heat fluxes:
+export NM_QSOL_OBS="NOCS 2.0 [1980-2005]"
+export F_QSOL_OBS_12=${BARAKUDA_ROOT}/data/obs/radsw_monthly_clim_1980-2005_NOCS2.nc4
+export NN_QSOL_OBS="radsw"
+
+
+# A text file where the cross sections (to compute transports) are defined :
+export TRANSPORT_SECTION_FILE="${BARAKUDA_ROOT}/data/transportiz_ORCA1.dat"        ; # set i_do_trsp=1 !
+export TRANSPORT_SECTION_FILE_ICE="${BARAKUDA_ROOT}/data/transport_ice_ORCA1.dat"  ; # set i_do_trsp_ice=1 !
+
+# For transport by sigma-class:
+export DENSITY_SECTION_FILE="${BARAKUDA_ROOT}/data/dens_section_ORCA1.dat"
+
+# Files with the list of rectangular domains to "analyze" more closely:
+export FILE_DEF_BOXES="${BARAKUDA_ROOT}/data/def_boxes_convection_ORCA1.txt"
+export FILE_DMV_BOXES="${BARAKUDA_ROOT}/data/def_boxes_convection_ORCA1.txt"
+
+# In what format should figures be produced ('png' recommended, but 'svg' supported!):
+export FIG_FORM="png"
+
+# About remote HOST to send/install HTML pages to:
+export ihttp=0                ; # do we export on a remote http server (1) or keep on the local machine (0)
+export RHOST=whitehouse.gov   ; # remote host to send diagnostic page to///
+export RUSER=donald           ; # username associated to remote host (for file export)
+export RWWWD=/data/www/barakuda/ec-earth_3.2b ; # directory of the local or remote host to send the diagnostic page to
+
+
+#########################
+# Diags to be performed #
+#########################
+
+# Movies of SST and SSS compared to OBS:
+export i_do_movi=1
+export iffmpeg_x264=0 ; # is, by chance, ffmpeg with support for x264 encoding available on your system? => 1 !
+
+# Basic 3D and surface averages:
+export i_do_mean=1
+
+# IFS surface fluxes of heat and freshwater
+export i_do_ifs_flx=1 ; # only relevant when ece_exp=2...
+
+# AMOC:
+export i_do_amoc=1
+export LMOCLAT="20-23 30-33 40-43 45-48 50-53" ; # List of latitude bands to look in for max of AMOC
+
+# Sea-ice diags
+export i_do_ice=1  ; # Sea-ice diags
+
+# Transport of mass, heat and salt through specified sections (into TRANSPORT_SECTION_FILE):
+export i_do_trsp=1  ; # transport of mass, heat and salt through specified sections
+#              # i_do_trsp=2 => treat also different depths range!
+z1_trsp=100  ; # first  depth: i_do_trsp must be set to 2
+z2_trsp=1000 ; # second depth: i_do_trsp must be set to 2
+
+# Solid freshwater transport through sections due to sea-ice drift
+export i_do_trsp_ice=1 ; # must have i_do_ice=1
+
+# Meridional heat/salt transport (advective)
+export i_do_mht=1
+
+# Transport by sigma class
+export i_do_sigt=1
+
+# Budget on pre-defined (FILE_DEF_BOXES) rectangular domains:
+export i_do_bb=0   ; # Budget and other stuffs on a given rectangular box!
+#             # => needs file FILE_DEF_BOXES !!!
+# => produces time-series f(t)  (mean of 2D fields)
+
+# Vertical profiles of box-averaged fields as a function of time...
+export i_do_box_TS_z=0 ; # do sigma vert. profiles on given boxes... # 1 => no figures, 2 => figures
+#                 # => needs file FILE_DEF_BOXES !!!
+# => produces time-series f(t,z)
+
+# Deep Mixed volume in prescribed boxes:
+export i_do_dmv=1
+export MLD_CRIT="1000,725,500"
+
+# User-defined meridional or zonal cross sections (for temperature and salinity)
+# => TS_SECTION_FILE must be defined!
+export i_do_sect=1
+export TS_SECTION_FILE="${BARAKUDA_ROOT}/data/TS_sections.dat"
+
+
+# BETA / TESTING / NERDY (at your own risks...):
+#
+export i_do_ssx_box=0 ; # zoom on given boxes (+spatially-averaged values) for surface properties
+#                     # boxes defined into barakuda_orca.py ...
+
+# Some nerdy stuffs about the critical depth in prescribed boxes:
+export i_do_zcrit=0

BIN
data/mesh_mask_nemo.N3.6_ORCA1L75.nc


+ 57 - 40
python/exec/convert_ps_to_SA.py

@@ -14,36 +14,26 @@ from string import replace
 
 import gsw
 
-#l_accurate = True
-l_accurate = False
-
 SSO = 35.16504
 
+cdepth = 'deptht'
 
-if len(sys.argv) < 3:
-    print 'Usage: '+sys.argv[0]+' <Salinity_file_to_convert> <salinity_name> (2d)'
+if len(sys.argv) != 3:
+    print 'Usage: '+sys.argv[0]+' <Salinity_file_to_convert> <salinity_name>'
     sys.exit(0)
 
 
 cf_sal  = sys.argv[1]
 cv_sal  = sys.argv[2]
 
-l2d = False
-
-if len(sys.argv) == 4:
-    cv_2d  = sys.argv[3]
-    if cv_2d != '2d': print 'Usage: '+sys.argv[0]+' <Salinity_file_to_convert> <salinity_name> (2d)'
-    l2d=True
-
-
-cf_out = replace(cf_sal, cf_sal, 'absolute_salinity_'+cf_sal)
+cf_out = replace(cf_sal, cv_sal, cv_sal+'_TEOS10')
 
 
 os.system('rm -f '+cf_out)
 os.system('cp '+cf_sal+' '+cf_out)
 
 
-
+l_accurate = False
 
 
 
@@ -54,42 +44,69 @@ print '\n'
 f_out = Dataset(cf_out, 'r+')     # r+ => can read and write in the file... )
 print 'File ', cf_out, 'is open...\n'
 
-# Extracting tmask at surface level:
-if l2d:
-    xsal  = f_out.variables[cv_sal][:,:,:]
-else:
-    xsal  = f_out.variables[cv_sal][:,:,:,:]
+# Inquire variables in the file to see if a depth is there...
+list_var = f_out.variables.keys() ; print ' *** list_var =', list_var
 
-if l_accurate and not l2d:
-    
-    vz    = f_out.variables['deptht'][:]
-    
-    [nt,nk,nj,ni] = nmp.shape(xsal)
 
-    xdepth = nmp.zeros((nk,nj,ni))
+if cdepth in list_var: l_accurate = True
 
-    # building 3d arrays of depth, lon and lat:
-    for jk in range(nk): xdepth[jk,:,:] = vz[jk]
 
-    # pressure should be in dbar and it's the same as the depth in metre actually:
-    for jt in range(nt):
-        print ' jt =', jt
-        f_out.variables[cv_sal][jt,:,:,:] = gsw.SA_from_SP(xsal[jt,:,:,:], xdepth, -140., 0.)
+vcdim = f_out.variables[cv_sal].dimensions
+cv_t = vcdim[0]; print ' *** record dimension is called "'+cv_t+'"'
+Nt = f_out.dimensions[cv_t].size ; print ' *** There are '+str(Nt)+' time records...\n'
 
-else:
-    # Fabien says it's enough:
-    if l2d:
-        f_out.variables[cv_sal][:,:,:]   = xsal[:,:,:]*SSO/35.
-    else:
-        f_out.variables[cv_sal][:,:,:,:] = xsal[:,:,:,:]*SSO/35. 
+# Inquire the shape of arrays:
+nb_dim = len(vcdim)
+print ' *** '+cf_out+' has '+str(nb_dim)+' dimmensions!'
 
+if not nb_dim in [ 2, 3, 4 ]: print ' ERROR: unsupported number of dimmensions! =>', nb_dim ; sys.exit(0)
 
-f_out.variables[cv_sal].long_name = 'Absolute Salinity (TEOS10) build from practical salinity (*35.16504/35)'
 
-f_out.close()
 
+for jt in range(Nt):
+         
+    print '\n --- treating record # '+str(jt)
+    
+    if nb_dim==4: xsal = f_out.variables[cv_sal][jt,:,:,:]
+    if nb_dim==3: xsal = f_out.variables[cv_sal][jt,:,:]
+    if nb_dim==2: xsal = f_out.variables[cv_sal][jt,:]
+
+    if jt == 0: shp = nmp.shape(xsal)
+
+    xtmp = nmp.zeros(shp)
+    xtmp = xsal
+    
+    if l_accurate:
+
+        if jt == 0:
+            print '\n Using accurate method with depth !'        
+            vz    = f_out.variables['deptht'][:]
+            xdepth = nmp.zeros(shp)
+            nk = f_out.dimensions['z'].size ; print ' *** There are '+str(nk)+' vertical levels...'
+            # building 3d arrays of depth:
+            for jk in range(nk):
+                if nb_dim==4: xdepth[jk,:,:] = vz[jk]
+                if nb_dim==3: xdepth[jk,:]   = vz[jk]
+                if nb_dim==2: xdepth[jk]     = vz[jk]
+
+        # pressure should be in dbar and it's the same as the depth in metre actually:
+        if nb_dim==4: f_out.variables[cv_sal][jt,:,:,:] = gsw.SA_from_SP(xtmp[:,:,:], xdepth, -140., 0.)
+        if nb_dim==3: f_out.variables[cv_sal][jt,:,:]   = gsw.SA_from_SP(xtmp[:,:],   xdepth, -140., 0.)
+        if nb_dim==2: f_out.variables[cv_sal][jt,:]     = gsw.SA_from_SP(xtmp[:],     xdepth, -140., 0.)
+    
+    else:
+    
+        # Fabien says it's enough:
+        if nb_dim==4: f_out.variables[cv_sal][jt,:,:,:] = xtmp[:,:,:]*SSO/35.
+        if nb_dim==3: f_out.variables[cv_sal][jt,:,:]   = xtmp[:,:]*SSO/35.
+        if nb_dim==2: f_out.variables[cv_sal][jt,:]     = xtmp[:]*SSO/35.
 
 
+    
+        
+f_out.variables[cv_sal].long_name = 'Absolute Salinity (TEOS10) build from practical salinity'
+    
+f_out.close()
 
 print cf_out+' sucessfully created!'
 

+ 32 - 14
python/exec/convert_pt_to_CT.py

@@ -14,9 +14,6 @@ from string import replace
 
 import gsw
 
-#SSO = 35.16504
-
-
 if len(sys.argv) != 5:
     print 'Usage: '+sys.argv[0]+' <Temperature_file_to_convert> <temperature_name> <Absolute_salinity_file> <salinity_name>'
     sys.exit(0)
@@ -27,7 +24,7 @@ cv_temp  = sys.argv[2]
 cf_sal   = sys.argv[3]
 cv_sal   = sys.argv[4]
 
-cf_out = replace(cf_temp, cf_temp, 'conservative_temperature_'+cf_temp)
+cf_out = replace(cf_temp, cv_temp, cv_temp+'_TEOS10')
 
 os.system('rm -f '+cf_out)
 os.system('cp '+cf_temp+' '+cf_out)
@@ -42,34 +39,55 @@ print '\n'
 
 
 f_sal = Dataset(cf_sal)     # r+ => can read and write in the file... )
-xsal  = f_sal.variables[cv_sal][:,:,:,:]
-f_sal.close()
 
 
-print '\n'
+vcdim = f_sal.variables[cv_sal].dimensions
+cv_t = vcdim[0]; print ' *** record dimension is called "'+cv_t+'"'
+Nt = f_sal.dimensions[cv_t].size ; print ' *** There are '+str(Nt)+' time records...\n'
+
+# Inquire the shape of arrays:
+nb_dim = len(vcdim)
+print ' *** '+cf_sal+' has '+str(nb_dim)+' dimmensions!'
+
+if not nb_dim in [ 2, 3, 4 ]: print ' ERROR: unsupported number of dimmensions! =>', nb_dim ; sys.exit(0)
 
 
-# Opening the Netcdf file:
+# Opening the Netcdf output file:
 f_out = Dataset(cf_out, 'r+')     # r+ => can read and write in the file... )
 print 'File ', cf_out, 'is open...\n'
 
-# Extracting tmask at surface level:
-xtemp  = f_out.variables[cv_temp][:,:,:,:]
 
-#xtemp[:,:,:,:] = xtemp[:,:,:,:]*2.
 
+for jt in range(Nt):
 
-#gsw.CT_from_pt(SA, pt)
+    print '\n --- treating record # '+str(jt)
 
-f_out.variables[cv_temp][:,:,:,:] = gsw.CT_from_pt(xsal, xtemp)
+    if nb_dim==4: xsal = f_sal.variables[cv_sal][jt,:,:,:]
+    if nb_dim==3: xsal = f_sal.variables[cv_sal][jt,:,:]
+    if nb_dim==2: xsal = f_sal.variables[cv_sal][jt,:]
+
+    # Extracting tmask at surface level:
+    if nb_dim==4:
+        xtemp = f_out.variables[cv_temp][jt,:,:,:]
+        f_out.variables[cv_temp][jt,:,:,:] = gsw.CT_from_pt(xsal, xtemp)
+
+    if nb_dim==3:
+        xtemp = f_out.variables[cv_temp][jt,:,:]
+        f_out.variables[cv_temp][jt,:,:] = gsw.CT_from_pt(xsal, xtemp)
+
+    if nb_dim==2:
+        xtemp = f_out.variables[cv_temp][jt,:]
+        f_out.variables[cv_temp][jt,:] = gsw.CT_from_pt(xsal, xtemp)
+
+    
 
 
 f_out.variables[cv_temp].long_name = 'Conservative Temperature (TEOS10) built from potential temperature'
 
+f_sal.close()
 f_out.close()
 
 
 
-
 print cf_out+' sucessfully created!'
 

+ 5 - 1
python/exec/cross_sections.py

@@ -56,7 +56,11 @@ id_mm.close()
 
 bt.chck4f(cf_in)
 id_in  = Dataset(cf_in)
-vdepth = id_in.variables['deptht'][:]
+try:
+  vdepth = id_in.variables['deptht'][:]
+except KeyError as e:
+  vdepth = id_in.variables['olevel'][:]
+
 XT     = id_in.variables[cv_t][:,:,:,:]
 XS     = id_in.variables[cv_s][:,:,:,:]
 id_in.close()

+ 154 - 0
python/exec/field_to_mask.py

@@ -0,0 +1,154 @@
+#!/usr/bin/env python
+
+#       B a r a K u d a
+#
+#       L. Brodeau, 2017]
+
+import sys
+import numpy as nmp
+import string
+import os
+from netCDF4 import Dataset
+
+#l_fake_coor = True
+#l_fake_coor = False
+
+l_use_fillval = True
+
+narg = len(sys.argv)
+if narg not in [3,4]:
+    print 'Usage: '+sys.argv[0]+' <netcdf_file.nc> <2D or 3D netCDF field> (<value>)'
+    print '  => if no <value> is specified: the "_FillValue" attribute is used!\n'
+    sys.exit(0)
+
+cf_nc = sys.argv[1]
+cv_nc = sys.argv[2]
+
+if narg == 4:
+    l_use_fillval = False
+    rfill_val = float(sys.argv[3])
+    
+cfname, cncext = os.path.splitext(cf_nc)
+
+
+#cf_msk = 'lsm_'+string.replace(os.path.basename(cf_nc), cv_nc, 'mask')
+
+cf_msk = 'mask.nc'
+
+print ' *** Will create mask '+cf_msk
+
+
+
+
+
+
+# Reading data array:
+f_nc = Dataset(cf_nc)
+ndim = len(f_nc.variables[cv_nc].dimensions)
+#
+if l_use_fillval:
+    list_att_var = f_nc.variables[cv_nc].ncattrs()
+    if '_FillValue' in list_att_var:
+        rfill_val = f_nc.variables[cv_nc]._FillValue
+    elif 'missing_value' in list_att_var:
+        rfill_val = f_nc.variables[cv_nc].missing_value
+    else:
+        print 'ERROR: found neither "_FillValue" nor "missing_value" attribute for variable '+cv_nc+' !'; sys.exit(0)
+        #
+print '\n *** Field value to use to generate mask: rfill_val =',rfill_val,'\n'
+#
+# Looking at the dimmensions of the variable:
+list_dim_var = f_nc.dimensions.keys()
+print 'list_dim_var = ', list_dim_var
+# Check if one is unlimited:
+inu = 0
+for cd in list_dim_var:
+    if f_nc.dimensions[cd].isunlimited(): inu = inu + 1
+if inu > 1:
+    print 'PROBLEM: there are more than one UNLIMITED dimension in the file!'
+    sys.exit(0)
+
+NbDim = 3
+#
+if   ndim == 4:
+        xfield = f_nc.variables[cv_nc][0,:,:,:] # 3D !       BAD!
+elif ndim == 3:
+    if inu==1:
+        xfield = f_nc.variables[cv_nc][0,:,:] ; # 2D !
+        NbDim = 2
+    else:
+        xfield = f_nc.variables[cv_nc][:,:,:] ; # 3D !       
+elif ndim == 2:
+        if inu==0:
+            xfield = f_nc.variables[cv_nc][:,:]
+            NbDim = 2
+        else:
+            print 'PROBLEM: your field does not seem to be 3D!'
+else:
+    print ' ERROR (mk_zonal_average.py) => weird shape for your mask array!'
+    sys.exit(0)
+#xfield  = f_nc.variables[cv_nc][:,:]
+f_nc.close()
+
+
+nz = -1
+if NbDim==3:
+    (nz,ny,nx) = nmp.shape(xfield)
+    print("nx, ny, nz =",nx,ny,nz)
+    mask = nmp.zeros((nz,ny,nx))
+else:
+    (ny,nx) = nmp.shape(xfield)
+    print("nx, ny =",nx,ny)
+    mask = nmp.zeros((ny,nx))
+
+
+if l_use_fillval:
+    if rfill_val > 0:
+        idd = nmp.where( xfield < rfill_val )
+    else:
+        idd = nmp.where( xfield > rfill_val )
+    #
+else:
+    idd = nmp.where( xfield != rfill_val )
+        
+mask[idd]=1
+
+
+
+f_out = Dataset(cf_msk, 'w', format='NETCDF4')
+
+# Dimensions:
+cdim_x = 'x'
+cdim_y = 'y'
+cdim_z = 'z'
+
+f_out.createDimension(cdim_x, nx)
+f_out.createDimension(cdim_y, ny)
+if NbDim==3: f_out.createDimension(cdim_z, nz)
+
+
+#if l_fake_coor:
+#    id_lon  = f_out.createVariable('lon0','f4',(cdim_x,))
+#    id_lat  = f_out.createVariable('lat0','f4',(cdim_y,))
+#    id_lon[:] = vlon[:]
+#    id_lat[:] = vlat[:]
+
+
+if NbDim==3:
+    id_msk  = f_out.createVariable('mask','i1',(cdim_z,cdim_y,cdim_x,))
+    id_msk[:,:,:]   = mask[:,:,:]
+else:
+    id_msk  = f_out.createVariable('mask','i1',(cdim_y,cdim_x,))
+    id_msk[:,:]   = mask[:,:]
+
+id_msk.long_name = 'Land-Sea mask'
+
+
+f_out.About  = 'Variable '+cv_nc+' converted to a mask...'
+f_out.Author = 'Generated with image_to_netcdf.py of BARAKUDA (https://github.com/brodeau/barakuda)'
+
+f_out.close()
+
+
+
+print cf_msk+' created!!!'

+ 35 - 21
python/exec/image_to_netcdf.py

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 #       B a r a K u d a
 #
@@ -7,37 +7,35 @@
 import sys
 import numpy as nmp
 from PIL import Image
-import string
 import os
 from netCDF4 import Dataset
 import datetime
 
-l_fake_coor = True
-#l_fake_coor = False
-
+#l_fake_coor = True
+l_fake_coor = False
 
+l_nemo_like = True
 
 narg = len(sys.argv)
 if not narg in [2, 3]:
-    print 'Usage: '+sys.argv[0]+' <image> (<field divider for field>)'; sys.exit(0)
+    print('Usage: '+sys.argv[0]+' <image> (<field divider for field>)'); sys.exit(0)
 
 cf_im = sys.argv[1]
 
 idiv = 1
 if narg == 3: idiv = int(sys.argv[2])
 
-print idiv
+print(idiv)
 
 cfname, cfext = os.path.splitext(cf_im)
 
 
 #(nj,ni) = nmp.shape(nav_lon)
 
-cf_nc = string.replace(os.path.basename(cf_im), cfext, '.nc')
+cf_nc = str.replace(os.path.basename(cf_im), cfext, '.nc')
 
 # Opening Images:
-print ' *** Opening image '+cf_nc
-
+print(' *** Opening image '+cf_im)
 pic = Image.open(cf_im)
 
 
@@ -49,22 +47,22 @@ vshape_pic = nmp.shape(pic)
 
 if len(vshape_pic) == 3:
     (ny,nx,nrgb) = vshape_pic
-    if nrgb != 3: print ' Problem #1 with your image, not what we expected!' ; sys.exit(0)
+    if nrgb != 3: print(' Problem #1 with your image, not what we expected!') ; sys.exit(0)
     lcolor = True    ;  # RGB color picture => 3 2D array
-    print "\n It's a RGB color picture!\n"
+    print("\n It's a RGB color picture!\n")
     
 elif len(vshape_pic) == 2:
     lcolor = False   ;  # grey-scale picture (true black and white) => 1 2D array
     (ny,nx) = vshape_pic
     nrgb = 1
-    print "\n It's a grey-scale B&W picture!\n"
+    print("\n It's a grey-scale B&W picture!\n")
 else:
-    print ' Problem #2 with your image, not what we expected!' ; sys.exit(0)
+    print(' Problem #2 with your image, not what we expected!') ; sys.exit(0)
 
 
 
 
-print " *** shape of pic: ", (ny,nx)
+print(" *** shape of pic: ", (ny,nx))
 
 xpic = nmp.array(pic)
 
@@ -76,7 +74,7 @@ if l_fake_coor:
     
     vlat = nmp.zeros(ny) ; dy = 180./float(ny)
     for jj in range(ny): vlat[jj] = -90 + (float(jj) + 0.5)*dy
-    #print vlat[:]
+    #print(vlat[:])
     #sys.exit(0)
 
 
@@ -84,14 +82,24 @@ f_out = Dataset(cf_nc, 'w', format='NETCDF4')
 
 # Dimensions:
 
-cdim_x = 'x'
-cdim_y = 'y'
+cdim_x = 'longitude'
+cdim_y = 'latitude'
+
+if l_nemo_like:
+    cdim_x = 'x'
+    cdim_y = 'y'
+
+
 #if l_fake_coor:
 #    cdim_x = 'lon'
 #    cdim_y = 'lat'
+
+
 f_out.createDimension(cdim_x, nx)
 f_out.createDimension(cdim_y, ny)
 
+#if l_nemo_like: f_out.createDimension('t', None)
+
 if l_fake_coor:
     id_lon  = f_out.createVariable('lon0','f4',(cdim_x,))
     id_lat  = f_out.createVariable('lat0','f4',(cdim_y,))
@@ -116,9 +124,15 @@ if lcolor:
     id_blue[:,:]  = nmp.flipud(xpic[:,:,2])
 
 else:
-    id_bw  = f_out.createVariable('bw','f4',(cdim_y,cdim_x,))
+    #if l_nemo_like:
+    #    id_bw  = f_out.createVariable('bw','i1',('t',cdim_y,cdim_x,))
+    #    id_bw.long_name = 'Grey scale'
+    #    #id_bw[0,:,:]   = nmp.flipud(xpic[:,:]) / idiv
+    #    id_bw[0,:,:]   = 1 - (nmp.flipud(xpic[:,:]) + 1)/idiv
+    #else:
+    id_bw  = f_out.createVariable('bw','i1',(cdim_y,cdim_x,))
     id_bw.long_name = 'Grey scale'
-    id_bw[:,:]   = nmp.flipud(xpic[:,:]) / idiv
+    id_bw[:,:]   = 1 - (nmp.flipud(xpic[:,:]) + 1)/idiv
 
 
 f_out.About  = 'Image '+cf_im+' converted to netcdf.'
@@ -128,5 +142,5 @@ f_out.close()
 
 
 
-print cf_nc+' created!!!'
+print(cf_nc+' created!!!')
 

+ 5 - 1
python/exec/mean_3d.py

@@ -106,7 +106,11 @@ print '      ==> variable '+cvar
 
 # DATA:
 id_in = Dataset(cf_T_in)
-vdepth = id_in.variables['deptht'][:]
+try:
+    vdepth = id_in.variables['deptht'][:]
+except KeyError as e:
+    vdepth = id_in.variables['olevel'][:]
+
 Xd_m = id_in.variables[cvar][:,:,:,:]
 id_in.close()
 

+ 1 - 1
python/exec/movie_nemo_globe.py

@@ -190,7 +190,7 @@ for jt in range(jt0,jtN):
         print ' *** reference longitude =', rot
 
     	fig = plt.figure(num = 1, figsize=(rh,1.167*rh), dpi=None, facecolor='b', edgecolor='k')
-    	ax  = plt.axes([0.005, 0.05, 0.99, 0.99], axisbg = '0.35')
+    	ax  = plt.axes([0.005, 0.05, 0.99, 0.99], facecolor = '0.35')
 
         plt.title('Ocean (NEMO@ORCA12 + IFS@'+CTATM+'): '+cfield+', '+cdate, **cfont_title)
 

+ 384 - 0
python/exec/movie_nemo_section.py

@@ -0,0 +1,384 @@
+#!/usr/bin/env python
+
+#       B a r a K u d a
+#
+#  Prepare 2D maps (monthly) that will later become a GIF animation!
+#  NEMO output and observations needed
+#
+#    L. Brodeau, May 2018
+
+import sys
+import os
+from string import replace
+import numpy as nmp
+
+from netCDF4 import Dataset
+
+import matplotlib as mpl
+mpl.use('Agg')
+import matplotlib.pyplot as plt
+import matplotlib.colors as colors
+
+import warnings
+warnings.filterwarnings("ignore")
+
+from calendar import isleap
+import datetime
+
+import barakuda_colmap as bcm
+
+import barakuda_tool as bt
+
+
+vmn = [ 31, 28, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 ]
+vml = [ 31, 29, 31, 30, 31, 30, 31, 31, 30, 31, 30, 31 ]
+
+
+color_top = 'white'
+#color_top = 'k'
+
+
+
+#jt0 = 248
+jt0 = 0
+
+
+j2=0
+k2=0
+l_show_cb = True
+l_show_date = True
+l_log_field = False
+l_pow_field = False
+l_annotate_name = False
+
+
+narg = len(sys.argv)
+if narg < 6: print 'Usage: '+sys.argv[0]+' <NEMOCONF> <file> <variable> <LSM_file> <YYYYMMDD (start)>'; sys.exit(0)
+CNEMO  = sys.argv[1]
+cf_in = sys.argv[2] ; cv_in=sys.argv[3]
+cf_lsm=sys.argv[4] ; cf_date0=sys.argv[5]
+
+
+if CNEMO == 'eNATL60':
+    Nk0 = 300
+    Nj0 = 4729-1
+    l_show_cb = False
+    l_show_date = True
+    cdt = '1h'
+    #cbox = 'FullMed' ; j1=5400 ; k1=1530 ; j2=Nj0 ; k2=3310 ; rfact_zoom = 0.79 ; vcb = [0.5, 0.875, 0.485, 0.02] ; font_rat = 2.*rfact_zoom ; l_annotate_name=False
+    cbox = 'ALL' ; j1=0 ; k1=0 ; j2=Nj0 ; k2=Nk0 ; rfact_zoom = 0.3047 ; vcb = [0.59, 0.1, 0.38, 0.018] ; font_rat = 8.*rfact_zoom
+    #cbox = 'Portrait' ; j1=2760 ; k1=1000 ; j2=4870 ; k2=4000 ; rfact_zoom = 1. ; vcb = [0.59, 0.1, 0.38, 0.018] ; font_rat = 1.*rfact_zoom ; l_annotate_name=False; l_show_date=False
+    x_date = 1900 ; y_date = 20 ; # where to put the date
+
+if CNEMO == 'NATL60':
+    Nk0 = 300
+    Nj0 = 3454-1
+    #l_pow_field = True ; pow_field = 1.5
+    l_show_cb = False
+    l_show_date = False
+    cdt = '1h'
+    #cbox = 'zoom1' ; j1 = 1800 ; k1 = 950 ; j2 = j1+1920 ; k2 = k1+1080 ; rfact_zoom = 1. ; vcb = [0.5, 0.875, 0.485, 0.02] ; font_rat = 8.*rfact_zoom ; l_show_lsm = False
+    #cbox = 'zoom1' ; j1 = 1800 ; k1 = 950 ; j2 = j1+2560 ; k2 = k1+1440 ; rfact_zoom = 1. ; vcb = [0.5, 0.875, 0.485, 0.02] ; font_rat = 8.*rfact_zoom
+    cbox = 'ALL' ; j1=0 ; k1=0 ; j2=Nj0 ; k2=Nk0 ; rfact_zoom = 1. ; vcb = [0.59, 0.1, 0.38, 0.018] ; font_rat = 4.*rfact_zoom
+    x_date = 350 ; y_date = 7 ; # where to put the date
+
+
+if CNEMO == 'NANUK025':
+    cdt = '3h'; cbox = 'ALL' ; j1 = 0 ; k1 = 0 ; j2 = 492 ; k2 = 614 ; rfact_zoom = 2. ; vcb = [0.5, 0.875, 0.485, 0.02] ; font_rat = 8.*rfact_zoom
+    x_date = 350 ; y_date = 7 ; # where to put the date
+
+
+print '\n rfact_zoom = ', rfact_zoom
+print ' font_rat = ', font_rat, '\n'
+
+nx_res = j2-j1
+ny_res = k2-k1
+
+print ' *** nx_res, ny_res =', nx_res, ny_res
+
+
+print ' j1,j2,k1,k2 =>', j1,j2,k1,k2
+
+yx_ratio = float(ny_res)/float(nx_res)
+
+nxr = int(rfact_zoom*nx_res) ; # widt image (in pixels)
+nyr = int(rfact_zoom*ny_res) ; # height image (in pixels)
+
+
+
+
+dpi = 110
+
+rh = round(float(nxr)/float(dpi),3) ; # width of figure as for figure...
+
+fig_type='png'
+
+
+
+cyr0=cf_date0[0:4]
+cmn0=cf_date0[4:6]
+cdd0=cf_date0[6:8]
+
+
+l_3d_field = False
+
+
+if cv_in in ['sosstsst','tos']:
+    cfield = 'SST'
+    tmin=0. ;  tmax=30.   ;  df = 2. ; cpal_fld = 'ncview_nrl'
+    #tmin=0. ;  tmax=32.   ;  df = 2. ; cpal_fld = 'viridis'
+    #tmin=4. ;  tmax=20.   ;  df = 1. ; cpal_fld = 'PuBu'
+    cunit = r'SST ($^{\circ}$C)'
+    cb_jump = 2
+    l_show_cb = True
+
+if cv_in == 'sossheig':
+    cfield = 'SSH'
+    #tmin=-0.5 ;  tmax=0.5   ;  df = 0.05
+    #tmin=-1.2 ;  tmax=2.3   ;  df = 0.05 ; l_apply_lap = True
+    #cpal_fld = 'ncview_jaisnc'
+    #cpal_fld = 'PuBu'
+    #cpal_fld = 'RdBu'
+    #cpal_fld = 'BrBG'
+    #
+    #cpal_fld = 'on3' ; tmin=-1.2 ;  tmax=2.3   ;  df = 0.05 ; l_apply_lap = True
+    cpal_fld = 'on2' ; tmin=-1.2 ;  tmax=1.2   ;  df = 0.05
+    #cpal_fld = 'coolwarm' ; tmin=-1. ;  tmax=1.   ;  df = 0.05 ; l_apply_lap = True
+    #cpal_fld = 'RdBu_r' ; tmin=-0.9 ;  tmax=-tmin   ;  df = 0.05 ; l_apply_lap = True
+    #cpal_fld = 'gray_r' ; tmin=-0.3 ;  tmax=0.3   ;  df = 0.05 ; l_apply_lap = True
+    #cpal_fld = 'bone_r' ; tmin=-0.9 ;  tmax=-tmin   ;  df = 0.05 ; l_apply_lap = True ; l_pow_field = True ; pow_field = 2.
+    cunit = r'SSH (m)'
+    cb_jump = 1
+
+if cv_in == 'socurloverf':
+    cfield = 'RV'
+    cpal_fld = 'on2' ; tmin=-1. ;  tmax=1. ;  df = 0.05
+    cunit = ''
+    cb_jump = 1
+
+
+if cv_in == 'vozocrtx':
+    cfield = 'U'
+    #cpal_fld = 'on2'
+    cpal_fld = 'RdBu'
+    tmin=-0.25 ;  tmax=0.25 ;  df = 0.05
+    cunit = ''
+    cb_jump = 1
+
+
+else:
+    print 'ERROR: we do not know cv!'
+    sys.exit(0)
+
+
+
+    
+
+
+bt.chck4f(cf_lsm)
+bt.chck4f(cf_in)
+#id_fld = Dataset(cf_in)
+#vtime = id_fld.variables['time_counter'][:]
+#id_fld.close()
+#Nt = len(vtime)
+
+cv_lsm = 'tmask'
+
+if cv_in == 'vozocrtx': cv_lsm = 'umask'
+
+
+bt.chck4f(cf_lsm)
+print '\n '+cv_lsm+' !!!'
+id_lsm = Dataset(cf_lsm)
+nb_dim = len(id_lsm.variables[cv_lsm].dimensions)
+print ' The mesh_mask has '+str(nb_dim)+' dimmensions!'
+if nb_dim==4: XMSK  = id_lsm.variables[cv_lsm][0,k1:k2,j1:j2,0]
+if nb_dim==3: XMSK  = id_lsm.variables[cv_lsm][k1:k2,j1:j2,0]
+if nb_dim==2: XMSK  = id_lsm.variables[cv_lsm][k1:k2,j1:j2]
+(nk,nj) = nmp.shape(XMSK)
+#XE1T2 = id_lsm.variables['e1t'][0,k1:k2,j1:j2]
+#XE2T2 = id_lsm.variables['e2t'][0,k1:k2,j1:j2]
+#(nk,nj) = nmp.shape(XE1T2)
+#XE1T2 = XE1T2*XE1T2
+#XE2T2 = XE2T2*XE2T2
+id_lsm.close()
+
+print 'Shape Arrays => nj,nk =', nj,nk
+
+id_fld = Dataset(cf_in)
+Nt = len(id_fld.variables[cv_in][:,0,0])
+id_fld.close()
+
+print ' *** Nt = ', Nt
+print 'Done!\n'
+
+
+
+
+pmsk = nmp.ma.masked_where(XMSK[:,:] > 0.2, XMSK[:,:]*0.+40.)
+
+
+
+#font_rat
+#params = { 'font.family':'Ubuntu',
+params = { 'font.family':'Helvetica Neue',
+           'font.weight':    'normal',
+           'font.size':       int(9.*font_rat),
+           'legend.fontsize': int(9.*font_rat),
+           'xtick.labelsize': int(9.*font_rat),
+           'ytick.labelsize': int(9.*font_rat),
+           'axes.labelsize':  int(9.*font_rat) }
+mpl.rcParams.update(params)
+cfont_clb  = { 'fontname':'Helvetica Neue', 'fontweight':'medium', 'fontsize':int(8.*font_rat), 'color':'w'}
+cfont_date = { 'fontname':'Ubuntu Mono', 'fontweight':'normal', 'fontsize':int(12.*font_rat), 'color':'w' }
+cfont_mail = { 'fontname':'Times New Roman', 'fontweight':'normal', 'fontstyle':'italic', 'fontsize':int(14.*font_rat), 'color':'0.8'}
+cfont_titl = { 'fontname':'Helvetica Neue', 'fontweight':'light', 'fontsize':int(30.*font_rat), 'color':'w' }
+
+
+# Colormaps for fields:
+pal_fld = bcm.chose_colmap(cpal_fld)
+if l_log_field:
+    norm_fld = colors.LogNorm(  vmin = tmin, vmax = tmax, clip = False)
+if l_pow_field:
+    norm_fld = colors.PowerNorm(gamma=pow_field, vmin = tmin, vmax = tmax, clip = False)
+else:
+    norm_fld = colors.Normalize(vmin = tmin, vmax = tmax, clip = False)
+
+
+pal_lsm = bcm.chose_colmap('land_dark')
+norm_lsm = colors.Normalize(vmin = 0., vmax = 1., clip = False)
+
+
+
+
+if cdt == '3h':
+    dt = 3
+elif cdt == '1h':
+    dt = 1
+else:
+    print 'ERROR: unknown dt!'
+
+
+
+
+print ' *** Dimension image:', rh*float(dpi), rh*yx_ratio*float(dpi),'\n'
+
+
+ntpd = 24/dt
+
+
+vm = vmn
+if isleap(int(cyr0)): vm = vml
+#print ' year is ', vm, nmp.sum(vm)
+
+jd = int(cdd0) - 1
+jm = int(cmn0)
+
+for jt in range(jt0,Nt):
+
+    jh = (jt*dt)%24
+    jdc = (jt*dt)/24 + 1
+
+    if jt%ntpd == 0: jd = jd + 1
+
+    if jd == vm[jm-1]+1 and (jt)%ntpd == 0 :
+        jd = 1
+        jm = jm + 1
+
+    ch = '%2.2i'%(jh)
+    #cdc= '%3.3i'%(jdc)
+    cd = '%3.3i'%(jd)
+    cm = '%2.2i'%(jm)
+
+    #print '\n\n *** jt, ch, cd, cm =>', jt, ch, cd, cm
+
+
+    ct = str(datetime.datetime.strptime(cyr0+'-'+cm+'-'+cd+' '+ch, '%Y-%m-%j %H'))
+    ct=ct[:5]+cm+ct[7:] #lolo bug !!! need to do that to get the month and not "01"
+    print ' ct = ', ct
+    cday  = ct[:10]   ; print ' *** cday  :', cday
+    chour = ct[11:13] ; print ' *** chour :', chour
+
+
+
+    cfig = 'figs/'+cv_in+'_NEMO_'+CNEMO+'_'+cbox+'_'+cday+'_'+chour+'_'+cpal_fld+'.'+fig_type
+
+    fig = plt.figure(num = 1, figsize=(rh,rh*yx_ratio*3), dpi=None, facecolor='w', edgecolor='0.5')
+
+    #ax  = plt.axes([0.065, 0.05, 0.9, 1.], facecolor = '0.5')
+    ax  = plt.axes([0., 0., 1., 1.], facecolor = '0.5')
+
+    vc_fld = nmp.arange(tmin, tmax + df, df)
+
+
+    print "Reading record #"+str(jt)+" of "+cv_in+" in "+cf_in
+    id_fld = Dataset(cf_in)
+    XFLD  = id_fld.variables[cv_in][jt,k1:k2,j1:j2,0]
+    id_fld.close()
+    print "Done!"
+
+
+    #if not l_show_lsm and jt == jt0: ( nk , nj ) = nmp.shape(XFLD)
+    print '  *** dimension of array => ', nj, nk
+
+    print "Ploting"
+    cf = plt.imshow(XFLD[:,:], cmap = pal_fld, norm = norm_fld, interpolation='none')
+    del XFLD
+    print "Done!"
+
+    #cm = plt.imshow(nmp.flipud(pmsk), cmap = pal_lsm, norm = norm_lsm, interpolation='none')
+
+    #plt.axis([ 0, nj, 0, nk])
+
+    #plt.title('NEMO: '+cfield+', coupled '+CNEMO+', '+cday+' '+chour+':00', **cfont_title)
+
+
+
+    if l_show_cb:
+        color_top='w'
+        ax2 = plt.axes(vcb)
+        clb = mpl.colorbar.ColorbarBase(ax2, ticks=vc_fld, cmap=pal_fld, norm=norm_fld, orientation='horizontal', extend='both')
+        if cb_jump > 1:
+            cb_labs = [] ; cpt = 0
+            for rr in vc_fld:
+                if cpt % cb_jump == 0:
+                    if df >= 1.: cb_labs.append(str(int(rr)))
+                    if df <  1.: cb_labs.append(str(rr))
+                else:
+                    cb_labs.append(' ')
+                    cpt = cpt + 1
+                    clb.ax.set_xticklabels(cb_labs, **cfont_clb)
+                    clb.set_label(cunit, **cfont_clb)
+                    clb.ax.yaxis.set_tick_params(color=color_top) ; # set colorbar tick color
+                    clb.outline.set_edgecolor(color_top) ; # set colorbar edgecolor
+                    plt.setp(plt.getp(clb.ax.axes, 'xticklabels'), color=color_top) ; # set colorbar ticklabels
+
+    del cf
+
+
+
+
+
+    if l_show_date:
+        xl = float(x_date)/rfact_zoom
+        yl = float(y_date)/rfact_zoom
+        ax.annotate('Date: '+cday+' '+chour+':00', xy=(1, 4), xytext=(xl,yl), **cfont_date)
+
+    #ax.annotate('laurent.brodeau@ocean-next.fr', xy=(1, 4), xytext=(xl+150, 20), **cfont_mail)
+
+
+
+    if l_annotate_name:
+        xl = float(nxr)/20./rfact_zoom
+        yl = float(nyr)/1.33/rfact_zoom
+        ax.annotate(CNEMO, xy=(1, 4), xytext=(xl, yl), **cfont_titl)
+
+
+
+    plt.savefig(cfig, dpi=dpi, orientation='portrait', facecolor='k')
+    print cfig+' created!\n'
+    plt.close(1)
+
+
+    del cm, fig, ax
+    if l_show_cb: del clb

+ 9 - 9
python/exec/netcdf_to_image_bw.py

@@ -1,4 +1,4 @@
-#!/usr/bin/env python
+#!/usr/bin/env python3
 
 #       B a r a K u d a
 #
@@ -7,7 +7,7 @@
 import sys
 import numpy as nmp
 from PIL import Image
-import string
+#import string
 import os
 from netCDF4 import Dataset
 
@@ -18,7 +18,7 @@ l_fake_coor = True
 
 narg = len(sys.argv)
 if narg not in [4, 5]:
-    print 'Usage: '+sys.argv[0]+' <netcdf_file.nc> <netcdf_variable> <image_extension (jpg,png,bmp,...)> (mutiple to field)'; sys.exit(0)
+    print('Usage: '+sys.argv[0]+' <netcdf_file.nc> <netcdf_variable> <image_extension (jpg,png,bmp,...)> (mutiple to field)'); sys.exit(0)
 
 cf_nc = sys.argv[1]
 cv_nc = sys.argv[2]
@@ -28,14 +28,14 @@ imult = 1
 if narg == 5: imult = int(sys.argv[4])
 
 
-print imult
+print(imult)
     
 cfname, cncext = os.path.splitext(cf_nc)
 
 
-cf_im = string.replace(os.path.basename(cf_nc), cncext, '.'+ciext)
+cf_im = str.replace(os.path.basename(cf_nc), cncext, '.'+ciext)
 
-print ' *** Will create image '+cf_im
+print(' *** Will create image '+cf_im)
 
 
 
@@ -52,7 +52,7 @@ elif Ndim == 3:
 elif Ndim == 2:
     xfield = imult*f_nc.variables[cv_nc][:,:]
 else:
-    print ' ERROR (mk_zonal_average.py) => weird shape for your mask array!'
+    print(' ERROR (mk_zonal_average.py) => weird shape for your mask array!')
     sys.exit(0)
 #xfield  = imult*f_nc.variables[cv_nc][:,:]
 f_nc.close()
@@ -73,7 +73,7 @@ ifield[idx_too_small] = 0
 idx_too_large = nmp.where(ifield > 255)
 ifield[idx_too_large] = 255
 
-#print ifield[:,22]
+#print(ifield[:,22])
 
 ifield8 = ifield.astype(nmp.uint8)
 
@@ -82,4 +82,4 @@ image = Image.fromarray(nmp.flipud(ifield8))
 
 # Then save it:
 image.save(cf_im)
-print ' *** Image '+cf_im+' saved!\n'
+print(' *** Image '+cf_im+' saved!\n')

+ 1 - 0
python/exec/prepare_movies.py

@@ -133,6 +133,7 @@ if cvar == 'ice':
     if vdic['NN_ICEF'] == 'X':
         print 'ERROR (prepare_movies.py): you set "X" (missing) as the name for ice concentration in your conf file!'; sys.exit(0)
     Vnemo = id_in.variables[vdic['NN_ICEF']][:,:,:]
+    Vnemo = nmp.multiply(Vnemo,100)
 
 if cvar == 'mld':
     if vdic['NN_MLD'] == 'X':

+ 3 - 3
python/exec/show_global_orca_field.py

@@ -257,7 +257,7 @@ mpl.rcParams.update(params)
 
 # Creating colorbar in a dfferent image:
 fig = plt.figure(num = 2, figsize=(rh,rh/18.), dpi=None) #, facecolor='w', edgecolor='0.5')
-ax2 = plt.axes([0., 0., 1., 1.], axisbg = None)
+ax2 = plt.axes([0., 0., 1., 1.], facecolor = None)
 ax2 = plt.axes([0.2, 0.5, 0.6, 0.4])
 clb = mpl.colorbar.ColorbarBase(ax2, ticks=vc_fld, cmap=pal_fld, norm=norm_fld, orientation='horizontal', extend='both')
 cb_labs = [] ; cpt = 0
@@ -290,8 +290,8 @@ plt.savefig('colorbar_p'+cpal_fld+'_cc'+color_continents+'.svg', dpi=rDPI, orien
 
 fig = plt.figure(num = 1, figsize=(rh,rh*float(nj)/float(ni)), dpi=None) #, facecolor='r', edgecolor='0.5') #, facecolor='w', edgecolor='0.5')
 
-ax  = plt.axes([0., 0., 1., 1.], axisbg = color_continents)
-if ldebug: ax  = plt.axes([0.1, 0.1, 0.8, 0.8], axisbg = 'w')
+ax  = plt.axes([0., 0., 1., 1.], facecolor = color_continents)
+if ldebug: ax  = plt.axes([0.1, 0.1, 0.8, 0.8], facecolor = 'w')
 
 
 

+ 4 - 1
python/exec/temp_sal.py

@@ -150,7 +150,10 @@ else:
 
 l_do_monthly_3d=True
 if 'deptht' in list_var:
-    vdepth = id_nemo_mn.variables['deptht'][:]
+    try:
+      vdepth = id_nemo_mn.variables['deptht'][:]
+    except KeyError as e:
+      vdepth = id_nemo_mn.variables['olevel'][:]
 else:
     print 'WARNING: depth vector "deptht" not present in '+cf_nemo_mn+'!\n'
     l_do_monthly_3d=False

+ 4 - 4
python/exec/zgr2_slider.py

@@ -130,10 +130,10 @@ fig, ax = plt.subplots()
 plt.subplots_adjust(left=0.05, bottom=0.25)
 axcolor = 'lightgoldenrodyellow'
 
-axkth   = plt.axes([0.25, 0.16, 0.65, 0.02], axisbg=axcolor)
-axacr   = plt.axes([0.25, 0.12, 0.65, 0.02], axisbg=axcolor)
-axkth2  = plt.axes([0.25, 0.08, 0.65, 0.02], axisbg=axcolor)
-axacr2  = plt.axes([0.25, 0.04, 0.65, 0.02], axisbg=axcolor)
+axkth   = plt.axes([0.25, 0.16, 0.65, 0.02], facecolor=axcolor)
+axacr   = plt.axes([0.25, 0.12, 0.65, 0.02], facecolor=axcolor)
+axkth2  = plt.axes([0.25, 0.08, 0.65, 0.02], facecolor=axcolor)
+axacr2  = plt.axes([0.25, 0.04, 0.65, 0.02], facecolor=axcolor)
 
 skth2   = Slider(axkth2, 'kth2', 0.1, 400.0, valinit=zkth2)
 skth    = Slider(axkth , 'kth ', 0.1, 400.0, valinit=zkth )

+ 20 - 12
python/modules/barakuda_colmap.py

@@ -11,7 +11,7 @@ import numpy as nmp
 
 # List of Barakuda home-made colormaps:
 list_barakuda = [ 'blk', 'land', 'land_dark', 'terre', 'cb1', 'eke', 'bathy', 'mld', 'tap1', 'tap2', 'jetblanc', 'amoc',
-                  'sst1', 'sst2', 'sst3', 'ice', 'blanc', 'rms',
+                  'sst1', 'sst2', 'sst3', 'ice', 'ice_on', 'blanc', 'rms',
                   'sigtr', 'bbr', 'bbr2', 'bbr0', 'bbr_cold', 'bbr_warm',
                   'cold0', 'warm0', 'graylb', 'graylb2', 'sigma', 'sigma0', 'mask', 'on0', 'on1', 'on2', 'on3' ]
 
@@ -19,21 +19,21 @@ list_barakuda = [ 'blk', 'land', 'land_dark', 'terre', 'cb1', 'eke', 'bathy', 'm
 
 l_debug = False
 
-def chose_colmap( cname, log_ctrl=0 ):
+def chose_colmap( cname, log_ctrl=0, exp_ctrl=0 ):
 
     # 1st is it a ncview colormap ?
     if cname[:7] == 'ncview_':
         M = ncview_cmap_to_array( cname )
-        ColorMap = __build_colormap__(M, log_ctrl=log_ctrl)
+        ColorMap = __build_colormap__(M, log_ctrl=log_ctrl, exp_ctrl=exp_ctrl)
 
         # Maybe a barakuda colormap ?
     elif cname in list_barakuda or ( cname[-2:] == '_r' and cname[:-2] in list_barakuda):
         if l_debug: print '\n *** Getting Barakuda colormap "'+cname+'" !'
         x = brkd_cmap(cname)
-        ColorMap = x.clrmp(log_ctrl=log_ctrl)
+        ColorMap = x.clrmp(log_ctrl=log_ctrl, exp_ctrl=exp_ctrl)
     else:
         # Then it must be a Matplotlib colormap:
-        if log_ctrl > 0: print 'WARNING: cannot use LOG colormap with Matplotlib colormaps...'
+        if log_ctrl or exp_ctrl > 0: print 'WARNING: cannot use LOG or EXP colormap with Matplotlib colormaps...'
         from matplotlib.pylab import cm
         import matplotlib.pyplot as mp
         list = mp.colormaps()
@@ -123,7 +123,7 @@ def ncview_cmap_to_array( cname ):
 # ===== local ======
 
 
-def __build_colormap__(MC, log_ctrl=0):
+def __build_colormap__(MC, log_ctrl=0, exp_ctrl=0):
 
     import matplotlib.colors as mplc
 
@@ -134,6 +134,7 @@ def __build_colormap__(MC, log_ctrl=0):
     for i in range(nc): x.append(255.*float(i)/((nc-1)*255.0))
     x = nmp.array(x)
     if log_ctrl > 0: x = nmp.log(x + log_ctrl)
+    if exp_ctrl > 0: x = nmp.exp(x * exp_ctrl)
     rr = x[nc-1] ; x  = x/rr
 
     y =nmp.zeros(nc)
@@ -165,7 +166,7 @@ class brkd_cmap:
     def __init__(self, name):
         self.name = name
 
-    def clrmp(self, log_ctrl=0):
+    def clrmp(self, log_ctrl=0, exp_ctrl=0):
 
         cname = self.name
 
@@ -220,9 +221,9 @@ class brkd_cmap:
 
         elif cname == 'on3':
             M = nmp.array( [
-                [ 0.,0.,0. ],               # noir
-                [ 0.,138./255.,184./255. ], # bleu
-                [ 1.,1.,1. ],               # blanc
+                [ 0.,0.,0. ],               # noir                
+                [ 0.,138./255.,184./255. ], # bleu                
+                [ 1.,1.,1. ],               # blanc                
                 [ 1.,237./255.,0 ],         # jaune
             ] )
 
@@ -356,6 +357,13 @@ class brkd_cmap:
                 [ 1.0 , 1.0 , 1.0 ]  # white
             ] )
 
+        elif cname == 'ice_on':
+            M = nmp.array( [
+                [ 0.,0.,0. ],        # noir   (has to match with coldest color of "on3" !
+                [ 25./255. , 102./255. , 114./255. ],
+                [ 1.0 , 1.0 , 1.0 ]  # white
+            ] )
+            #                [ 0.6 , 0.6 , 0.8 ], # light grey
         elif cname == 'blanc':
             M = nmp.array( [
                 [ 1.0 , 1.0 , 1.0 ],  # white
@@ -505,9 +513,9 @@ class brkd_cmap:
 
         if lrev:
             # reverse colormap:
-            my_cmap = __build_colormap__(M[::-1,:], log_ctrl=log_ctrl)
+            my_cmap = __build_colormap__(M[::-1,:], log_ctrl=log_ctrl, exp_ctrl=exp_ctrl)
         else:
-            my_cmap = __build_colormap__(M, log_ctrl=log_ctrl)
+            my_cmap = __build_colormap__(M, log_ctrl=log_ctrl, exp_ctrl=exp_ctrl)
 
         return my_cmap
 

+ 80 - 53
python/modules/barakuda_ncio.py

@@ -30,7 +30,7 @@ def wrt_1d_series(vt, vd, cvar, cinfo,
             print 'ERROR: wrt_1d_series.barakuda_ncio => smoothing with nsmooth='+str(nsmooth)+' not supported!'; sys.exit(0)
 
 
-    f_o = Dataset(cf_o, 'w', format='NETCDF3_CLASSIC')
+    f_o = Dataset(cf_o, 'w', format='NETCDF4')
 
     nt = len(vt)
     if len(vd) != nt:  print 'ERROR: wrt_1d_series.barakuda_ncio => data & time have different lengths!'; sys.exit(0)
@@ -43,19 +43,19 @@ def wrt_1d_series(vt, vd, cvar, cinfo,
 
 
     f_o.createDimension('time', None)
-    id_t = f_o.createVariable('time','f4',('time',)) ;  id_t.units = cu_t
+    id_t = f_o.createVariable('time','f4',('time',), zlib=True, complevel=5) ;  id_t.units = cu_t
 
-    id_d = f_o.createVariable(cvar,'f4',('time',))
+    id_d = f_o.createVariable(cvar,'f4',('time',), zlib=True, complevel=5)
     id_d.units = cu_d ;  id_d.long_name = cln_d
 
-    if l_do_v2: id_d2 = f_o.createVariable(cvar2,'f4',('time',)); id_d2.units = cu_d; id_d2.long_name = cln_d2
-    if l_do_v3: id_d3 = f_o.createVariable(cvar3,'f4',('time',)); id_d3.units = cu_d; id_d3.long_name = cln_d3
-    if l_do_v4: id_d4 = f_o.createVariable(cvar4,'f4',('time',)); id_d4.units = cu_d; id_d4.long_name = cln_d4
-    if l_do_v5: id_d5 = f_o.createVariable(cvar5,'f4',('time',)); id_d5.units = cu_d; id_d5.long_name = cln_d5
+    if l_do_v2: id_d2 = f_o.createVariable(cvar2,'f4',('time',), zlib=True, complevel=5); id_d2.units = cu_d; id_d2.long_name = cln_d2
+    if l_do_v3: id_d3 = f_o.createVariable(cvar3,'f4',('time',), zlib=True, complevel=5); id_d3.units = cu_d; id_d3.long_name = cln_d3
+    if l_do_v4: id_d4 = f_o.createVariable(cvar4,'f4',('time',), zlib=True, complevel=5); id_d4.units = cu_d; id_d4.long_name = cln_d4
+    if l_do_v5: id_d5 = f_o.createVariable(cvar5,'f4',('time',), zlib=True, complevel=5); id_d5.units = cu_d; id_d5.long_name = cln_d5
 
 
     if lsmooth:
-        id_sm = f_o.createVariable(cvar+'_'+str(nsmooth)+'yrm','f4',('time',))
+        id_sm = f_o.createVariable(cvar+'_'+str(nsmooth)+'yrm','f4',('time',), zlib=True, complevel=5)
         id_sm.units = cu_d ;  id_sm.long_name = str(nsmooth)+'-year running mean of '+cln_d
 
 
@@ -160,60 +160,60 @@ def wrt_appnd_1d_series(vt, vd, cf, cvar1,  cu_t='year', cu_d='unknown', cln_d='
 
 
     if l_nc_is_new:
-        f_out = Dataset(cf, 'w', format='NETCDF3_CLASSIC')
+        f_out = Dataset(cf, 'w', format='NETCDF4')
     else:
-        f_out = Dataset(cf, 'a', format='NETCDF3_CLASSIC')
+        f_out = Dataset(cf, 'a', format='NETCDF4')
 
     if l_nc_is_new:
         jrec2write = 0
         f_out.createDimension('time', None)
-        id_t = f_out.createVariable('time','f4',('time',)) ;      id_t.units = 'year'
+        id_t = f_out.createVariable('time','f4',('time',), zlib=True, complevel=5) ;      id_t.units = 'year'
 
-        id_v01   = f_out.createVariable(cvar1 ,'f4',('time',))
+        id_v01   = f_out.createVariable(cvar1 ,'f4',('time',), zlib=True, complevel=5)
         id_v01.units     = cu_d
         id_v01.long_name = cln_d
         if l_do_v2:
-            id_v02   = f_out.createVariable(cvar2 ,'f4',('time',))
+            id_v02   = f_out.createVariable(cvar2 ,'f4',('time',), zlib=True, complevel=5)
             id_v02.units = cu_d
             if not cun2=='unknown': id_v02.units = cun2
             id_v02.long_name = cln_d2
         if l_do_v3:
-            id_v03   = f_out.createVariable(cvar3 ,'f4',('time',))
+            id_v03   = f_out.createVariable(cvar3 ,'f4',('time',), zlib=True, complevel=5)
             id_v03.units     = cu_d
             if not cun3=='unknown': id_v03.units = cun3
             id_v03.long_name = cln_d3
         if l_do_v4:
-            id_v04   = f_out.createVariable(cvar4 ,'f4',('time',))
+            id_v04   = f_out.createVariable(cvar4 ,'f4',('time',), zlib=True, complevel=5)
             id_v04.units     = cu_d
             if not cun4=='unknown': id_v04.units = cun4
             id_v04.long_name = cln_d4
         if l_do_v5:
-            id_v05   = f_out.createVariable(cvar5 ,'f4',('time',))
+            id_v05   = f_out.createVariable(cvar5 ,'f4',('time',), zlib=True, complevel=5)
             id_v05.units = cu_d
             if not cun5=='unknown': id_v05.units = cun5
             id_v05.long_name = cln_d5
         if l_do_v6:
-            id_v06   = f_out.createVariable(cvar6 ,'f4',('time',))
+            id_v06   = f_out.createVariable(cvar6 ,'f4',('time',), zlib=True, complevel=5)
             id_v06.units     = cu_d
             if not cun6=='unknown': id_v06.units = cun6
             id_v06.long_name = cln_d6
         if l_do_v7:
-            id_v07   = f_out.createVariable(cvar7 ,'f4',('time',))
+            id_v07   = f_out.createVariable(cvar7 ,'f4',('time',), zlib=True, complevel=5)
             id_v07.units     = cu_d
             if not cun7=='unknown': id_v07.units = cun7
             id_v07.long_name = cln_d7
         if l_do_v8:
-            id_v08   = f_out.createVariable(cvar8 ,'f4',('time',))
+            id_v08   = f_out.createVariable(cvar8 ,'f4',('time',), zlib=True, complevel=5)
             id_v08.units = cu_d
             if not cun8=='unknown': id_v08.units = cun8
             id_v08.long_name = cln_d8
         if l_do_v9:
-            id_v09   = f_out.createVariable(cvar9 ,'f4',('time',))
+            id_v09   = f_out.createVariable(cvar9 ,'f4',('time',), zlib=True, complevel=5)
             id_v09.units     = cu_d
             if not cun9=='unknown': id_v09.units = cun9
             id_v09.long_name = cln_d9
         if l_do_v10:
-            id_v10   = f_out.createVariable(cvar10 ,'f4',('time',))
+            id_v10   = f_out.createVariable(cvar10 ,'f4',('time',), zlib=True, complevel=5)
             id_v10.units     = cu_d
             if not cun7=='unknown': id_v10.units = cun10
             id_v10.long_name = cln_d10
@@ -306,9 +306,9 @@ def wrt_appnd_2dt_series(vx, vy, vt, xd, cf, cvar1, missing_val=-9999.,
 
 
     if l_nc_is_new:
-        f_out = Dataset(cf, 'w', format='NETCDF3_CLASSIC')
+        f_out = Dataset(cf, 'w', format='NETCDF4')
     else:
-        f_out = Dataset(cf, 'a', format='NETCDF3_CLASSIC')
+        f_out = Dataset(cf, 'a', format='NETCDF4')
 
     if l_nc_is_new:
         jrec2write = 0
@@ -317,58 +317,58 @@ def wrt_appnd_2dt_series(vx, vy, vt, xd, cf, cvar1, missing_val=-9999.,
         f_out.createDimension(cydnm , ny)
         f_out.createDimension(cxdnm , nx)
 
-        id_t   = f_out.createVariable('time','f4',('time',)) ; id_t.units = 'year'
-        id_lat = f_out.createVariable(cyvnm ,'f4',(cydnm,))
-        id_lon = f_out.createVariable(cxvnm ,'f4',(cxdnm,))
+        id_t   = f_out.createVariable('time','f4',('time',), zlib=True, complevel=5) ; id_t.units = 'year'
+        id_lat = f_out.createVariable(cyvnm ,'f4',(cydnm,), zlib=True, complevel=5)
+        id_lon = f_out.createVariable(cxvnm ,'f4',(cxdnm,), zlib=True, complevel=5)
 
         id_lat[:] = vy[:]
         id_lon[:] = vx[:]
         
-        id_x01   = f_out.createVariable(cvar1 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val)
+        id_x01   = f_out.createVariable(cvar1 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val, zlib=True, complevel=5)
         id_x01.units     = cu_d
         id_x01.long_name = cln_d
         if l_do_v2:
-            id_x02   = f_out.createVariable(cvar2 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val)
+            id_x02   = f_out.createVariable(cvar2 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val, zlib=True, complevel=5)
             id_x02.units = cu_d
             if not cun2=='unknown': id_x02.units = cun2
             id_x02.long_name = cln_d2
         if l_do_v3:
-            id_x03   = f_out.createVariable(cvar3 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val)
+            id_x03   = f_out.createVariable(cvar3 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val, zlib=True, complevel=5)
             id_x03.units     = cu_d
             if not cun3=='unknown': id_x03.units = cun3
             id_x03.long_name = cln_d3
         if l_do_v4:
-            id_x04   = f_out.createVariable(cvar4 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val)
+            id_x04   = f_out.createVariable(cvar4 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val, zlib=True, complevel=5)
             id_x04.units     = cu_d
             if not cun4=='unknown': id_x04.units = cun4
             id_x04.long_name = cln_d4
         if l_do_v5:
-            id_x05   = f_out.createVariable(cvar5 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val)
+            id_x05   = f_out.createVariable(cvar5 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val, zlib=True, complevel=5)
             id_x05.units = cu_d
             if not cun5=='unknown': id_x05.units = cun5
             id_x05.long_name = cln_d5
         if l_do_v6:
-            id_x06   = f_out.createVariable(cvar6 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val)
+            id_x06   = f_out.createVariable(cvar6 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val, zlib=True, complevel=5)
             id_x06.units     = cu_d
             if not cun6=='unknown': id_x06.units = cun6
             id_x06.long_name = cln_d6
         if l_do_v7:
-            id_x07   = f_out.createVariable(cvar7 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val)
+            id_x07   = f_out.createVariable(cvar7 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val, zlib=True, complevel=5)
             id_x07.units     = cu_d
             if not cun7=='unknown': id_x07.units = cun7
             id_x07.long_name = cln_d7
         if l_do_v8:
-            id_x08   = f_out.createVariable(cvar8 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val)
+            id_x08   = f_out.createVariable(cvar8 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val, zlib=True, complevel=5)
             id_x08.units = cu_d
             if not cun8=='unknown': id_x08.units = cun8
             id_x08.long_name = cln_d8
         if l_do_v9:
-            id_x09   = f_out.createVariable(cvar9 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val)
+            id_x09   = f_out.createVariable(cvar9 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val, zlib=True, complevel=5)
             id_x09.units     = cu_d
             if not cun9=='unknown': id_x09.units = cun9
             id_x09.long_name = cln_d9
         if l_do_v10:
-            id_x10   = f_out.createVariable(cvar10 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val)
+            id_x10   = f_out.createVariable(cvar10 ,'f4',('time',cydnm,cxdnm,), fill_value=missing_val, zlib=True, complevel=5)
             id_x10.units     = cu_d
             if not cun7=='unknown': id_x10.units = cun10
             id_x10.long_name = cln_d10
@@ -426,7 +426,7 @@ def write_2d_mask(cf_out, MSK, xlon=[], xlat=[], name='mask'):
 
     (nj,ni) = nmp.shape(MSK)
 
-    f_out = Dataset(cf_out, 'w', format='NETCDF3_CLASSIC')
+    f_out = Dataset(cf_out, 'w', format='NETCDF4')
 
     # Dimensions:
     f_out.createDimension('x', ni)
@@ -434,12 +434,12 @@ def write_2d_mask(cf_out, MSK, xlon=[], xlat=[], name='mask'):
 
     if (xlon != []) and (xlat != []):
         if (xlon.shape == (nj,ni)) and (xlon.shape == xlat.shape):
-            id_lon  = f_out.createVariable('nav_lon' ,'f4',('y','x',))
-            id_lat  = f_out.createVariable('nav_lat' ,'f4',('y','x',))
+            id_lon  = f_out.createVariable('nav_lon' ,'f4',('y','x',), zlib=True, complevel=5)
+            id_lat  = f_out.createVariable('nav_lat' ,'f4',('y','x',), zlib=True, complevel=5)
             id_lon[:,:] = xlon[:,:]
             id_lat[:,:] = xlat[:,:]
         
-    id_msk  = f_out.createVariable(name ,'i1',('y','x',))
+    id_msk  = f_out.createVariable(name ,'i1',('y','x',), zlib=True, complevel=5)
     id_msk[:,:] = MSK[:,:]
 
     f_out.about = 'Diagnostics created with BaraKuda (https://github.com/brodeau/barakuda)'
@@ -447,11 +447,11 @@ def write_2d_mask(cf_out, MSK, xlon=[], xlat=[], name='mask'):
 
     return
 
-def dump_2d_field(cf_out, XFLD, xlon=[], xlat=[], name='field'):
+def dump_2d_field( cf_out, XFLD, xlon=[], xlat=[], name='field', unit='', long_name='', mask=[] ):
 
     (nj,ni) = nmp.shape(XFLD)
 
-    f_out = Dataset(cf_out, 'w', format='NETCDF3_CLASSIC')
+    f_out = Dataset(cf_out, 'w', format='NETCDF4')
 
     # Dimensions:
     f_out.createDimension('y', nj)
@@ -459,12 +459,19 @@ def dump_2d_field(cf_out, XFLD, xlon=[], xlat=[], name='field'):
 
     if (xlon != []) and (xlat != []):
         if (xlon.shape == (nj,ni)) and (xlon.shape == xlat.shape):
-            id_lon  = f_out.createVariable('nav_lon' ,'f4',('y','x',))
-            id_lat  = f_out.createVariable('nav_lat' ,'f4',('y','x',))
+            id_lon  = f_out.createVariable('nav_lon' ,'f4',('y','x',), zlib=True, complevel=5)
+            id_lat  = f_out.createVariable('nav_lat' ,'f4',('y','x',), zlib=True, complevel=5)
             id_lon[:,:] = xlon[:,:]
             id_lat[:,:] = xlat[:,:]
         
-    id_fld  = f_out.createVariable(name ,'f4',('y','x',))
+    id_fld  = f_out.createVariable(name ,'f4',('y','x',), zlib=True, complevel=5)
+    if long_name != '': id_fld.long_name = long_name
+    if unit      != '': id_fld.units     = unit
+
+    if mask != []:
+        idx_land = nmp.where( mask < 0.5)
+        XFLD[idx_land] = nmp.nan
+    
     id_fld[:,:] = XFLD[:,:]
 
     f_out.about = 'Diagnostics created with BaraKuda (https://github.com/brodeau/barakuda)'
@@ -500,19 +507,39 @@ def dump_2d_multi_field(cf_out, XFLD, vnames, vndim=[], xlon=[], xlat=[], vtime=
     nf = len(vnames)
     if nbfld != nf: print 'ERROR (dump_2d_multi_field): list of names and main array dont agree in shape!'; sys.exit(0)
 
-    f_out = Dataset(cf_out, 'w', format='NETCDF3_CLASSIC')
+    f_out = Dataset(cf_out, 'w', format='NETCDF4')
 
+
+    l_coord_2d = False
+    cnm_dim_x = 'lon'
+    cnm_dim_y = 'lat'
+    
+    if (xlon != []) and (xlat != []):
+        if (xlon.shape == (nj,ni)) and (xlon.shape == xlat.shape):
+            l_coord_2d = True
+            cnm_dim_x = 'x'
+            cnm_dim_y = 'y'    
+    
     # Dimensions:
-    f_out.createDimension('x', ni)
-    f_out.createDimension('y', nj)
+    
+    f_out.createDimension(cnm_dim_x, ni)
+    f_out.createDimension(cnm_dim_y, nj)
     if l_add_time: f_out.createDimension('time_counter', None)
 
     if (xlon != []) and (xlat != []):
-        if (xlon.shape == (nj,ni)) and (xlon.shape == xlat.shape):
-            id_lon  = f_out.createVariable('nav_lon' ,'f4',('y','x',))
-            id_lat  = f_out.createVariable('nav_lat' ,'f4',('y','x',))
+        if l_coord_2d:
+            id_lon  = f_out.createVariable('nav_lon' ,'f4',(cnm_dim_y,cnm_dim_x,), zlib=True, complevel=5)
+            id_lat  = f_out.createVariable('nav_lat' ,'f4',(cnm_dim_y,cnm_dim_x,), zlib=True, complevel=5)
             id_lon[:,:] = xlon[:,:]
             id_lat[:,:] = xlat[:,:]
+        else:
+            id_lon  = f_out.createVariable(cnm_dim_x ,'f4',(cnm_dim_x,), zlib=True, complevel=5)
+            id_lat  = f_out.createVariable(cnm_dim_y ,'f4',(cnm_dim_y,), zlib=True, complevel=5)
+            id_lon[:] = xlon[:]
+            id_lat[:] = xlat[:]
+
+
+        
     if l_add_time:
         id_tim    = f_out.createVariable('time_counter' ,'f8',('time_counter',))
         id_tim[:] = vtime[:]
@@ -520,13 +547,13 @@ def dump_2d_multi_field(cf_out, XFLD, vnames, vndim=[], xlon=[], xlat=[], vtime=
     #id_fld = nmp.zeros(nbfld, dtype=int)
     for jv in range(nbfld):
         if (not l_add_time) or (vnbdim[jv]==2):
-            id_fld  = f_out.createVariable(vnames[jv] ,'f8',('y','x',))
+            id_fld  = f_out.createVariable(vnames[jv] ,'f8',(cnm_dim_y,cnm_dim_x,), zlib=True, complevel=5)
             if l_add_time:
                 id_fld[:,:] = XFLD[jv,0,:,:]
             else:
                 id_fld[:,:] = XFLD[jv,:,:]
         else:
-            id_fld  = f_out.createVariable(vnames[jv] ,'f8',('time_counter','y','x',))
+            id_fld  = f_out.createVariable(vnames[jv] ,'f8',('time_counter',cnm_dim_y,cnm_dim_x,), zlib=True, complevel=5)
             id_fld[:,:,:] = XFLD[jv,:,:,:]
             
     f_out.about = 'Diagnostics created with BaraKuda (https://github.com/brodeau/barakuda)'

+ 12 - 12
python/modules/barakuda_plot.py

@@ -132,7 +132,7 @@ class plot :
             XF = nmp.ma.masked_where(XMSK == 0, XF) ; # Masking where mask is zero!
 
         fig = plt.figure(num = 1, figsize=(WDTH_DEF , RAT_XY*5.), dpi=None, facecolor='w', edgecolor='k')
-        ax  = plt.axes([0.07, 0.06, 0.98, 0.88], axisbg=cbgcol)
+        ax  = plt.axes([0.07, 0.06, 0.98, 0.88], facecolor=cbgcol)
         vc  = __vcontour__(rmin, rmax, dc)
 
         # Colormap:
@@ -236,7 +236,7 @@ class plot :
         # FIGURE
         # ~~~~~~
         fig = plt.figure(num = 1, figsize=fig_size, dpi=None, facecolor='w', edgecolor='k')
-        ax  = plt.axes([0.05, 0.06, 1., 0.86], axisbg = '0.5')
+        ax  = plt.axes([0.05, 0.06, 1., 0.86], facecolor = '0.5')
 
         vc = __vcontour__(rmin, rmax, dc)
 
@@ -357,14 +357,14 @@ class plot :
             # Horizontal colorbar!
             if ctitle == '':
                 fig = plt.figure(num = 1, figsize=(12.4,7.*rat_vert), dpi=None, facecolor='w', edgecolor='k')
-                ax = plt.axes([0.05, -0.01, 0.93, 1.], axisbg = 'white')
+                ax = plt.axes([0.05, -0.01, 0.93, 1.], facecolor = 'white')
             else:
                 fig = plt.figure(num = 1, figsize=(12.4,7.4*rat_vert), dpi=None, facecolor='w', edgecolor='k')
-                ax = plt.axes([0.05, -0.01, 0.93, 0.96], axisbg = 'white')
+                ax = plt.axes([0.05, -0.01, 0.93, 0.96], facecolor = 'white')
         else:
             # Vertical colorbar!
             fig = plt.figure(num = 1, figsize=(12.4,6.*rat_vert), dpi=None, facecolor='w', edgecolor='k')
-            ax = plt.axes([0.046, 0.06, 1.02, 0.88], axisbg = 'white')
+            ax = plt.axes([0.046, 0.06, 1.02, 0.88], facecolor = 'white')
 
         vc = __vcontour__(rmin, rmax, dc)
 
@@ -457,7 +457,7 @@ class plot :
         # ~~~~~~
         fig = plt.figure(num = 1, figsize=(7.,6.*vert_rat), dpi=None, facecolor='w', edgecolor='k')
 
-        ax = plt.axes([0.07, 0.05, 0.9, 0.9], axisbg = 'gray')
+        ax = plt.axes([0.07, 0.05, 0.9, 0.9], facecolor = 'gray')
 
         vc = __vcontour__(rmin, rmax, dc); #print vc, '\n'
 
@@ -643,7 +643,7 @@ class plot :
 
 
         fig = plt.figure(num = 1, figsize=(vfig_size), dpi=None, facecolor='w', edgecolor='k')
-        ax  = plt.axes(vsporg, axisbg = 'w')
+        ax  = plt.axes(vsporg, facecolor = 'w')
 
 
         ## Colmap:
@@ -758,7 +758,7 @@ class plot :
         # ~~~~~~
         fig = plt.figure(num = 1, figsize=(7.,6.*vert_rat), dpi=None, facecolor='w', edgecolor='k')
 
-        ax = plt.axes([0.07, 0.05, 0.9, 0.9], axisbg = 'gray')
+        ax = plt.axes([0.07, 0.05, 0.9, 0.9], facecolor = 'gray')
 
         vc = __vcontour__(rmin, rmax, dc); #print vc, '\n'
 
@@ -834,7 +834,7 @@ class plot :
             font_ttl, font_xylb, font_clb, font_inf = __font_unity__(fig_dpi=DPI_DEF)
     
             fig = plt.figure(num = 1, figsize=(WDTH_DEF , RAT_XY*6.), dpi=None, facecolor='w', edgecolor='k') ; #trsp_sig_class
-            ax = plt.axes([0.075,  -0.025, 0.9, 0.98], axisbg = 'w')
+            ax = plt.axes([0.075,  -0.025, 0.9, 0.98], facecolor = 'w')
     
             vc = __vcontour__(rmin, rmax, dc)
     
@@ -896,7 +896,7 @@ class plot :
 
 
         fig = plt.figure(num = 1, figsize=fig_size, dpi=None, facecolor='w', edgecolor='k')
-        ax = plt.axes([0.1,  0.065,   0.92,       0.89], axisbg = 'gray')
+        ax = plt.axes([0.1,  0.065,   0.92,       0.89], facecolor = 'gray')
         vc = __vcontour__(rmin, rmax, dc)
 
         # Colmap:
@@ -955,7 +955,7 @@ class plot :
         XF = nmp.ma.masked_where(XMSK == 0, XF)
 
         fig = plt.figure(num = 1, figsize=(WDTH_DEF , RAT_XY*5.), dpi=None, facecolor='w', edgecolor='k')
-        ax  = plt.axes(vax, axisbg='gray')
+        ax  = plt.axes(vax, facecolor='gray')
         vc  = __vcontour__(rmin, rmax, dc)
 
         # Colormap:
@@ -1509,7 +1509,7 @@ class plot :
         [t.set_color('0.3') for t in ax2.xaxis.get_ticklabels()]
         plt.xlabel('Wave-number [cy/km]', color='0.3')
         #
-        if cinfo != '': ax2.annotate(cinfo, xy=(0.08, 0.08), xycoords='axes fraction',  bbox={'facecolor':clr_inf_box, 'alpha':1., 'pad':10}, zorder=100, **font_inf)
+        if cinfo != '': ax2.annotate(cinfo, xy=(0.08, 0.24), xycoords='axes fraction',  bbox={'facecolor':clr_inf_box, 'alpha':1., 'pad':10}, zorder=100, **font_inf)
         #
         if logo_on:
             fon = { 'fontname':'Arial', 'fontweight':'normal', 'fontsize':10 }

+ 1 - 1
python/modules/barakuda_plot_extra.py

@@ -149,7 +149,7 @@ def plot_nproj_extra(czone, rmin, rmax, dc, xlon, xlat, XF, XI,
 
         
     fig = plt.figure(num = 1, figsize=(vfig_size), dpi=None, facecolor='w', edgecolor='k')
-    ax  = plt.axes(vsporg, axisbg = 'w')
+    ax  = plt.axes(vsporg, facecolor = 'w')
 
 
     ## Colmap:

+ 40 - 207
python/modules/barakuda_thermo.py

@@ -11,7 +11,12 @@ grav  = 9.8          # gravity
 Rgas  = 287.04     
 Patm  = 101000.    
 ctv   = 0.608        # for virtual temperature
-eps   = 0.62197      # humidity constant
+
+
+R_dry = 287.05       # Specific gas constant for dry air              [J/K/kg]
+R_vap = 461.495      # Specific gas constant for water vapor          [J/K/kg]
+reps0 = R_dry/R_vap  # ratio of gas constant for dry air and water vapor => ~ 0.622
+
 cte   = 0.622     
 kappa = 0.4          # Von Karman's constant
 Cp    = 1000.5    
@@ -20,29 +25,16 @@ eps_w = 0.987        # emissivity of water
 sigma = 5.67E-8      # Stefan Boltzman constamt
 alfa  = 0.066        # Surface albedo over ocean
 
+rtt0 = 273.16     # triple point of temperature    [K]
 
 sensit = 0.1
 
-
-
 def Lvap(zsst):
     #
     # INPUT  : zsst => water temperature in [K]
     # OUTPUT : Lvap => Latent Heat of Vaporization [J/Kg]
     return ( 2.501 - 0.00237*(zsst - rt0) )*1.E6
 
-def e_sat(rt):
-    # vapour pressure at saturation  [Pa]
-    # rt      ! temperature (K)
-    zrtrt0 = rt/rt0
-
-    return 100*( nmp.power(10.,(10.79574*(1. - rt0/rt) - 5.028*nmp.log10(zrtrt0)     
-                 + 1.50475*0.0001*(1. - nmp.power(10.,(-8.2969*(zrtrt0 - 1.))) )
-                 + 0.42873*0.001 *(nmp.power(10.,(4.76955*(1. - rt0/rt))) - 1.) + 0.78614 ) ) )
-
-
-
-
 def e_air(q_air, zslp):
     #
     #--------------------------------------------------------------------
@@ -54,11 +46,11 @@ def e_air(q_air, zslp):
     #
 
     diff  = 1.E8
-    e_old = q_air*zslp/eps
+    e_old = q_air*zslp/reps0
 
     while diff > 1.:
         #print "Again... diff = ", diff
-        ee = q_air/eps*(zslp - (1. - eps)*e_old)
+        ee = q_air/reps0*(zslp - (1. - reps0)*e_old)
         diff  = nmp.sum(abs( ee - e_old ))
         e_old = ee
 
@@ -68,204 +60,45 @@ def e_air(q_air, zslp):
 
 
 
+### Update: June 2019, LB:
 
-#def rh_air(q_air, t_air, zslp)
-#    #
-#    REAL             ::    rh_air      #: relative humidity             [%]
-#    REAL, INTENT(in) ::          &
-#         &                 q_air,   &  #: specific humidity of air      [kg/kg]
-#         &                 t_air,   &  #: air temperature               [K]
-#         &                 zslp        #: atmospheric pressure          [Pa]
-#    #
-#    REAL             :: ea, es
-#    #
-#    #
-#    ea = e_air(q_air, zslp)
-#    es = e_sat(t_air)
-#    #
-#    rh_air = ea/es
-#    #
-#  END FUNCTION rh_air
-
-
-  
-#def q_air_rh(rha, ta, zslp)
-#    # Specific humidity from RH 
-#    REAL, DIMENSION(ni,nj) :: q_air_rh
-#    INTEGER, INTENT(in)    :: ni, nj
-#    
-#    REAL, DIMENSION(ni,nj), INTENT(in) :: &
-#         &     rha,     &   !: relative humidity      [fraction, not %#!]
-#         &     ta,      &   !: air temperature        [K]
-#         &     zslp         !: atmospheric pressure          [Pa]
-#    
-#    REAL, DIMENSION(ni,nj) :: ea
-#    
-#    ea = rha*e_sat(ni,nj, ta)
-##    
- #   q_air_rh = ea*eps/(zslp - (1. - eps)*ea)
-
-#  END FUNCTION q_air_rh
-
-
-
-
-#def q_air_dp(da, zslp)
-    #
-    # Air specific humidity from dew point temperature
-    #
-#    INTEGER, INTENT(in) :: ni, nj
-#    REAL, DIMENSION(ni,nj) :: q_air_dp  !: kg/kg
-    #
-#    REAL, DIMENSION(ni,nj), INTENT(in) :: &
-#         &     da,     &    !: dew-point temperature   [K]
-#         &     zslp         !: atmospheric pressure    [Pa]
-    #
-#    q_air_dp = e_sat(da)*eps/(zslp - (1. - eps)*e_sat(da))
-    #
-
-
-  #
-  #
-  #
-  # Humidity :
-  # ----------
-  #            - ea is the water vapour pressure  (h.Pa)
-  #            - qa is the specific hymidity      (g/kg)
-  #              rqa = rqa/1000.     ! puts specific humidity in kg/kg instead of g/kg    
-  #             rea = (rqa*rpa)/(0.378*rqa + cte)
-  #
-  #    Virtual temperature :
-  #
-  # 
-  # Tv = T*(1 + 0.608*q)    
-  #
-  # eps = 0.622        --> 0.608 = (1 - eps) / eps
-  #
-  
-
-
-  
-def rho_air(zt, zq, zP):
-    #
-    #INTEGER, INTENT(in)    :: ni, nj
-    #REAL, DIMENSION(ni,nj) ::   rho_air      !: density of air [kg/m^3] 
-    #REAL, DIMENSION(ni,nj), INTENT(in) ::  &
-    #     &      zt,       &     !: air temperature in (K)
-    #     &      zq,       &     !: air spec. hum. (kg/kg)
-    #     &      zP              !: pressure in       (Pa)
-    #
-    rho_air = zP/(Rgas*zt*(1. + ctv*zq))
-    return rho_air
-
-
-
-
-def q_sat(zsst, zslp):
-    #
-    #REAL, DIMENSION(ni,nj) :: q_sat
-    #INTEGER, INTENT(in) :: ni, nj
-    #REAL, DIMENSION(ni,nj), INTENT(in) ::  &
-    #     &                  zsst,  &   !: sea surface temperature         [K]  
-    #     &                  zslp       !: sea level atmospheric pressure  [Pa]
+def e_sat(rT):
     #
+    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    # rT:     air temperature [K]
+    # e_sat:  water vapor at saturation [Pa]
     #
-    # Local :
-    # -------
-    #REAL, DIMENSION(ni,nj) ::  &
-    #     &    e_s
+    # Recommended by WMO
     #
+    # Goff, J. A., 1957: Saturation pressure of water on the new kelvin
+    # temperature scale. Transactions of the American society of heating
+    # and ventilating engineers, 347.
+    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     #
+    ztmp1 = nmp.zeros(nmp.shape(rT))
+    ztmp2 = nmp.zeros(nmp.shape(rT))
+    ztmp2 = rT/rtt0
+    ztmp1 = 1./ztmp2
     #
-    # Specific humidity at saturation
-    # -------------------------------
+    #e_sat = 100.*( 10.^(10.79574*(1. - ztmp) - 5.028*LOG10(rT/rtt0) \
+    #                    + 1.50475*10.^(-4)*(1. - 10.^(-8.2969*(rT/rtt0 - 1.)) ) \
+    #                    + 0.42873*10.^(-3)*(10.^(4.76955*(1. - ztmp)) - 1.) + 0.78614) )
     #
-    # Vapour pressure at saturation :
-    e_s = 100*(10.^(10.79574*(1-rt0/zsst)-5.028*math.log10(zsst/rt0)  \
-                   + 1.50475*10.^(-4)*(1 - 10.^(-8.2969*(zsst/rt0 - 1)) )    \
-                   + 0.42873*10.^(-3)*(10.^(4.76955*(1 - rt0/zsst)) - 1) + 0.78614) )
+    e_sat = 100.*( nmp.power(10.,(10.79574*(1. - ztmp1) - 5.028*nmp.log10(ztmp2) \
+                   + 1.50475*0.0001*(1. - nmp.power(10.,(-8.2969*(ztmp2 - 1.))) ) \
+                   + 0.42873*0.001 *(nmp.power(10.,(4.76955*(1. - ztmp1))) - 1.) + 0.78614 ) ) )
     #
-    return eps*e_s/(zslp - (1. - eps)*e_s)
-
-
-
-
-
-
-#def e_sat_ice(ni,nj, zrt)
+    del ztmp1, ztmp2
     #
-#    INTEGER, INTENT(in) :: ni, nj
-#    REAL, DIMENSION(ni,nj) :: e_sat_ice !: vapour pressure at saturation in presence of ice [Pa]
-#    REAL, DIMENSION(ni,nj), INTENT(in) :: zrt
-#    #
-#    e_sat_ice = 100.*(10.^( -9.09718*(273.16/zrt - 1.) - 3.56654*math.log10(273.16/zrt) &
-#         &                  + 0.876793*(1. - zrt/273.16) + math.log10(6.1071) ) )
-#    #
-
-
-
-
-
-#def q_sat_simple(zsst)
-#    REAL, DIMENSION(ni,nj) :: q_sat_simple
-#    INTEGER, INTENT(in)    :: ni, nj
-#    REAL, DIMENSION(ni,nj), INTENT(in) ::  &
-#         &                  zsst     !: sea surface temperature         [K]
-#    
-#    q_sat_simple = 640380./1.22 * exp(-5107.4/zsst)
-#    
-
-  
-#def q_sat_simple_with_rho(zsst, zslp)
-#    REAL, DIMENSION(ni,nj) :: q_sat_simple_with_rho
-#    INTEGER, INTENT(in)    :: ni, nj
-#    REAL, DIMENSION(ni,nj), INTENT(in) ::  &
-#         &                  zsst, &     !: sea surface temperature         [K]
-#         &                  zslp        !: sea level atmospheric pressure  [Pa]
-#    REAL, DIMENSION(ni,nj) :: ztmp, ztmp2
-#    ! we need to know specific humidity to get a good estimate of density:
-#    ztmp2 = 0.99 ! RH! air is saturated #!
-#    ztmp  = 0.98 * q_air_rh(ztmp2, zsst, zslp)
-#    
-#    ztmp2 = 0.0
-#    
-#    ztmp2 = rho_air(zsst, ztmp, zslp) ! rho_air
-#
-#    q_sat_simple_with_rho = 640380./ztmp2 * exp(-5107.4/zsst)
-
-
+    return e_sat
 
-
-#def e_sat(rt):
-#    #  Vapour pressure at saturation for a given temperature [Pa]
-#    #
-#    #  * rt      ! temperature (K)
-#    #
-#    rt0 = 273.16
-#    #
-#    e_sat = 100*( 10**(10.79574*(1 - rt0/rt) - 5.028*log10(rt/rt0)
-#                       + 1.50475*10**(-4)*(1 - 10**(-8.2969*(rt/rt0 - 1)) )
-#                       + 0.42873*10**(-3)*(10**(4.76955*(1 - rt0/rt)) - 1) + 0.78614) )
-#    return e_sat
-
-
-
-def qa_e_p(res, rp):
-    #  Specific humidity from pressure and vapour pressure at saturation
-    #
-    #  * res    : vapour pressure at saturation [Pa]
-    #  * rp     : atmospheric pressure [Pa]
-    #
-    reps = 0.62197
-    #
-    qa_e_p = reps*res / ( rp - (1. - reps)*res )
+def q_air_dp(da, slp):
+    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+    # Air specific humidity from dew point temperature
+    #     da          !: dew-point temperature   [K]
+    #     slp         !: atmospheric pressure    [Pa]
+    #~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
     #
-    return qa_e_p
-
-
-
-
-
-
-
-
+    es = e_sat(da)
+    q_air_dp = es*reps0/(slp - (1. - reps0)*es)
+    return q_air_dp

+ 43 - 11
python/modules/barakuda_tool.py

@@ -9,6 +9,8 @@ import numpy as nmp
 #    #idx_bad = nmp.where(Lshit)
 
 
+ris2 = 1./nmp.sqrt(2.)
+
 
 def chck4f(cf, script_name=''):
 
@@ -358,8 +360,6 @@ def drown(X, mask, k_ew=-1, nb_max_inc=5, nb_smooth=5):
 
     cmesg = 'ERROR, barakuda_tool.py => drown :'
 
-    rr = 0.707
-
     nbdim = len(nmp.shape(X))
 
     if nbdim > 3 or nbdim <2:
@@ -429,27 +429,27 @@ def drown(X, mask, k_ew=-1, nb_max_inc=5, nb_smooth=5):
 
                 if ji == 0 and k_ew >= 0:
                     Xtemp[jj,0] = 1./(maskv[jj,1]+maskv[jj+1,0]+maskv[jj,ni-1-k_ew]+maskv[jj-1,0]+
-                                   rr*maskv[jj+1,1]+rr*maskv[jj+1,ni-1-k_ew]+rr*maskv[jj-1,ni-1-k_ew]+rr*maskv[jj-1,1])*(
+                                   ris2*maskv[jj+1,1]+ris2*maskv[jj+1,ni-1-k_ew]+ris2*maskv[jj-1,ni-1-k_ew]+ris2*maskv[jj-1,1])*(
                         maskv[jj,1]*dold[jj,1] + maskv[jj+1,0]*dold[jj+1,0] +
                         maskv[jj,ni-1-k_ew]*dold[jj,ni-1-k_ew] + maskv[jj-1,0]*dold[jj-1,0] +
-                        rr*maskv[jj+1,1]*dold[jj+1,1] + rr*maskv[jj+1,ni-1-k_ew]*dold[jj+1,ni-1-k_ew] +
-                        rr*maskv[jj-1,ni-1-k_ew]*dold[jj-1,ni-1-k_ew] + rr*maskv[jj-1,1]*dold[jj-1,1]  )
+                        ris2*maskv[jj+1,1]*dold[jj+1,1] + ris2*maskv[jj+1,ni-1-k_ew]*dold[jj+1,ni-1-k_ew] +
+                        ris2*maskv[jj-1,ni-1-k_ew]*dold[jj-1,ni-1-k_ew] + ris2*maskv[jj-1,1]*dold[jj-1,1]  )
 
                 elif ji == ni-1 and k_ew >= 0:
                     Xtemp[jj,ni-1] = 1./(maskv[jj,k_ew]+maskv[jj+1,ni-1]+maskv[jj,ni-2]+maskv[jj-1,ni-1]+
-                                   rr*maskv[jj+1,k_ew]+rr*maskv[jj+1,ni-2]+rr*maskv[jj-1,ni-2]+rr*maskv[jj-1,k_ew])*(
+                                   ris2*maskv[jj+1,k_ew]+ris2*maskv[jj+1,ni-2]+ris2*maskv[jj-1,ni-2]+ris2*maskv[jj-1,k_ew])*(
                         maskv[jj,k_ew]*dold[jj,k_ew] + maskv[jj+1,ni-1]*dold[jj+1,ni-1] +
                         maskv[jj,ni-2]*dold[jj,ni-2] + maskv[jj-1,ni-1]*dold[jj-1,ni-1] +
-                        rr*maskv[jj+1,k_ew]*dold[jj+1,k_ew] + rr*maskv[jj+1,ni-2]*dold[jj+1,ni-2] +
-                        rr*maskv[jj-1,ni-2]*dold[jj-1,ni-2] + rr*maskv[jj-1,k_ew]*dold[jj-1,k_ew]  )
+                        ris2*maskv[jj+1,k_ew]*dold[jj+1,k_ew] + ris2*maskv[jj+1,ni-2]*dold[jj+1,ni-2] +
+                        ris2*maskv[jj-1,ni-2]*dold[jj-1,ni-2] + ris2*maskv[jj-1,k_ew]*dold[jj-1,k_ew]  )
 
                 else:
                     Xtemp[jj,ji] = 1./(maskv[jj,ji+1]+maskv[jj+1,ji]+maskv[jj,ji-1]+maskv[jj-1,ji]+
-                                   rr*maskv[jj+1,ji+1]+rr*maskv[jj+1,ji-1]+rr*maskv[jj-1,ji-1]+rr*maskv[jj-1,ji+1])*(
+                                   ris2*maskv[jj+1,ji+1]+ris2*maskv[jj+1,ji-1]+ris2*maskv[jj-1,ji-1]+ris2*maskv[jj-1,ji+1])*(
                         maskv[jj,ji+1]*dold[jj,ji+1] + maskv[jj+1,ji]*dold[jj+1,ji] +
                         maskv[jj,ji-1]*dold[jj,ji-1] + maskv[jj-1,ji]*dold[jj-1,ji] +
-                        rr*maskv[jj+1,ji+1]*dold[jj+1,ji+1] + rr*maskv[jj+1,ji-1]*dold[jj+1,ji-1] +
-                        rr*maskv[jj-1,ji-1]*dold[jj-1,ji-1] + rr*maskv[jj-1,ji+1]*dold[jj-1,ji+1]  )
+                        ris2*maskv[jj+1,ji+1]*dold[jj+1,ji+1] + ris2*maskv[jj+1,ji-1]*dold[jj+1,ji-1] +
+                        ris2*maskv[jj-1,ji-1]*dold[jj-1,ji-1] + ris2*maskv[jj-1,ji+1]*dold[jj-1,ji+1]  )
 
                 ic = ic+1
 
@@ -711,3 +711,35 @@ def var_and_signs( csin ):
 
 
 
+def smoother(X, msk, nb_smooth=5):
+
+    # Apply 'nb_smooth' passes of a masked 9-point smoothing stencil to
+    # the 2-D field X, modifying X IN PLACE (the function returns None).
+    #   X         : 2-D array to smooth; masked points are forced to 0
+    #   msk       : 2-D mask, same shape as X (1 = valid, 0 = excluded)
+    #   nb_smooth : number of smoothing iterations
+    # Each pass keeps 35% of the centre value and adds 65% of the
+    # mask-weighted mean of the 8 neighbours, diagonals weighted by the
+    # module constant ris2 = 1/sqrt(2); the 1.E-6 floor on the weight sum
+    # avoids division by zero where all neighbours are masked.
+    # NOTE(review): uses module-level 'nmp' (numpy) and 'ris2'.
+    # TODO: boundary rows/columns are never smoothed — only X[1:-1,1:-1]
+    # is updated (see the original author's note just below).
+
+    ### Do boundaries!!!
+    
+    cmesg = 'ERROR, barakuda_tool.py => smoother :'
+
+    nbdim = len(nmp.shape(X))
+
+    if nbdim != 2:
+        # NOTE(review): Python-2 'print' statement; also assumes 'sys'
+        # is imported at module level — confirm.
+        print cmesg+' size of data array is wrong!!!'; sys.exit(0)
+    
+    (nj,ni) = nmp.shape(X)
+
+    xtmp = nmp.zeros((nj,ni))
+
+    for ii in range(nb_smooth):
+
+        # Work on a masked copy so excluded points contribute nothing.
+        xtmp[:,:] = X[:,:]*msk[:,:]
+
+        X[1:-1,1:-1] = 0.35*xtmp[1:-1,1:-1] + ( 0.65*( xtmp[1:-1,2:] + xtmp[2:,1:-1] + xtmp[1:-1,:-2] + xtmp[:-2,1:-1] \
+                                    + ris2*( xtmp[2:,2:]   + xtmp[:-2,2:]  + xtmp[:-2,:-2] + xtmp[2:,:-2]  )  ) ) \
+                                    / nmp.maximum( msk[1:-1,2:] + msk[2:,1:-1] + msk[1:-1,:-2] + msk[:-2,1:-1] \
+                                  + ris2*( msk[2:,2:]   + msk[:-2,2:]  + msk[:-2,:-2] + msk[2:,:-2]  ) \
+                                           , 1.E-6 )
+                           
+        # Re-apply the mask so land/excluded points stay at zero.
+        X[:,:] = X[:,:]*msk[:,:]
+
+    del xtmp
+
+    return
+
+

+ 2 - 2
python/test_a_python_diag.sh

@@ -100,10 +100,10 @@ export NEMO_OUT_D=`echo ${NEMO_OUT_STRCT} | sed -e "s|<ORCA>|${ORCA}|g" -e "s|<E
 if [ ! -d ${NEMO_OUT_D} ]; then echo "Unfortunately we could not find ${NEMO_OUT_D}"; exit; fi
 YEAR_INI=1990 ; YEAR_INI_F=1990
 export cyear=`printf "%04d" ${jyear}`
-if [ ${ece_exp} -gt 0 ]; then
+#if [ ${ece_exp} -gt 0 ]; then
     iy=$((${jyear}-${YEAR_INI}+1+${YEAR_INI}-${YEAR_INI_F}))
     dir_ece="`printf "%03d" ${iy}`/"
-fi
+#fi
 CPREF=`echo ${NEMO_FILE_PREFIX} | sed -e "s|<ORCA>|${ORCA}|g" -e "s|<EXP>|${EXP}|g" -e "s|<TSTAMP>|${TSTAMP}|g"`
 
 if [ ${icrosssect} -eq 1 ] || [ ${imean2d} -eq 1 ] || [ ${imov} -eq 1 ]; then

+ 40 - 0
rebuild.sh

@@ -0,0 +1,40 @@
+#!/bin/bash
+
+module purge
+#module load releases/2018b netCDF-Fortran/4.4.4-intel-2018b
+module load netCDF-Fortran/4.5.3-gompi-2020b
+
+exp="NE4_08"
+start_year="1960"
+cores="8"
+xios_split="24"
+path_out="/scratch/ucl/elic/pbarriat/nemo/archive/${exp}/output"
+#path_exe="/home/ucl/elic/pbarriat/modeles/nemo/4.0.6/tools/REBUILD_NEMO"
+path_exe="/home/ucl/elic/pbarriat/modeles/nemo/nemo_dev_4/tools/REBUILD_NEMO"
+
+files=${path_out}/0*
+year=${start_year}
+for dir in ${files}
+do
+  cd ${dir}
+  echo "In ${dir} processing ${exp}_1m_${year}0101_${year}1231 ..."
+
+  #[[ ! -s ${exp}_1m_${year}0101_19610101_grid_T.nc ]] && ${path_exe}/rebuild_nemo -t ${cores} ${exp}_1m_${year}0101_19610101_grid_T ${xios_split}
+  #mv ${exp}_1m_${year}0101_19610101_grid_T.nc ${exp}_1m_${year}0101_${year}1231_grid_T.nc
+  #[[ ! -s ${exp}_1m_${year}0101_19610101_grid_U.nc ]] && ${path_exe}/rebuild_nemo -t ${cores} ${exp}_1m_${year}0101_19610101_grid_U ${xios_split}
+  #mv ${exp}_1m_${year}0101_19610101_grid_U.nc ${exp}_1m_${year}0101_${year}1231_grid_U.nc
+  #[[ ! -s ${exp}_1m_${year}0101_19610101_grid_V.nc ]] && ${path_exe}/rebuild_nemo -t ${cores} ${exp}_1m_${year}0101_19610101_grid_V ${xios_split}
+  #mv ${exp}_1m_${year}0101_19610101_grid_V.nc ${exp}_1m_${year}0101_${year}1231_grid_V.nc
+  #[[ ! -s ${exp}_1m_${year}0101_19610101_SBC.nc ]]    && ${path_exe}/rebuild_nemo -t ${cores} ${exp}_1m_${year}0101_19610101_SBC    ${xios_split}
+  #mv ${exp}_1m_${year}0101_19610101_SBC.nc ${exp}_1m_${year}0101_${year}1231_SBC.nc
+  #[[ ! -s ${exp}_1m_${year}0101_19610101_icemod.nc ]] && ${path_exe}/rebuild_nemo -t ${cores} ${exp}_1m_${year}0101_19610101_icemod ${xios_split}
+  #mv ${exp}_1m_${year}0101_19610101_icemod.nc ${exp}_1m_${year}0101_${year}1231_icemod.nc
+
+  [[ ! -s ${exp}_1m_${year}0101_${year}1231_grid_T.nc ]] && ${path_exe}/rebuild_nemo -t ${cores} ${exp}_1m_${year}0101_${year}1231_grid_T ${xios_split}
+  [[ ! -s ${exp}_1m_${year}0101_${year}1231_grid_U.nc ]] && ${path_exe}/rebuild_nemo -t ${cores} ${exp}_1m_${year}0101_${year}1231_grid_U ${xios_split}
+  [[ ! -s ${exp}_1m_${year}0101_${year}1231_grid_V.nc ]] && ${path_exe}/rebuild_nemo -t ${cores} ${exp}_1m_${year}0101_${year}1231_grid_V ${xios_split}
+  [[ ! -s ${exp}_1m_${year}0101_${year}1231_SBC.nc ]]    && ${path_exe}/rebuild_nemo -t ${cores} ${exp}_1m_${year}0101_${year}1231_SBC    ${xios_split}
+  [[ ! -s ${exp}_1m_${year}0101_${year}1231_icemod.nc ]] && ${path_exe}/rebuild_nemo -t ${cores} ${exp}_1m_${year}0101_${year}1231_icemod ${xios_split}
+  year=$((${year}+1))
+  cd ..
+done

BIN
seaice_diags.nc


+ 32 - 27
src/bash/bash_functions.bash

@@ -43,7 +43,7 @@ function barakuda_init()
     export l_y2_j=false    ; # if 1-year long NEMO files end sometime in year+1 instead of year!
     
     # Supported ORCA grids:
-    export ORCA_LIST="ORCA025.L75 ORCA1.L75 ORCA1.L46 ORCA1.L42 ORCA2.L31"
+    export ORCA_LIST="ORCA025.L75 ORCA1.L75 ORCA1.L46 ORCA1.L42 ORCA2.L31 NANUK025"
 
     # Some defaults:
     export LFORCE_YINI=false
@@ -105,8 +105,9 @@ function barakuda_setup()
     echo
 
     if [ -z ${PYTHON_HOME} ]; then echo "ERROR: PYTHON_HOME is not set! => add it to config file"; exit; fi
-    export PYTH="${PYTHON_HOME}/bin/python -W ignore" ; # which Python installation to use
-    export PYTHONPATH=${PYTHON_HOME}/lib/python2.7/site-packages:${BARAKUDA_ROOT}/python/modules ; # PATH to python barakuda modules
+    PYTH_tmp=`which python`
+    export PYTH="${PYTH_tmp} -W ignore" ; # which Python installation to use
+    export PYTHONPATH=${PYTHONPATH}:${BARAKUDA_ROOT}/python/modules ; # PATH to python barakuda modules
     export PYBRKD_EXEC_PATH=${BARAKUDA_ROOT}/python/exec         ; # PATH to python barakuda executable
 
     echo " PYTHON_HOME => "${PYTHON_HOME} ; echo
@@ -144,7 +145,9 @@ function barakuda_setup()
     # Need to be consistent with the netcdf installation upon which cdftools_light was compiled:
     ff="${BARAKUDA_ROOT}/cdftools_light/make.macro"
     if [ ! -f ${ff} ]; then echo "PROBLEM: cannot find ${ff} (needed to get NCDF_DIR)!"; exit; fi
-    export NCDF_DIR=`cat ${ff} | grep ^NCDF_DIR | cut -d = -f2 | sed -e s/' '//g`
+    #export NCDF_DIR=`cat ${ff} | grep ^NCDF_DIR | cut -d = -f2 | sed -e s/' '//g`
+    #NCDF_DIR="$`cat ${ff} | grep ^NCDF_DIR | cut -d = -f2 | cut -d '(' -f2 | cut -d ')' -f1`"
+    export NCDF_DIR=${EBROOTNETCDFMINFORTRAN}
     echo ; echo "NCDF_DIR = ${NCDF_DIR}"; echo
     export LD_LIBRARY_PATH=${NCDF_DIR}/lib:${LD_LIBRARY_PATH}
 
@@ -180,22 +183,23 @@ function barakuda_setup()
         exit
     fi
     
-    mkdir -p ${DIAG_D} ${TMP_DIR}
+    mkdir -p ${DIAG_D}/flux_int_basins ${TMP_DIR}/movies
+    ls ${TMP_DIR}
     
     export NEMO_OUT_D=`echo ${NEMO_OUT_STRCT} | sed -e "s|<ORCA>|${ORCA}|g" -e "s|<EXP>|${EXP}|g" -e "s|<Y_INI_EC>|${Y_INI_EC}|g" -e "s|<M_INI_EC>|${M_INI_EC}|g"`
     if [ ! -d ${NEMO_OUT_D} ]; then echo "Unfortunately we could not find ${NEMO_OUT_D}"; exit; fi
     
     # Where to look for NEMO namelists:
-    if [ ${ece_exp} -eq 0 ]; then
+    #if [ ${ece_exp} -eq 0 ]; then
         # NEMO standalone:
-        export NAMELIST_DIR=${NEMO_OUT_D}
-    elif [ ${ece_exp} -gt 0 ] && [ ${ece_exp} -lt 10 ]; then
-        # EC-Earth classic mode:
-        export NAMELIST_DIR=`echo ${NEMO_OUT_D} | sed -e "s|/output/nemo||g"`
-    else
-        # EC-Earth autosubmit mode TO FIX! :
-        echo; echo " WARNING ('barakuda_setup' of bash_functions.bash): don't know where to look for NEMO namelists!"
-    fi
+        export NAMELIST_DIR=${NEMO_OUT_D}/..
+    #elif [ ${ece_exp} -gt 0 ] && [ ${ece_exp} -lt 10 ]; then
+    #    # EC-Earth classic mode:
+    #    export NAMELIST_DIR=`echo ${NEMO_OUT_D} | sed -e "s|/output/nemo||g"`
+    #else
+    #    # EC-Earth autosubmit mode TO FIX! :
+    #    echo; echo " WARNING ('barakuda_setup' of bash_functions.bash): don't know where to look for NEMO namelists!"
+    #fi
     echo; echo " *** NAMELIST_DIR = ${NAMELIST_DIR} "; echo
     
 
@@ -233,15 +237,16 @@ function barakuda_setup()
 function barakuda_first_last_years()
 {
     cd ${NEMO_OUT_D}/
-    if [ ${ece_exp} -gt 0 ]; then
+    #if [ ${ece_exp} -gt 0 ]; then
         if [ ! -d 001 ]; then
             echo " *** Inside: `pwd` !"; \ls -l ; echo
             echo "ERROR: since ece_exp=${ece_exp}, there should be a directory 001 in:"; echo " ${NEMO_OUT_D}"; echo; exit
         fi
         nby_ece=`ls -d ???/ |  grep "[^0-9]" | wc -l`
-        echo " ${nby_ece} years have been completed..."
+        nby_ece_true=$((${nby_ece}*${IFREQ_SAV_YEARS}))
+        echo " ${nby_ece_true} years have been completed..."
         cd 001/
-    fi
+    #fi
 
     # Try to guess the first year from stored "grid_T" files:
     YEAR_INI=`\ls ${CPREF}*${ctest}* | sed -e s/"${CPREF}"/""/g | head -1 | cut -c1-4`
@@ -262,19 +267,19 @@ function barakuda_first_last_years()
 
     cd ${NEMO_OUT_D}/
 
-    if [ ${ece_exp} -gt 0 ]; then
+    #if [ ${ece_exp} -gt 0 ]; then
         dir_end=`printf "%03d" ${nby_ece}`
         if [ ! -d ${dir_end} ]; then echo "ERROR: since ece_exp=${ece_exp}, there should be a directory ${dir_end} in:"; echo " ${NEMO_OUT_D}"; exit ; fi
         export YEAR_END=$((${YEAR_INI}+${nby_ece}))
-    else
-        export YEAR_END=`\ls ${CPREF}*${ctest}* | sed -e s/"${CPREF}"/''/g | tail -1 | cut -c1-4`
-        echo ${YEAR_END} |  grep "[^0-9]" >/dev/null; # Checking if it's an integer
-        if [ ! "$?" -eq 1 ]; then
-            echo "ERROR: it was imposible to guess the year coresponding to the last saved year!"
-            echo "       => check your NEMO output directory and file naming..."; exit
-        fi
-        export YEAR_END=$((${YEAR_END}+${IFREQ_SAV_YEARS}-1))
-    fi
+    #else
+    #    export YEAR_END=`\ls ${CPREF}*${ctest}* | sed -e s/"${CPREF}"/''/g | tail -1 | cut -c1-4`
+    #    echo ${YEAR_END} |  grep "[^0-9]" >/dev/null; # Checking if it's an integer
+    #    if [ ! "$?" -eq 1 ]; then
+    #        echo "ERROR: it was imposible to guess the year coresponding to the last saved year!"
+    #        echo "       => check your NEMO output directory and file naming..."; exit
+    #    fi
+    #    export YEAR_END=$((${YEAR_END}+${IFREQ_SAV_YEARS}-1))
+    #fi
     echo
     echo " *** Initial year set to ${YEAR_INI}"
     echo " ***   Last  year set to ${YEAR_END}"

+ 1 - 1
src/html/conf_end.html

@@ -1,7 +1,7 @@
 
       <br><br><br>
 
-      <i> Page and diagnostics created with <a href="https://github.com/brodeau/barakuda">BaraKuda</a>...</i>
+      <i> Page and diagnostics created with <a href="https://gogs.elic.ucl.ac.be/pbarriat/barakuda">BaraKuda</a>...</i>
 
       <br><br><br>
 

+ 1 - 1
src/html/conf_start.html

@@ -9,7 +9,7 @@
     <div align="justify" style="margin: 0px 0px 10px 10px; font-family: Trebuchet MS; font-size: 18px;width: 1260px;">
       <center>
         
-        <a href="https://github.com/brodeau/barakuda">
+        <a href="https://gogs.elic.ucl.ac.be/pbarriat/barakuda">
           <img src="logo.svg" onerror="this.onerror=null; this.src='logo.png'">
         </a>
         

BIN
tmp_ice.nc