Index: /trunk/models/ARPCLIMAT/param_test
===================================================================
--- /trunk/models/ARPCLIMAT/param_test	(revision 616)
+++ /trunk/models/ARPCLIMAT/param_test	(revision 616)
@@ -0,0 +1,5 @@
+LFX     0.01     0.4    0.04   linear
+FNEBC    1       20     10      linear
+RQLCR    0.5e-4  10e-4   2e-4   linear
+RAUTEFR  0.5e-3  10e-3   1e-3   linear
+TFVL     0.0001   0.2    0.02   log
Index: /trunk/models/ARPCLIMAT/serie_ARPCLIMAT.sh
===================================================================
--- /trunk/models/ARPCLIMAT/serie_ARPCLIMAT.sh	(revision 615)
+++ /trunk/models/ARPCLIMAT/serie_ARPCLIMAT.sh	(revision 616)
@@ -2,4 +2,9 @@
 
 #set -ex
+
+##### TO DO #######
+# - selections des variables d'interet du 1D et conversion au format commun
+# - réécrire proprement ce script sous forme de fonction
+
 
 if [ $# != 2 ] ; then
@@ -10,14 +15,5 @@
 fi
 
-DIRMUSC=$REP_MUSC
-WORKDIR=`pwd`
-
-model=ARPCLIMAT
-#model=AROME
-
-#case=ARMCU
-#subcase=REF
-#case=AYOTTE
-#subcase=24SC
+##### lecture des arguments
 tmp=$1
 case="$(sed 's/\/.*//' <<< "$tmp")"
@@ -26,15 +22,52 @@
 nwave=$2
 
+
+##### spécificités du modèle
+#####  => copier le fonctionnement de LMDZ avec un passage
+#####  de ces spécificités en option quand c'est pertinent
+model=ARPCLIMAT
+#model=AROME
+
+##### Quelques "declarations" dont certaines seront éventuellement 
+##### automatisées
+DIRMUSC=/home/gmgec/mrgc/coulondecorzensm/MUSC/V2.5.4
+
+nlev=91 #grille verticale
+timestep=300 # ??
+GROUP=V642 # doit équivaloir à groupe
+simuREF=CMIP6 # ??
+namref=$DIRMUSC/namelist/ARPCLIMAT/nam.atm.tl127l91r.CMIP6.v642.LPBLEoff
+
+namsfx=$DIRMUSC/namelist/SURFEX/nam.sfx.tl127.CMIP6.v631
+
+GROUPREF=V642
+EXPREF=arp642.stab_CMIP6.LPBLEoff
+configref=$DIRMUSC/config/config_${EXPREF}.py
+
+
+#MASTER=/home/common/pack/arp631/bin/MASTER
+#PGD=/home/common/pack/arp631/bin/PGD
+#PREP=/home/common/pack/arp631/bin/PREP
+
+
+
+
+
+###### Debut du code #####
+WORKDIR=`pwd`
+
+## Chemins specifiques à htexplo
 PARAM=$WORKDIR/WAVE${nwave}/Par1D_Wave${nwave}.asc
-
 repout=$WORKDIR/WAVE${nwave}/${case}/${subcase}
 DIRNAMELIST=$WORKDIR/WAVE$nwave/namelist
 DIRCONFIG=$WORKDIR/WAVE$nwave/config
 
+## Chemins spécifiques à MUSC
+DIROUTMUSC=${DIRMUSC}/simulations/${GROUPREF}/${EXPREF}
 
 # A few variables in the environment to specify the simulation configuration (model component)
 # Please edit param_ARPCLIMAT
 
-. ./param_ARPCLIMAT
+#. ./param_ARPCLIMAT
 
 # Type of cleaning : no, nc, lfa, nclfa, all
@@ -51,5 +84,5 @@
 echo 'nlev = '$nlev
 echo 'timestep = '$timestep
-echo 'cycle = '$cycle
+echo 'GROUP = '$GROUP
 echo 'simuref = '$simuREF
 echo 'namref = '$namref
@@ -85,5 +118,6 @@
 
 # Preparation des fichiers de config
-python prep_config_tunning.py $nl $name $case $subcase $nwave $model $simuREF $cycle $MASTER $PGD $PREP $namsfx
+python prep_config_tuning.py $nwave $nl $name $configref $GROUP
+#$nl $name $case $subcase $nwave $model $simuREF $GROUP $MASTER $PGD $PREP $namsfx
 mv config_* $DIRCONFIG
 
@@ -120,28 +154,40 @@
 rm -f $WORKDIR/err.log
 
-for i in `seq -f "%03g" 1 ${nl}`
+install_dir=$WORKDIR/../../../install_shared
+source $install_dir/base_env.profile
+source $DIRMUSC/setenv 37 ### A generaliser avec l'install automatique de MUSC 
+
+for i in `seq -w 001 ${nl}`
 #for i in `seq -f "%03g" 1 2`
 do
-  if [ $model == 'ARPCLIMAT' ]; then
-    ln -s $DIRMUSC/SURFEX/${cycle}/${simuREF} $DIRMUSC/SURFEX/${cycle}/${simuREF}.${name}-${nwave}-$i
-  fi
-  run_MUSC_cases.py $DIRCONFIG/config_${cycle}_${simuREF}.${name}-${nwave}-$i.py $case $subcase
+  SIMID=${EXPREF}.${name}-${nwave}-$i
+  DIRSIMMUSC=${DIRMUSC}/simulations/${GROUP}/${SIMID}/${case}/${subcase}
+  echo SIMID $SIMID
+  echo DIRSIMMUSC $DIRSIMMUSC
+  if [ $model == 'ARPCLIMAT' ]; then
+    ln -s $DIRMUSC/SURFEX/${GROUP}/${simuREF} $DIRMUSC/SURFEX/${GROUP}/${simuREF}.${name}-${nwave}-$i
+  fi
+  #python MUSC.py -config $DIRCONFIG/config_${GROUP}_${simuREF}.${name}-${nwave}-$i.py -case $case -subcase $subcase
+  #echo python MUSC.py -config $DIRCONFIG/config_${EXPREF}.${name}-${nwave}-$i.py -case $case -subcase $subcase --run-only
+  echo python MUSC.py -config $DIRCONFIG/config_${SIMID}.py -case $case -subcase $subcase
+  python MUSC.py -config $DIRCONFIG/config_${SIMID}.py -case $case -subcase $subcase
+  #run_MUSC_cases.py $DIRCONFIG/config_${GROUP}_${simuREF}.${name}-${nwave}-$i.py $case $subcase
 # Pour être cohérent avec le calcul fait sur les LES
-  cdo houravg $DIRMUSC/simulations/${cycle}/${simuREF}.${name}-${nwave}-$i/L${nlev}_${timestep}s/$case/$subcase/Output/netcdf/Out_klevel.nc $repout/${name}-${nwave}-$i.nc || echo $i >> $WORKDIR/err.log
-  if [ $model == 'ARPCLIMAT' ]; then
-    rm -f $DIRMUSC/SURFEX/${cycle}/${simuREF}.${name}-${nwave}-$i
+  cdo houravg ${DIRSIMMUSC}/Output/netcdf/Out_klevel.nc $repout/${name}-${nwave}-$i.nc || echo $i >> $WORKDIR/err.log
+  if [ $model == 'ARPCLIMAT' ]; then
+    rm -f $DIRMUSC/SURFEX/${GROUP}/${simuREF}.${name}-${nwave}-$i
   fi
   if [ $clean == 'nc' ]; then
-    rm -rf $DIRMUSC/simulations/${cycle}/${simuREF}.${name}-${nwave}-$i/L${nlev}_${timestep}s/$case/$subcase/Output/netcdf/*.nc
+    rm -rf $DIRMUSC/simulations/${GROUP}/${simuREF}.${name}-${nwave}-$i/L${nlev}_${timestep}s/$case/$subcase/Output/netcdf/*.nc
   fi
   if [ $clean == 'lfa' ]; then
-    rm -rf $DIRMUSC/simulations/${cycle}/${simuREF}.${name}-${nwave}-$i/L${nlev}_${timestep}s/$case/$subcase/Output/LFAf/*.lfa
+    rm -rf $DIRMUSC/simulations/${GROUP}/${simuREF}.${name}-${nwave}-$i/L${nlev}_${timestep}s/$case/$subcase/Output/LFAf/*.lfa
   fi
   if [ $clean == 'nclfa' ]; then
-    rm -rf $DIRMUSC/simulations/${cycle}/${simuREF}.${name}-${nwave}-$i/L${nlev}_${timestep}s/$case/$subcase/Output/netcdf/*.nc
-    rm -rf $DIRMUSC/simulations/${cycle}/${simuREF}.${name}-${nwave}-$i/L${nlev}_${timestep}s/$case/$subcase/Output/LFAf/*.lfa
+    rm -rf $DIRMUSC/simulations/${GROUP}/${simuREF}.${name}-${nwave}-$i/L${nlev}_${timestep}s/$case/$subcase/Output/netcdf/*.nc
+    rm -rf $DIRMUSC/simulations/${GROUP}/${simuREF}.${name}-${nwave}-$i/L${nlev}_${timestep}s/$case/$subcase/Output/LFAf/*.lfa
   fi
   if [ $clean == 'all' ]; then
-    rm -rf $DIRMUSC/simulations/${cycle}/${simuREF}.${name}-${nwave}-$i
+    rm -rf $DIRMUSC/simulations/${GROUP}/${simuREF}.${name}-${nwave}-$i
   fi
 done
@@ -185,14 +231,14 @@
 do
   if [ $model == 'ARPCLIMAT' ]; then
-    ln -s $DIRMUSC/SURFEX/${cycle}/${simuREF} $DIRMUSC/SURFEX/${cycle}/${simuREF}.${name}-${nwave}-$i
-  fi
-  run_MUSC_cases.py $DIRCONFIG/config_${cycle}_${simuREF}.${name}-${nwave}-$i.py $case $subcase
+    ln -s $DIRMUSC/SURFEX/${GROUP}/${simuREF} $DIRMUSC/SURFEX/${GROUP}/${simuREF}.${name}-${nwave}-$i
+  fi
+  run_MUSC_cases.py $DIRCONFIG/config_${GROUP}_${simuREF}.${name}-${nwave}-$i.py $case $subcase
 # Pour être cohérent avec le calcul fait sur les LES
-  cdo houravg $DIRMUSC/simulations/${cycle}/${simuREF}.${name}-${nwave}-$i/L${nlev}_${timestep}s/$case/$subcase/Output/netcdf/Out_klevel.nc $repout/tmp_${name}-${nwave}-$i.nc || echo $i >> $WORKDIR/$ERROUT
+  cdo houravg $DIRMUSC/simulations/${GROUP}/${simuREF}.${name}-${nwave}-$i/L${nlev}_${timestep}s/$case/$subcase/Output/netcdf/Out_klevel.nc $repout/tmp_${name}-${nwave}-$i.nc || echo $i >> $WORKDIR/$ERROUT
   cd  $repout
   ncks -v wpvp_conv,wpthp_conv,wpthp_pbl,wpup_conv,wpup_pbl,wpqp_conv,wpqp_pbl -d levh,1,91 tmp_${name}-${nwave}-$i.nc ${name}-${nwave}-$i.nc
 
   if [ $model == 'ARPCLIMAT' ]; then
-    rm -f $DIRMUSC/SURFEX/${cycle}/${simuREF}.${name}-${nwave}-$i
+    rm -f $DIRMUSC/SURFEX/${GROUP}/${simuREF}.${name}-${nwave}-$i
   fi
 done
@@ -203,2 +249,4 @@
 
 cd $WORKDIR
+source $install_dir/base_env.profile
+source $install_dir/env.sh
Index: /trunk/models/ARPCLIMAT/setup_ARPCLIMAT.sh
===================================================================
--- /trunk/models/ARPCLIMAT/setup_ARPCLIMAT.sh	(revision 615)
+++ /trunk/models/ARPCLIMAT/setup_ARPCLIMAT.sh	(revision 616)
@@ -1,20 +1,43 @@
+# Pour l'instant, ARPEGE ne tourne que sur belenos, où il y a le binaire déjà compilé
+# Commencer par ecrire des scripts pour ARPEGE uniquement sur belenos
+# On verra la suite après
+# Ici il faut 
+# - Prevoir l'install automatique de MUSC
+# - gérer les chemins des binaires
+# - écrire le model info ===> Enfait je me demande si ça va pas plus dans le serie_ARPCLIMAT.sh ça
 
 
+DIRMUSC=/home/gmgec/mrgc/coulondecorzensm/MUSC/V2.5.4 
+
+GROUP=V642 ### Je sais pas ce que c'est
+
+namref=$DIRMUSC/namelist/ARPCLIMAT/nam.atm.tl127l91r.CMIP6.v642.LPBLEoff
+
+GROUPREF=V642 #### Ca non plus
+EXPREF=arp642.stab_CMIP6.LPBLEoff
+configref=$DIRMUSC/config/config_${EXPREF}.py
+
+
+model=ARPCLIMAT #### j'imagine que ça peut tourner avec d'autres modeles meteo france
+
+
+
+####### OLD #######
 # To be modified if namelists and binaries are not at the default location
 #PREP=/home/common/pack/arp603_export.01.GFORTRAN610.cx/bin/PREP
-cat << EOF > models/ARPCLIMAT/param_ARPCLIMAT
-nlev=91
-timestep=300
-cycle=arp631
-simuREF=CMIP6
-namref=$REP_MUSC/namelist/ARPCLIMAT/nam.atm.tl127l91r.CMIP6.v631
-
-namsfx=$REP_MUSC/namelist/SURFEX/nam.sfx.tl127.CMIP6.v631
-
-MASTER=/home/common/pack/arp631/bin/MASTER
-PGD=/home/common/pack/arp631/bin/PGD
-PREP=/home/common/pack/arp631/bin/PREP
-#MASTER=/Users/romainroehrig/rootpack/arp603_export.01.MPIGNU640.x/bin/MASTERODB
-#PGD=/Users/romainroehrig/rootpack/arp603_export.01.MPIGNU640.x/bin/PGD
-#PREP=/Users/romainroehrig/rootpack/arp603_export.01.MPIGNU640.x/bin/PREP
-EOF
+#cat << EOF > models/ARPCLIMAT/param_ARPCLIMAT
+#nlev=91
+#timestep=300
+#cycle=arp631
+#simuREF=CMIP6
+#namref=$REP_MUSC/namelist/ARPCLIMAT/nam.atm.tl127l91r.CMIP6.v631
+#
+#namsfx=$REP_MUSC/namelist/SURFEX/nam.sfx.tl127.CMIP6.v631
+#
+#MASTER=/home/common/pack/arp631/bin/MASTER
+#PGD=/home/common/pack/arp631/bin/PGD
+#PREP=/home/common/pack/arp631/bin/PREP
+##MASTER=/Users/romainroehrig/rootpack/arp603_export.01.MPIGNU640.x/bin/MASTERODB
+##PGD=/Users/romainroehrig/rootpack/arp603_export.01.MPIGNU640.x/bin/PGD
+##PREP=/Users/romainroehrig/rootpack/arp603_export.01.MPIGNU640.x/bin/PREP
+#EOF
Index: /trunk/setup.sh
===================================================================
--- /trunk/setup.sh	(revision 615)
+++ /trunk/setup.sh	(revision 616)
@@ -151,17 +151,23 @@
     ;;
     "beleno") cat > env.sh <<eod
-# this config uses software R and pdfjam installed by villefranquen
-# relevant paths are added to PATH and LD_LIBRARY_PATH
+# Using intel compiler for belenos
+# Just miss pdfjam
 set +u  # <module load> can trigger -u
 # module purge tries to unalias condam but the alias does not exist so it raises an error
 # so I create a false condam alias so that it can be unaliased...
 alias condam="ls"
-module purge
-module load gcc/14.1.0
-module load openmpi/gnu/4.1.2.1
-module load python/3.10.12 
-export PATH=/opt/softs/libraries/GCC_5.3.0/cdo-1.7.0/bin/:\$PATH
-export LD_LIBRARY_PATH=/opt/softs/gcc/14.1.0/lib64/:/opt/softs/anaconda3/envs/Python310/lib:\$LD_LIBRARY_PATH
-export MPLBACKEND='Agg'  # non-interactive plots
+module ()
+{
+    eval \`/usr/bin/modulecmd bash \$*\`
+}
+module purge > /dev/null 2>&1
+module load intel > /dev/null 2>&1
+module load nco > /dev/null 2>&1
+module load eccodes > /dev/null 2>&1
+module load cdo > /dev/null 2>&1
+module load python/3.7.6 > /dev/null 2>&1
+module load R/3.6.1 > /dev/null 2>&1
+export PATH="/opt/softs/anaconda3/bin:\$PATH"
+export LD_LIBRARY_PATH=/opt/softs/anaconda3/lib/:\$LD_LIBRARY_PATH
 export TAR=/usr/bin/tar  # for R package install
 eod
@@ -238,5 +244,7 @@
   # Testing availability of various programs
   #-----------------------------------------------------------------------
-  for exe in R Rscript cdo ncks $python pdfjam ; do
+  if [[ ${hostname:0:6} = "beleno" ]] ; then listprog="R Rscript cdo ncks $python" ; else listprog="R Rscript cdo ncks $python pdfjam" ; fi
+  #for exe in R Rscript cdo ncks $python pdfjam ; do
+  for exe in $listprog ; do
       if [[ $(which "$exe") = "" ]] ; then echo "STOP: Can't find program <$exe>" ; exit 1 ; fi
   done
@@ -316,5 +324,5 @@
       if [[ $ExeterUQ = "ExeterUQ_MOGP" ]]; then
         # Install additional required packages 
-        pip install patsy matplotlib netcdf4 "pandas==2.3.1" seaborn
+        pip install patsy matplotlib netcdf4 pandas seaborn
         # Try pypi, otherwise git commit
         if ! pip install "mogp-emulator==$mogp_commit"; then
@@ -353,5 +361,5 @@
           #                This is particularly a pain when we want to support machines with different versions of R installed.
           #                The list and order of packages below is an attempt to circumvent that, but I hope we can find a cleaner way in the future....
-          if ! eval "$htune_dir/src/CheckInstallPackages.sh" Rcpp lattice Matrix reticulate invgamma GenSA far dotCall64 spam fields lhs maps mco ncdf4 shape tensor withr loo MASS pracma mvtnorm flexclust tmvtnorm tictoc ars HI coda MfUSampler ; then
+          if ! eval "$htune_dir/src/CheckInstallPackages.sh" Rcpp lattice reticulate Matrix invgamma GenSA far dotCall64 spam fields lhs maps mco ncdf4 shape tensor withr loo MASS pracma mvtnorm flexclust tmvtnorm tictoc ars HI coda MfUSampler ; then
             echo "STOP: Problem encountered when installing R packages"; exit 1
           fi
Index: /trunk/src/Rlibs_3.6.1.txt
===================================================================
--- /trunk/src/Rlibs_3.6.1.txt	(revision 616)
+++ /trunk/src/Rlibs_3.6.1.txt	(revision 616)
@@ -0,0 +1,28 @@
+Rcpp=1.1.1
+lattice=0.20.45
+Matrix=1.5.0
+invgamma=1.2
+GenSA=1.1.14.1
+far=0.6.7
+dotCall64=1.1.1
+fields=16.3.1
+lhs=1.2.1
+maps=3.4.3
+mco=1.17
+ncdf4=1.24
+shape=1.4.6.1
+tensor=1.5.1
+withr=3.0.2
+loo=2.2.0
+MASS=7.3.58.3
+pracma=2.4.6
+mvtnorm=1.3.6
+flexclust=1.5.0
+tmvtnorm=1.7
+tictoc=1.2.1
+ars=0.8
+HI=0.5
+coda=0.19.4.1
+MfUSampler=1.1.0
+reticulate=1.42.0
+spam=2.5-1
