#! /bin/ksh
#=============================================================================
# mistral cpu batch job parameters
# --------------------------------
#SBATCH --account=bm1102
#SBATCH --job-name=exp.ocean_omip_long_128_30184-QCY.run
#SBATCH --partition=compute2,compute
#SBATCH --chdir=/mnt/lustre01/work/mh0033/m300761/Projects/experiments/comp/zstar/run
#SBATCH --nodes=40
#SBATCH --threads-per-core=2
# the following is needed to work around a bug that otherwise leads to
# a too low number of ranks when using compute,compute2 as queue
#SBATCH --mem=0
#SBATCH --output=/mnt/lustre01/work/mh0033/m300761/Projects/experiments/comp/zstar/run/LOG.exp.ocean_omip_long_128_30184-QCY.run.%j.o
#SBATCH --error=/mnt/lustre01/work/mh0033/m300761/Projects/experiments/comp/zstar/run/LOG.exp.ocean_omip_long_128_30184-QCY.run.%j.o
#SBATCH --exclusive
#SBATCH --time=08:00:00
#=============================================================================
# Echo every command for post-mortem debugging of batch runs.
set -x
# No stack-size limit: ICON's Fortran automatic arrays need large stacks.
ulimit -s unlimited
#=============================================================================
#
# ICON run script:
# !ATTENTION! Do not change the format of the following lines.
# They are evaluated by checksuite scripts.
# created by ./run/make_target_runscript
# target machine is bullx_cpu
# target use_compiler is intel
# with_mpi=yes
# with_openmp=no
# memory_model=large
# submit with sbatch
#
#=============================================================================
#
# OpenMP environment variables
# ----------------------------
export OMP_NUM_THREADS=1
export ICON_THREADS=1
export OMP_SCHEDULE=dynamic,1
export OMP_DYNAMIC="false"
export OMP_STACKSIZE=200M
#
# MPI variables
# -------------
# Number of nodes granted by SLURM (defaults to 1 for interactive use).
no_of_nodes=${SLURM_JOB_NUM_NODES:=1}
# SLURM_JOB_CPUS_PER_NODE may look like "72(x40)": strip the "(x..)" suffix,
# then divide by 2 (two hardware threads per core) to get MPI ranks per node.
mpi_procs_pernode=$((${SLURM_JOB_CPUS_PER_NODE%%\(*} / 2))
((mpi_total_procs=no_of_nodes * mpi_procs_pernode))
#
# blocking length
# ---------------
nproma=16
#
#=============================================================================
# load local setting, if existing
# -------------------------------
# -e (exists) instead of the obsolescent -a test; same semantics.
if [ -e ../setting ]
then
  echo "Load Setting"
  . ../setting
fi
# environment variables for the experiment and the target system
# --------------------------------------------------------------
export EXPNAME="ocean_omip_long_128_30184-QCY"
# Intel OpenMP / Open MPI (MXM, HCOLL) tuning for the target system.
# NOTE(review): the original script exported this list twice with identical
# values; the redundant second copy has been removed.
export KMP_AFFINITY="verbose,granularity=core,compact,1,1"
export KMP_LIBRARY="turnaround"
export KMP_KMP_SETTINGS="1"
export OMP_WAIT_POLICY="active"
export OMPI_MCA_pml="cm"
export OMPI_MCA_mtl="mxm"
export OMPI_MCA_coll="^fca"
export MXM_RDMA_PORTS="mlx5_0:1"
export HCOLL_MAIN_IB="mlx5_0:1"
export HCOLL_ML_DISABLE_BARRIER="1"
export HCOLL_ML_DISABLE_IBARRIER="1"
export HCOLL_ML_DISABLE_BCAST="1"
export HCOLL_ENABLE_MCAST_ALL="1"
export HCOLL_ENABLE_MCAST="1"
export OMPI_MCA_coll_sync_barrier_after_alltoallv="1"
export OMPI_MCA_coll_sync_barrier_after_alltoallw="1"
export MXM_HANDLE_ERRORS="bt"
export UCX_HANDLE_ERRORS="bt"
# Disable glibc trimming so freed memory stays with the process.
export MALLOC_TRIM_THRESHOLD_="-1"
# Continuation of the interconnect tuning.
# NOTE(review): HCOLL_ML_DISABLE_BCAST is set to "0" here although an earlier
# export set it to "1"; this later value is the one that takes effect.
export HCOLL_MAIN_IB="mlx5_0:1"
export HCOLL_ML_DISABLE_BARRIER="1"
export HCOLL_ML_DISABLE_IBARRIER="1"
export HCOLL_ML_DISABLE_BCAST="0"
export HCOLL_ENABLE_MCAST_ALL="1"
export HCOLL_ENABLE_MCAST="1"
export OMPI_MCA_coll_sync_barrier_after_alltoallv="1"
export OMPI_MCA_coll_sync_barrier_after_alltoallw="1"
export MXM_HANDLE_ERRORS="bt"
export UCX_HANDLE_ERRORS="bt"
# load profile
# ------------
if [[ -a /etc/profile ]]
then
  . /etc/profile
fi
#=============================================================================
# directories with absolute paths
# -------------------------------
thisdir=$(pwd)
basedir=${thisdir%/*}   # parent of the run directory = experiment base
export basedir
icon_data_rootFolder="/pool/data/ICON"
##=============================================================================
## load modules
#loadmodule="ncl/6.2.1-gccsys cdo/1.9.5-gcc64 git python ../etc/Modules/icon-switch openmpi/2.0.2p1_hpcx-intel14"
#module load "$loadmodule"
##=============================================================================
# how to start the icon model
# ---------------------------
export START="srun --cpu-freq=HighM1 --kill-on-bad-exit=1 --nodes=${SLURM_JOB_NUM_NODES:-1} --cpu_bind=verbose,cores --distribution=block:block --ntasks=$((no_of_nodes * mpi_procs_pernode)) --ntasks-per-node=${mpi_procs_pernode} --cpus-per-task=$((2 * OMP_NUM_THREADS)) --propagate=STACK,CORE"
#export START="mpiexec -n 24"
export MODEL="${basedir}/bin/icon"
# how to submit the next job
# --------------------------
submit="sbatch"
job_name="exp.ocean_omip_long_128_30184-QCY.run"
# cdo for post-processing
# -----------------------
cdo="cdo"
cdo_diff="cdo diffn"
# constants for time calculations
# -------------------------------
second=1          # [s] 1 second
minute=60         # [s] 1 minute
hour=3600         # [s] 1 hour
day=86400         # [s] 1 day
month=2592000     # [s] 30 days
year360=31104000  # [s] 360 days
year=31556900     # [s] 1 earth year
# define script functions used in the experiment run script
#
# ---------------------------------------------------------
. ./add_run_routines
#=============================================================================
# Batch limits: fixed 2 GiB stack, no core dumps.
ulimit -s 2097152
ulimit -c 0
#!/bin/bash
#=============================================================================
#-----------------------------------------------------------------------------
# the namelist filename
ocean_namelist=NAMELIST_${EXPNAME}
#-----------------------------------------------------------------------------
# global timing
start_date="1923-01-01T00:00:00Z"
end_date="1948-01-01T00:00:00Z"
# NOTE(review): an inherited comment claimed "201 years", but the dates above
# span 25 years — verify which is intended.
# restart=".true."
#-----------------------------------------------------------------------------
# model timing
restart_interval="P1Y"
checkpoint_interval="P1Y"
#
#-----------------------------------------------------------------------------
# output intervals
output_interval="P1M"   # ocean model output interval
daily="P1D"
monthly="P1M"
file_interval="P1Y"
# Default time step; halved rate (1800 s) once the experiment is a restart,
# signalled by the semaphore file in the experiment directory.
modelTimeStep="PT600S"
restartSemaphoreFilename='isRestartRun.sem'
if [ -f ../experiments/${EXPNAME}/${restartSemaphoreFilename} ]; then
  modelTimeStep="PT1800S"
fi
autoPostProcessing="false"   # submit postprocessing job
#steps_per_file=610          # assigned output intervals per file interval
#-----------------------------------------------------------------------------
# global resolution
iforc=12
forcing_timescale=365  # length of OMIP/NCEP dataset: 1 = annual; 12 = monthly data; 365/else = daily data
init_relax=1
ocean_vertical_levels=128
ocean_grids="OceanOnly_Global_IcosSymmetric_0039km_rotatedZ37d_BlackSea_Greenland_modified_sills_srtm30_1min.nc"
grids_folder=/pool/data/ICON/oes/input/r0004/OceanOnly_Global_IcosSymmetric_0039km_rotatedZ37d_BlackSea_Greenland_modified_srtm30_1min
ocean_data_InputFolder=/pool/data/ICON/oes/input/r0004/OceanOnly_Global_IcosSymmetric_0039km_rotatedZ37d_BlackSea_Greenland_modified_srtm30_1min
ocean_data_InputFolder2="/mnt/lustre01/work/mh0033/m300761/Projects/experiments/comp/zstar/data/128_30184"
#-----------------------------------------------------------------------------
#icon_data_rootFolder=/pool/data/ICON
#
# write namelist parameters
# -------------------------
# NOTE(review): the first group header was mojibake ("! ¶llel_nml" — an HTML
# entity corruption of "&para..."); restored to "&parallel_nml".
cat > ${ocean_namelist} << EOF
&parallel_nml
 nproma            = $nproma
 p_test_run        = .false.
 l_fast_sum        = .true.
 num_prefetch_proc = 0
 pio_type          = 0
 num_io_procs      = 0
/
&grid_nml
 dynamics_grid_filename      = "${ocean_grids}",
 use_dummy_cell_closure      = .true.
 use_duplicated_connectivity = .false.
/
&dynamics_nml
 iequations = -1       ! -1: hydrost. ocean model
/
&run_nml
 modelTimeStep        = "${modelTimeStep}" ! model timestep in seconds
 output               = 'nml'              ! namelist controlled output scheme
 activate_sync_timers = .TRUE.
 profiling_output     = 1
 msg_timestamp        = .FALSE.
 timers_level         = 10
 debug_check_level    = 0
 restart_filename     = "${EXPNAME}_restart_oce_.nc" ! NOTE(review): a template token (e.g. <rsttime>) looks stripped here - verify
/
! NOTE(review): the filename_format values below look like stripped
! "<output_filename>_<datetime2>"-style templates - verify against a
! pristine copy of this runscript.
&output_nml
 filetype        = 5 ! output format: 2=GRIB2, 4=NETCDFv2
 output_filename = "${EXPNAME}"
 output_start    = "${start_date}"    ! start in ISO-format
 output_end      = "${end_date}"      ! end in ISO-format
 output_interval = "${monthly}"
 operation       = "mean"
 file_interval   = "${file_interval}"
 mode            = 1 ! 1: forecast mode (relative t-axis), 2: climate mode (absolute t-axis)
 include_last    = .false.
 output_grid     = .TRUE.
 filename_format = "_"
 ml_varlist      = 'group:oce_default','group:oce_essentials', 'group:ice_default','group:oce_physics'
/
&output_nml
 output_start    = "${start_date}"
 output_end      = "${end_date}"
 output_interval = "${monthly}"
 filename_format = "_"
 output_grid     = .true.
 operation       = "mean"
 mode            = 1
 file_interval   = "${file_interval}"
 output_filename = "${EXPNAME}_oceanMonitor"
 ml_varlist      = 'group:ocean_monitor'
/
&output_nml
 output_start    = "${start_date}"
 output_end      = "${end_date}"
 output_interval = "${monthly}"
 filename_format = "_"
 output_grid     = .true.
 operation       = "mean"
 mode            = 1
 file_interval   = "${file_interval}"
 output_filename = "${EXPNAME}_MOC"
 ml_varlist      = 'group:ocean_moc'
/
! Static fields: output_end == output_start, so this stream is written once.
! NOTE(review): a stray double comma before 'invConstantPrismThickness' and
! the malformed entry 'inverse prism_thick_c' (no such variable; matched to
! 'inv_prism_thick_c') were fixed in the list below.
&output_nml
 filetype        = 5 ! output format: 2=GRIB2, 4=NETCDFv2
 output_filename = "${EXPNAME}_fx"
 output_start    = "${start_date}" ! start date in ISO-format
 output_end      = "${start_date}" ! end date in ISO-format
 output_interval = "${monthly}"    ! interval in ISO-format
 filename_format = ""
 file_interval   = "${file_interval}"
 output_grid     = .TRUE.
 mode            = 1
 include_last    = .false.
 ml_varlist      = 'lsm_ctr_c','lsm_c','lsm_e','surface_cell_sea_land_mask','surface_edge_sea_land_mask'
   ,'surface_vertex_sea_land_mask' ,'vertex_bottomLevel','basin_c','regio_c','bottom_thick_c'
   ,'bottom_thick_e','column_thick_c','column_thick_e','wet_c','wet_e','wet_halo_zero_c','wet_halo_zero_e','prism_thick_c'
   ,'invConstantPrismThickness','prism_volume','prism_thick_e','prism_thick_flat_sfc_c','prism_thick_flat_sfc_e'
   ,'inv_prism_thick_c','prism_center_dist_c','constantPrismCenters_Zdistance','constantPrismCenters_invZdistance'
   ,'inv_prism_thick_e','inv_prism_center_dist_c','inv_prism_center_dist_e','depth_CellMiddle','depth_CellInterface'
/
&output_nml
 filetype        = 5 ! output format: 2=GRIB2, 4=NETCDFv2
 output_filename = "${EXPNAME}_tke"
 output_start    = "${start_date}"    ! start in ISO-format
 output_end      = "${end_date}"      ! end in ISO-format
 output_interval = "${monthly}"
 filename_format = "_"
 operation       = 'mean'
 file_interval   = "${file_interval}"
 mode            = 1 ! 1: forecast mode (relative t-axis), 2: climate mode (absolute t-axis)
 include_last    = .false.
 output_grid     = .TRUE.
 ml_varlist      = 'group:oce_cvmix_tke'
/
&dbg_index_nml
 idbg_mxmn   = 1     ! initialize MIN/MAX debug output
 idbg_val    = 0     ! initialize one cell debug output
 idbg_slev   = 1     ! initialize start level for debug output
 idbg_elev   = 5     ! initialize end level for debug output
 dbg_lat_in  =  30.0 ! latitude location of one cell debug output
 dbg_lon_in  = -30.0 ! longitude location of one cell debug output
 str_mod_tst = 'all' ! define modules to print out in debug mode
/
&ocean_dynamics_nml
 vert_cor_type = 1
 ! 128 unevenly spaced levels
 n_zlev = $ocean_vertical_levels
 dzlev_m(1:128) = 2.0, 4.0, 5.0, 7.0, 8.0, 8.0, 8.0, 8.0, 8.0, 8.0,
   8.0, 8.0, 8.0, 8.25, 8.5, 8.75, 9.0, 9.25, 9.5, 9.75,
   10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0, 10.0,
   10.5, 11.0, 11.5, 12.0, 12.5, 13.0, 13.5, 14.0, 14.5, 15.0,
   15.5, 16.0, 16.5, 17.0, 17.5, 18.0, 18.5, 19.0, 19.5, 20.0,
   20.5, 21.0, 21.5, 22.0, 22.5, 23.0, 23.5, 24.0, 24.5, 25.0,
   25.5, 26.0, 26.5, 27.0, 28.5, 29.0, 29.5, 30.0, 30.5, 31.0,
   31.0, 32.0, 33.0, 34.0, 35.0, 36.0, 37.0, 38.0, 39.0, 40.0,
   42.0, 44.0, 46.0, 48.0, 50.0, 52.0, 54.0, 56.0, 58.0, 60.0,
   62.0, 64.0, 66.0, 68.0, 70.0, 72.0, 74.0, 76.0, 78.0, 80.0,
   82.0, 84.0, 86.0, 88.0, 90.0, 92.0, 94.0, 96.0, 98.0, 100.0,
   102.0, 104.0, 106.0, 108.0, 110.0, 112.0, 114.0, 116.0, 118.0, 200.0,
   200.0, 200.0, 200.0, 200.0, 200.0, 200.0, 200.0, 200.0
 l_edge_based    = .FALSE. ! edge- or cell-based mimetic discretization
 l_partial_cells = .FALSE. ! partial bottom cells=true: local varying bottom depth
 select_solver                 = 4       ! 1=gmres_oce_old; 2=ocean_restart_gmres, 3=mixed precisison restart
                                         ! 4=CG (default) 5=CGJ 6=BiCG 7=GMRES restart (legacy) 8=MINRES
 use_absolute_solver_tolerance = .true.
 solver_tolerance              = 1.0E-10 ! this may further be reduced
 select_lhs                    = 1       ! 1=operator based (default) 2=matrix based
 l_lhs_direct                  = .false. ! .true.= use lhs implementation directly .false.= matrix scanner (default)
 solver_FirstGuess             = 2       ! 0=start from zeros 1=last timestep smoothed 2=last timestep (default)
 solver_max_iter_per_restart   = 14
 solver_max_restart_iterations = 100     ! outer (restart solver)
 fast_performance_level        = 200     ! performance level 12: for cell-based; 5: default
 use_continuity_correction     = .TRUE.  ! height adjustment according to vertical velocity in dynamics
 cfl_check                     = .FALSE.
 cfl_write                     = .FALSE.
 i_bc_veloc_top                = 1
 i_bc_veloc_bot                = 1       ! 0: (def) bottom friction off, 1: on
/
&ocean_tracer_transport_nml
 FLUX_CALCULATION_HORZ = 5   ! 1=upwind, 2=central, 3=Lax-Friedrichs, 4=Miura, 5=FCT with Zalesak limiter (default)
 FLUX_CALCULATION_VERT = 7   ! 6=adpo; 7=upwind biased ppm (default); 8=FCT with zalesak limiter
 ! define low and high order methods to be used in horizontal flux corrected
 ! transport methods (flux_calculation_horz=4,5)
 fct_low_order_flux    = 1   ! horizontal low order method: 1=upwind (def), no other implemented
 fct_high_order_flux   = 5   ! horizontal high order method: 1=upwind, 2=central, 3=lax_friedrichs, 4=miura_order1
 fct_limiter_horz      = 100 ! zalesak
 threshold_min_T       = -2.0 ! to avoid abort
/
&ocean_horizontal_diffusion_nml
 laplacian_form          = 1 ! 1=curlcurl-graddiv
 VelocityDiffusion_order = 2 ! 21=biharmonic+laplacian (for the laplacian leith)
 BiharmonicViscosity_scaling    = 1
 BiharmonicViscosity_reference  = 3.50E12 ! [m2/s] constant horizontal viscosity coefficient for velocity
 BiharmonicViscosity_background = 0.0     ! [m2/s] constant horizontal viscosity coefficient for velocity
 HarmonicViscosity_scaling   = 1
 HarmonicViscosity_reference = 0.0        ! [m2/s] constant horizontal viscosity coefficient for velocity
 TracerHorizontalDiffusion_scaling          = 1
 Temperature_HorizontalDiffusion_Background = 0.0
 Temperature_HorizontalDiffusion_Reference  = 0 ! 40
 Salinity_HorizontalDiffusion_Background    = 0.0
 Salinity_HorizontalDiffusion_Reference     = 0 ! 40
/
&ocean_vertical_diffusion_nml
 vert_mix_type = 2      ! 1: PP; 2: TKE; 3: KPP; 4 TKE+IDEMIX
 only_tke      = .true. ! .true. for TKE; .false. for TKE+IDEMIX
 PPscheme_type = 0
 !velocity_VerticalDiffusion_background    = 5.0E-5 ! [m2/s] vertical background viscosity coefficient for velocity
 !Temperature_VerticalDiffusion_background = 1.0E-5 ! [m2/s] vertical background diffusion coefficient for temperature
 !Salinity_VerticalDiffusion_background    = 1.0E-5 ! [m2/s] vertical background diffusion coefficient for salinity
 !tracer_convection_MixingCoefficient = 0.1 ! max vertical tracer diffusion for convection used in case of instability
 !convection_InstabilityThreshold = -1.0E-6 ! used in update_ho_params - default=-5e-8
 !RichardsonDiffusion_threshold   = 0.0     ! used in update_ho_params - default=+5e-8
 !tracer_RichardsonCoeff   = 2.0E-3 ! factor for vertical diffusion coefficient in PP scheme
 !velocity_RichardsonCoeff = 2.0E-3 ! factor for vertical viscosity coefficient in PP scheme
 !bottom_drag_coeff = 3.0E-3        ! default=2.5E-3; active for i_bc_veloc_bot=1
 !use_wind_mixing = .true.          ! true: use wind mixing scheme in MPIOM-type pp-scheme
 !lambda_wind = 0.03
 !tracer_TopWindMixing   = 0.5E-3   ! [m2/s] windmixing diffusivity in MPIOM-type pp-scheme
 !velocity_TopWindMixing = 0.5E-3   ! [m2/s] windmixing viscosity in MPIOM-type pp-scheme
 ! cvmix/tke parameters
 c_k        = 0.1
 c_eps      = 0.7
 alpha_tke  = 30.0
 mxl_min    = 1.d-8
 kappaM_min = 0.0
 kappaM_max = 100.0
 cd         = 3.75
 tke_min    = 1.d-6
 tke_mxl_choice = 2
 tke_surf_min   = 1.d-4
 use_ubound_dirichlet = .false.
 use_lbound_dirichlet = .false.
/
&ocean_GentMcWilliamsRedi_nml
 GMRedi_configuration = 1 ! 0=cartesian diffusion 1=GM-Redi: bolus advection + isopycnal diffusion
 tapering_scheme = 1
 GMRedi_usesRelativeMaxSlopes = .false.
 S_max = 1.0e-3
 S_d   = 0.0001  ! 1e-3 to 1e-4
 k_tracer_GM_kappa_parameter   = 800.0
 k_tracer_isoneutral_parameter = 800.0 ! value for cell-based cartesian diffusion - mpiom: 1000/400km = 400/160km
 k_tracer_dianeutral_parameter = 0.0   !1.0E-5  !
 SWITCH_OFF_DIAGONAL_VERT_EXPL           = .TRUE.
 GMREDI_COMBINED_DIAGNOSTIC              = .FALSE.
 SWITCH_ON_REDI_BALANCE_DIAGONSTIC       = .FALSE.
 REVERT_VERTICAL_RECON_AND_TRANSPOSED    = .TRUE.
 SLOPE_CALC_VIA_TEMPERTURE_SALINITY      = .TRUE. ! spelling matches the Fortran parameter name
 INCLUDE_SLOPE_SQUARED_IMPLICIT          = .TRUE. ! think of l_with_vert_tracer_diffusion
 SWITCH_ON_TAPERING_HORIZONTAL_DIFFUSION = .TRUE.
/
&ocean_physics_nml
 i_sea_ice = 1   ! 0 = no sea ice; 1 = sea ice model on; default=1
/
&sea_ice_nml
 i_ice_therm = 1 ! 1=zero-layer (default), 2=Winton, 0/2: not allowed
 i_ice_dyn   = 1 ! 1/0=switch on/off AWI ice dynamics
! i_Qio_type = 3 ! 3 (default): energy of whole grid-area used for melting (MPIOM-type)
! use_constant_tfreez   = .TRUE.  ! default: true
! use_no_flux_gradients = .FALSE. ! default: true
 leadclose_1  = 0.25  ! default: 0.5 - value of MPIOM: 0.25
 leadclose_2n = 0.666 ! default: 0.0 - value of MPIOM: 2/3
 albedoW_sim  = 0.10  ! albedo of the ocean used in sea ice model
 albs  = 0.80 ! Albedo of snow (not melting)
 albsm = 0.65 ! Albedo of snow (melting)
 albi  = 0.70 ! Albedo of ice (not melting)
 albim = 0.65 ! Albedo of ice (melting)
 Pstar = 40000
/
&ocean_forcing_nml
 iforc_oce         = ${iforc}           ! ocean forcing
 forcing_timescale = $forcing_timescale ! length of ocean forcing data set, see above
 forcing_frequency = 86400.0
 init_oce_relax    = $init_relax        ! read ocean surface relaxation file, see above
 type_surfRelax_Temp = 0   ! 0: no relaxation used
                           ! 1: relaxation switched on for reading (init_oce_relax=1) or some testcases only
 para_surfRelax_Temp = 1.0 ! strength of 2-dim relaxation for temperature (months)
                           ! this value is divided by number of seconds per month (=30*24*3600)
 type_surfRelax_Salt = 1   ! 2-dim relaxation of salinity - see temperature relaxation for type values
 para_surfRelax_Salt = 3.0 ! strength of 2-dim relaxation for salinity (months)
 forcing_windstress_u_type = 1 ! read from file
 forcing_windstress_v_type = 1 ! read from file
 forcing_fluxes_type       = 1 ! read from file
! forcing_enable_freshwater  = .TRUE.  ! apply freshwater forcing boundary condition (OMIP only)
! forcing_set_runoff_to_zero = .FALSE. ! set runoff to zero for comparison to MPIOM; default: false
! zero_freshwater_flux       = .FALSE. ! set external freshwater flux to zero; default: false
                                       ! salt-change due to internal fluxes only
 limit_seaice    = .TRUE. ! default: true
 seaice_limit    = 0.8    ! hard limit set to 80% of upper layer for sea ice
 limit_elevation = .TRUE.
 atm_pressure_included_in_ocedyn = .FALSE. ! use atm. pressure in the ocean dynamics
 atm_pressure_included_in_icedyn = .FALSE. ! use atm. pressure in the ice dynamics
 use_tides = .FALSE.
 tides_mod = 1 ! 1 Default 2 MPIOM
 tide_startdate = '2020-01-01 00:00'
 lswr_jerlov     = .TRUE. ! use jerlov water types for sw absorption
 jerlov_atten    = 0.08   ! jerlov water type IB
 jerlov_bluefrac = 0.36   ! jerlov water type IB
/
&ocean_initialConditions_nml
 initial_salinity_type    = 1 ! read from file
 initial_temperature_type = 1 ! read from file
/
&ocean_diagnostics_nml
 diagnostics_level = 1
 diagnose_for_horizontalVelocity = .false.
/
&io_nml
 restart_file_type  = 5
 write_last_restart = .true.
! lkeep_in_sync = .true. ! sync after each timestep
! restart_write_mode = "joint procs multifile"
/
EOF
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# add standard atmo_non-hydrostatic_files
#. ${thisdir}/add_required_ocean_files
# register input files: initial state, surface fluxes, relaxation data
add_link_file $ocean_data_InputFolder2/ts_phc3.0_annual_icon_OceanOnly_Global_IcosSymmetric_0039km_rotatedZ37d_BlackSea_Greenland_modified_sills_srtm30_1min.nc_L128.nc initial_state.nc
add_link_file $ocean_data_InputFolder/R2B6_ocean-flux.nc ocean-flux.nc
add_link_file $ocean_data_InputFolder2/ts_phc3.0_annual_icon_OceanOnly_Global_IcosSymmetric_0039km_rotatedZ37d_BlackSea_Greenland_modified_sills_srtm30_1min.nc_surf.nc ocean-relax.nc
#-----------------------------------------------------------------------------
#!/bin/ksh
#=============================================================================
#
# This section of the run script prepares and starts the model integration.
#
# MODEL and START must be defined as environment variables or
# they must be substituted with appropriate values.
#
# Marco Giorgetta, MPI-M, 2010-04-21
#
#-----------------------------------------------------------------------------
#
# directories definition
#
RUNSCRIPTDIR=${basedir}/run
# Final-status marker written by check_final_status; remove any stale copy.
# NOTE(review): the original computed and removed this file twice
# (${basedir}/run and ${RUNSCRIPTDIR} are the same path); done once here.
final_status_file=${RUNSCRIPTDIR}/${job_name}.final_status
rm -f ${final_status_file}
if [ x$grids_folder = x ] ; then
  HGRIDDIR=${basedir}/grids
else
  HGRIDDIR=$grids_folder
fi
# experiment directory, with plenty of space, create if new
# NOTE(review): the original tested and created the directory twice;
# a single 'mkdir -p' is equivalent.
EXPDIR=${basedir}/experiments/${EXPNAME}
if [ ! -d ${EXPDIR} ] ; then
  mkdir -p ${EXPDIR}
fi
ls -ld ${EXPDIR}
check_error $? "${EXPDIR} does not exist?"
cd ${EXPDIR}
#-----------------------------------------------------------------------------
# set up the model lists if they do not exist
# this works for single model runs
# for coupled runs the lists should be declared explicitly
if [ x$namelist_list = x ]; then
  minrank_list[0]=0
  maxrank_list[0]=65535
  incrank_list[0]=1
  if [ x$atmo_namelist != x ]; then
    # this is the atmo model
    namelist_list[0]="$atmo_namelist"
    modelname_list[0]="atmo"
    modeltype_list[0]=1
    run_atmo="true"
  elif [ x$ocean_namelist != x ]; then
    # this is the ocean model
    namelist_list[0]="$ocean_namelist"
    modelname_list[0]="oce"
    modeltype_list[0]=2
  elif [ x$psrad_namelist != x ]; then
    # this is the psrad model
    namelist_list[0]="$psrad_namelist"
    modelname_list[0]="psrad"
    modeltype_list[0]=3
  elif [ x$hamocc_namelist != x ]; then
    # this is the hamocc model
    namelist_list[0]="$hamocc_namelist"
    modelname_list[0]="hamocc"
    modeltype_list[0]=4
  elif [ x$testbed_namelist != x ]; then
    # this is the testbed model
    namelist_list[0]="$testbed_namelist"
    modelname_list[0]="testbed"
    modeltype_list[0]=99
  else
    check_error 1 "No namelist is defined"
  fi
fi
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# set some default values and derive some run parameteres
restart=${restart:=".false."}
restartSemaphoreFilename='isRestartRun.sem'
#AUTOMATIC_RESTART_SETUP:
if [ -f ${restartSemaphoreFilename} ]; then
  restart=.true.
  # do not delete switch-file, to enable restart after unintended abort
  #[[ -f ${restartSemaphoreFilename} ]] && rm ${restartSemaphoreFilename}
fi
#END AUTOMATIC_RESTART_SETUP
#
# On GPFS, pause briefly to let the filesystem finish write operations.
# NOTE(review): the inherited comment claimed "wait 5min"; the actual
# pause is, and always was, 10 seconds.
if [ "x$restart" != 'x.false.' -a "x$submit" != 'x' ]; then
  if [ x$(df -T ${EXPDIR} | cut -d ' ' -f 2) = gpfs ]; then
    sleep 10
  fi
fi
# fill some checks
run_atmo=${run_atmo="false"}
if [ x$atmo_namelist != x ]; then
  run_atmo="true"
fi
run_jsbach=${run_jsbach="false"}
run_ocean=${run_ocean="false"}
if [ x$ocean_namelist != x ]; then
  run_ocean="true"
fi
run_psrad=${run_psrad="false"}
if [ x$psrad_namelist != x ]; then
  run_psrad="true"
fi
run_hamocc=${run_hamocc="false"}
if [ x$hamocc_namelist != x ]; then
  run_hamocc="true"
fi
#-----------------------------------------------------------------------------
# add grids to required files
all_grids="${atmo_dyn_grids} ${atmo_rad_grids} ${ocean_grids}"
for gridfile in ${all_grids}; do
  gridfile=${gridfile//\'/}  # strip all ' in case ' is used to delimit the grid names
  gridfile=${gridfile//\"/}  # strip all " in case " is used to delimit the grid names
  gridfile=${gridfile//\,/}  # strip all , in case , is used to separate the grid names
  grfinfofile=${gridfile%.nc}-grfinfo.nc
  #
  ls -l ${HGRIDDIR}/$gridfile
  check_error $? "${HGRIDDIR}/$gridfile does not exist."
  add_link_file ${HGRIDDIR}/${gridfile} ./
  if [ -f ${HGRIDDIR}/${grfinfofile} ]; then
    add_link_file ${HGRIDDIR}/${grfinfofile} ./
  fi
done
#-----------------------------------------------------------------------------
#
print_required_files
copy_required_files
link_required_files
#-----------------------------------------------------------------------------
# get restart files
if [ x$restart_atmo_from != "x" ] ; then
  rm -f restart_atm_DOM01.nc
# ln -s ${basedir}/experiments/${restart_from_folder}/${restart_atmo_from} ${EXPDIR}/restart_atm_DOM01.nc
  cp ${basedir}/experiments/${restart_from_folder}/${restart_atmo_from} cp_restart_atm.nc
  ln -s cp_restart_atm.nc restart_atm_DOM01.nc
  restart=".true."
fi
if [ x$restart_ocean_from != "x" ] ; then
  rm -f restart_oce.nc
# ln -s ${basedir}/experiments/${restart_from_folder}/${restart_ocean_from} ${EXPDIR}/restart_oce.nc
  cp ${basedir}/experiments/${restart_from_folder}/${restart_ocean_from} cp_restart_oce_DOM01.nc
  ln -s cp_restart_oce_DOM01.nc restart_oce_DOM01.nc
  restart=".true."
fi
#-----------------------------------------------------------------------------
read_restart_namelists=${read_restart_namelists:=".true."}
#-----------------------------------------------------------------------------
#
# create ICON master namelist
# ------------------------
# For a complete list see Namelist_overview and Namelist_overview.pdf
#-----------------------------------------------------------------------------
# create master_namelist
master_namelist=icon_master.namelist
if [ x$end_date = x ]; then
cat > $master_namelist << EOF
&master_nml
 lrestart            = $restart
/
&master_time_control_nml
 experimentStartDate  = "$start_date"
 restartTimeIntval    = "$restart_interval"
 checkpointTimeIntval = "$checkpoint_interval"
/
&time_nml
 is_relative_time = .false.
/
EOF
else
  if [ x$calendar = x ]; then
    calendar='proleptic gregorian'
    calendar_type=1
  else
    calendar=$calendar
    calendar_type=$calendar_type
  fi
cat > $master_namelist << EOF
&master_nml
 lrestart               = $restart
 read_restart_namelists = $read_restart_namelists
/
&master_time_control_nml
 calendar             = "$calendar"
 checkpointTimeIntval = "$checkpoint_interval"
 restartTimeIntval    = "$restart_interval"
 experimentStartDate  = "$start_date"
 experimentStopDate   = "$end_date"
/
&time_nml
 is_relative_time = .false.
/
EOF
fi
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# add model component to master_namelist
add_component_to_master_namelist()
{
  model_namelist_filename="$1"
  model_name=$2
  model_type=$3
  model_min_rank=$4
  model_max_rank=$5
  model_inc_rank=$6
cat >> $master_namelist << EOF
&master_model_nml
  model_name="$model_name"
  model_namelist_filename="$model_namelist_filename"
  model_type=$model_type
  model_min_rank=$model_min_rank
  model_max_rank=$model_max_rank
  model_inc_rank=$model_inc_rank
/
EOF
  #-----------
  # move the component namelist from the run dir into the experiment dir
  if [ -f ${RUNSCRIPTDIR}/$model_namelist_filename ] ; then
    mv -f ${RUNSCRIPTDIR}/$model_namelist_filename ${EXPDIR}
    check_error $? "mv -f ${RUNSCRIPTDIR}/$model_namelist_filename"
  else
    check_error 1 "${RUNSCRIPTDIR}/$model_namelist_filename does not exist"
  fi
}
#-----------------------------------------------------------------------------
no_of_models=${#namelist_list[*]}
echo "no_of_models=$no_of_models"
j=0
while [ $j -lt ${no_of_models} ]
do
  add_component_to_master_namelist "${namelist_list[$j]}" "${modelname_list[$j]}" ${modeltype_list[$j]} ${minrank_list[$j]} ${maxrank_list[$j]} ${incrank_list[$j]}
  j=$((j + 1))
done
#-----------------------------------------------------------------------------
# Add JSBACH part to master_namelist
if [[ $run_jsbach == @(yes|true) ]]; then
cat >> $master_namelist << EOF
&jsb_control_nml
 is_standalone  = .false.
 restart_jsbach = .false.
 debug_level    = 0
 timer_level    = 0
/
EOF
  #
  if [[ -n ${atmo_dyn_grids} ]]; then
    set -A gridfiles $atmo_dyn_grids   # ksh array assignment
    no_of_domains=${#gridfiles[*]}
  else
    no_of_domains=1
  fi
  echo "no_of_domains=$no_of_domains"
  domain=""
  domain_suffix=""
  j=1
  while [ $j -le ${no_of_domains} ]
  do
    if [[ $no_of_domains -gt 1 ]]; then
      # no_of_domains < 10 !
      domain=" DOM0${j}"
      domain_suffix="_d${j}"
    fi
cat >> $master_namelist << EOF
&jsb_model_nml
 model_id = $j
 model_name = "JSBACH${domain}"
 model_shortname = "jsb${domain_suffix}"
 model_description = 'JSBACH land surface model'
 model_namelist_filename = "${lnd_namelist}${domain_suffix}"
/
EOF
    if [[ -f ${RUNSCRIPTDIR}/NAMELIST_${EXPNAME}_lnd${domain_suffix} && -f ${EXPDIR}/NAMELIST_${EXPNAME}_lnd${domain_suffix} ]] ; then
      # namelist file has already been copied to expdir by copy_required_files above
      rm ${RUNSCRIPTDIR}/NAMELIST_${EXPNAME}_lnd${domain_suffix}
      check_error $? "rm ${RUNSCRIPTDIR}/NAMELIST_${EXPNAME}_lnd${domain_suffix}"
    else
      check_error 1 "${RUNSCRIPTDIR}/NAMELIST_${EXPNAME}_lnd${domain_suffix} does not exist"
    fi
    j=$((j + 1))
  done
fi
#
# get model
#
ls -l ${MODEL}
check_error $? "${MODEL} does not exist?"
# ldd -v ${MODEL}
#
#-----------------------------------------------------------------------------
#
# start experiment
#
rm -f finish.status
#
date
${START} ${MODEL} # > out.txt 2>&1
date
#
# The model writes finish.status on a clean shutdown; its absence means the
# run aborted.
if [ -r finish.status ] ; then
  check_final_status 0 "${START} ${MODEL}"
else
  check_final_status -1 "${START} ${MODEL}"
fi
#
#-----------------------------------------------------------------------------
#
finish_status=$(cat finish.status)
echo "$finish_status"
echo "============================"
echo "Script run successfully: $finish_status"
echo "============================"
#-----------------------------------------------------------------------------
# rm output_schedule_steps*
#-----------------------------------------------------------------------------
if [[ "x$use_hamocc" = "xyes" ]]; then
  # store HAMOCC log file under a name derived from the newest EU output file
  strg="$(ls -rt ${EXPNAME}_hamocc_EU*.nc* | tail -1 )"
  prefx="${EXPNAME}_hamocc_EU_tendencies"
  foo=${strg##${prefx}}
  foo=${foo%%.*}
  bgcout_file="bgcout_${foo}"
  mv bgcout $bgcout_file
fi
#-----------------------------------------------------------------------------
namelist_list=""
#-----------------------------------------------------------------------------
# check if we have to restart, ie resubmit
# Note: this is a different mechanism from checking the restart
# (quoted test: an empty/absent finish_status must not break the comparison)
if [ "$finish_status" = "RESTART" ] ; then
  echo "restart next experiment..."
  this_script="${RUNSCRIPTDIR}/${job_name}"
  echo 'this_script: ' "$this_script"
  # leave the semaphore so the next run knows it is a restart
  touch ${restartSemaphoreFilename}
  cd ${RUNSCRIPTDIR}
  ${submit} $this_script $run_param_0
else
  [[ -f ${restartSemaphoreFilename} ]] && rm ${restartSemaphoreFilename}
fi
#-----------------------------------------------------------------------------
# automatic call/submission of post processing if available
if [ "x${autoPostProcessing}" = "xtrue" ]; then
  # check if there is a postprocessing is available
  cd ${RUNSCRIPTDIR}
  targetPostProcessingScript="./post.${EXPNAME}.run"
  [[ -x $targetPostProcessingScript ]] && ${submit} ${targetPostProcessingScript}
  cd -
fi
#-----------------------------------------------------------------------------
cd $RUNSCRIPTDIR
#-----------------------------------------------------------------------------
#
exit 0
#
# vim:ft=sh
#-----------------------------------------------------------------------------