#!/bin/bash
#
# Slurm script template for PMSCO calculations on the Ra cluster
# based on run_mpi_HPL_nodes-2.sl by V. Markushin 2016-03-01
#
# this version checks out the source code from a git repository
# to a temporary location and compiles the code.
# this is done to minimize conflicts between different jobs,
# but it requires that each job has its own git commit.
#
# Use:
# - enter the appropriate parameters and save as a new file.
# - call the sbatch command to submit the job script,
#   requesting a specific number of nodes and tasks.
#   example:
#   sbatch --nodes=2 --ntasks-per-node=24 --time=02:00:00 run_pmsco.sl
# the qpmsco script does all of this for you
# (an illustrative substitution is sketched below).
#
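# a minimal sketch of the placeholder substitution, assuming this template is
# saved as run_pmsco.sl and using made-up example values (myjob, 2 hours);
# this is an illustration only, not the actual qpmsco implementation:
#
#   sed -e 's/_PMSCO_JOBNAME/myjob/g' \
#       -e 's|_PMSCO_WORK_DIR|/home/user/runs/myjob|g' \
#       -e 's|_PMSCO_PROJECT_FILE|projects/myproject/myproject.py|g' \
#       -e 's/_PMSCO_WALLTIME_HR/2/g' \
#       -e 's/_PMSCO_PROJECT_ARGS//g' \
#       run_pmsco.sl > myjob.sl
#   sbatch --nodes=2 --ntasks-per-node=24 --time=02:00:00 myjob.sl
#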
# PMSCO arguments
# copy this template to a new file, and set the arguments.
#
# PMSCO_WORK_DIR
#   path to be used as the working directory.
#   contains the script derived from this template
#   and a copy of the pmsco code in the 'pmsco' directory.
#   receives output and temporary files.
#
# PMSCO_PROJECT_FILE
#   python module that declares the project and starts the calculation.
#   must include the file path relative to $PMSCO_WORK_DIR.
#
# PMSCO_OUT
#   name of the output file. should not include a path.
#
# all paths are relative to $PMSCO_WORK_DIR or (better) absolute.
#
# Further arguments
#
# PMSCO_JOBNAME (required)
#   the job name is the base name for output files.
#
# PMSCO_WALLTIME_HR (integer, required)
#   wall time limit in hours. must be an integer, minimum 1.
#   this value is passed to PMSCO.
#   it should specify the same amount of wall time as requested from the
#   scheduler (see the pairing example below).
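#   for example (an illustrative pairing, not checked by this script):
#   PMSCO_WALLTIME_HR=2 goes with sbatch --time=02:00:00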
#
# PMSCO_PROJECT_ARGS (optional)
#   extra arguments that are parsed by the project module.
#
#SBATCH --job-name="_PMSCO_JOBNAME"
#SBATCH --output="_PMSCO_JOBNAME.o.%j"
#SBATCH --error="_PMSCO_JOBNAME.e.%j"

PMSCO_WORK_DIR="_PMSCO_WORK_DIR"
PMSCO_JOBNAME="_PMSCO_JOBNAME"
PMSCO_WALLTIME_HR=_PMSCO_WALLTIME_HR

PMSCO_PROJECT_FILE="_PMSCO_PROJECT_FILE"
PMSCO_OUT="_PMSCO_JOBNAME"
PMSCO_PROJECT_ARGS="_PMSCO_PROJECT_ARGS"

module load psi-python36/4.4.0
module load gcc/4.8.5
module load openmpi/3.1.3
source activate pmsco3

echo '================================================================================'
echo "=== Running $0 at the following time and place:"
date
/bin/hostname
cd "$PMSCO_WORK_DIR"
pwd
ls -lA
# the intel compiler is currently not compatible with mpi4py. -mm 170131
#echo
#echo '================================================================================'
#echo "=== Setting the environment to use Intel Cluster Studio XE 2016 Update 2 intel/16.2:"
#cmd="source /opt/psi/Programming/intel/16.2/bin/compilervars.sh intel64"
#echo $cmd
#$cmd
echo
echo '================================================================================'
echo "=== The environment is set as follows:"
env
echo
echo '================================================================================'
echo "BEGIN test"
which mpirun
cmd="mpirun /bin/hostname"
echo $cmd
$cmd
echo "END test"
echo
echo '================================================================================'
echo "BEGIN mpirun pmsco"
echo

cd "$PMSCO_WORK_DIR"
cd pmsco
echo "code revision"
git log --pretty=tformat:'%h %ai %d' -1
make -C pmsco all
python -m compileall pmsco
python -m compileall projects
echo

cd "$PMSCO_WORK_DIR"
PMSCO_CMD="python pmsco/pmsco $PMSCO_PROJECT_FILE"
PMSCO_ARGS="$PMSCO_PROJECT_ARGS"
if [ -n "$PMSCO_SCAN_FILES" ]; then
    PMSCO_ARGS="-s $PMSCO_SCAN_FILES $PMSCO_ARGS"
fi
if [ -n "$PMSCO_OUT" ]; then
    PMSCO_ARGS="-o $PMSCO_OUT $PMSCO_ARGS"
fi
if [ "$PMSCO_WALLTIME_HR" -ge 1 ]; then
    PMSCO_ARGS="-t $PMSCO_WALLTIME_HR $PMSCO_ARGS"
fi
if [ -n "$PMSCO_LOGLEVEL" ]; then
    PMSCO_ARGS="--log-level $PMSCO_LOGLEVEL --log-file $PMSCO_JOBNAME.log $PMSCO_ARGS"
fi
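
# for illustration, with the example values suggested in the header comments
# (myjob, projects/myproject/myproject.py, 2 hours) and with no scan files or
# log level set, the assembled command would look roughly like:
#   mpirun python pmsco/pmsco projects/myproject/myproject.py -t 2 -o myjob
# this is an assumed example; the actual command is echoed below before it runs.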

# Do not use the OpenMPI-specific options, like "-x LD_LIBRARY_PATH", with the Intel mpirun.
cmd="mpirun $PMSCO_CMD $PMSCO_ARGS"
echo $cmd
$cmd
echo "END mpirun pmsco"
echo '================================================================================'
cd "$PMSCO_WORK_DIR"
rm -rf pmsco
date
ls -lAtr
echo '================================================================================'

exit 0