#!/bin/bash
#
# SGE script template for MSC calculations
#
# This script uses the tight integration of openmpi-1.4.5-gcc-4.6.3 in SGE
# using the parallel environment (PE) "orte".
# This script must be used only with the qsub command - do NOT run it as a
# stand-alone shell script, because it would start all processes on the local node.
#
# PhD arguments
# copy this template to a new file and set the arguments
# (a substitution sketch follows at the end of this header).
#
# PHD_WORK_DIR
#   path to be used as the working directory.
#   contains the SGE script derived from this template.
#   receives output and temporary files.
#
# PHD_PROJECT_FILE
#   python module that declares the project and starts the calculation.
#   must include the file path relative to $PHD_WORK_DIR.
#
# PHD_SOURCE_DIR
#   path to the pmsco source directory
#   (the directory which contains the bin, lib and pmsco sub-directories).
#
# PHD_SCAN_FILES
#   list of scan files.
#
# PHD_OUT
#   name of the output file. should not include a path.
#
# all paths are relative to $PHD_WORK_DIR or (better) absolute.
#
#
# Further arguments
#
# PHD_JOBNAME (required)
#   the job name is the base name for output files.
#
# PHD_NODES (required)
#   number of computing nodes (processes) to allocate for the job.
#
# PHD_WALLTIME_HR (required)
#   wall time limit (hours)
#
# PHD_WALLTIME_MIN (required)
#   wall time limit (minutes)
#
# PHD_MODE (optional)
#   calculation mode: single, swarm, grid, gradient
#
# PHD_CODE (optional)
#   calculation code: edac, msc, test
#
# PHD_LOGLEVEL (optional)
#   requested log level: DEBUG, INFO, WARNING, ERROR.
#   creates a log file based on the job name.
#
# PHD_PROJECT_ARGS (optional)
#   extra arguments that are parsed by the project module.
#
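# A minimal sketch of how the placeholders might be filled in; file names and
# values below are examples only, not part of the template:
#
#   sed -e "s:_PHD_WORK_DIR:$HOME/calc/run01:g" \
#       -e "s:_PHD_JOBNAME:run01:g" \
#       -e "s:_PHD_NODES:8:g" \
#       -e "s:_PHD_WALLTIME_HR:12:g" \
#       -e "s:_PHD_WALLTIME_MIN:0:g" \
#       msc_template.sge > run01.sge
#
# (the remaining _PHD_* placeholders are replaced in the same way)
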
PHD_WORK_DIR="_PHD_WORK_DIR"
PHD_JOBNAME="_PHD_JOBNAME"
PHD_NODES=_PHD_NODES
PHD_WALLTIME_HR=_PHD_WALLTIME_HR
PHD_WALLTIME_MIN=_PHD_WALLTIME_MIN

PHD_PROJECT_FILE="_PHD_PROJECT_FILE"
PHD_MODE="_PHD_MODE"
PHD_CODE="_PHD_CODE"
PHD_SOURCE_DIR="_PHD_SOURCE_DIR"
PHD_SCAN_FILES="_PHD_SCAN_FILES"
PHD_OUT="_PHD_JOBNAME"
PHD_LOGLEVEL="_PHD_LOGLEVEL"
PHD_PROJECT_ARGS="_PHD_PROJECT_ARGS"

# Define your job name, parallel environment with the number of slots, and run time:
#$ -cwd
#$ -N _PHD_JOBNAME.job
#$ -pe orte _PHD_NODES
#$ -l ram=2G
#$ -l s_rt=_PHD_WALLTIME_HR:_PHD_WALLTIME_MIN:00
#$ -l h_rt=_PHD_WALLTIME_HR:_PHD_WALLTIME_MIN:30
#$ -V
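# s_rt is the soft and h_rt the hard run-time limit; the extra 30 seconds after
# the soft limit give the job a short window to shut down cleanly.
# Submit the prepared script with qsub, e.g. "qsub run01.sge" (example file name).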

###################################################
# Fix the SGE environment-handling bug (bash):
source /usr/share/Modules/init/sh
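# "export -n -f module" removes the export attribute from the "module" shell
# function defined by the Modules init script; the function itself remains
# available in this shell.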
export -n -f module

# Load the environment modules for this job (the order may be important):
module load python/python-2.7.5
module load gcc/gcc-4.6.3
module load mpi/openmpi-1.4.5-gcc-4.6.3
module load blas/blas-20110419-gcc-4.6.3
module load lapack/lapack-3.4.2-gcc-4.6.3
export LD_LIBRARY_PATH=$PHD_SOURCE_DIR/lib/:$LD_LIBRARY_PATH
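# the lib sub-directory of the pmsco source tree is prepended to
# LD_LIBRARY_PATH so that shared libraries placed there are found at run time.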

###################################################
# Set the environment variables:
MPIEXEC=$OPENMPI/bin/mpiexec
# OPENMPI is set by the mpi/openmpi-* module.

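# OMP_NUM_THREADS=1 limits any OpenMP-parallel code to one thread per MPI
# process; OMPI_MCA_btl restricts Open MPI to the openib (InfiniBand),
# sm (shared memory) and self (loopback) transports.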
export OMP_NUM_THREADS=1
export OMPI_MCA_btl='openib,sm,self'
# export OMPI_MCA_orte_process_binding=core

##############
# BEGIN DEBUG
# Print the SGE environment on the master host:
echo "================================================================"
echo "=== SGE job JOB_NAME=$JOB_NAME JOB_ID=$JOB_ID"
echo "================================================================"
echo DATE=`date`
echo HOSTNAME=`hostname`
echo PWD=`pwd`
echo "NSLOTS=$NSLOTS"
echo "PE_HOSTFILE=$PE_HOSTFILE"
cat "$PE_HOSTFILE"
echo "================================================================"
echo "Running environment:"
env
echo "================================================================"
echo "Loaded environment modules:"
module list 2>&1
echo
# END DEBUG
##############

##############
# Setup
cd "$PHD_SOURCE_DIR"
python -m compileall .

cd "$PHD_WORK_DIR"
ulimit -c 0

###################################################
# The command to run with mpiexec:
CMD="python $PHD_PROJECT_FILE"
ARGS="$PHD_PROJECT_ARGS"

if [ -n "$PHD_SCAN_FILES" ]; then
    ARGS="-s $PHD_SCAN_FILES -- $ARGS"
fi

if [ -n "$PHD_CODE" ]; then
    ARGS="-c $PHD_CODE $ARGS"
fi

if [ -n "$PHD_MODE" ]; then
    ARGS="-m $PHD_MODE $ARGS"
fi

if [ -n "$PHD_OUT" ]; then
    ARGS="-o $PHD_OUT $ARGS"
fi

if [ "$PHD_WALLTIME_HR" -ge 1 ]
|
|
then
|
|
ARGS="-t $PHD_WALLTIME_HR $ARGS"
|
|
else
|
|
ARGS="-t 0.5 $ARGS"
|
|
fi
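# Note: when the limit is one hour or more, only the whole hours are passed to
# the -t option and the minutes are ignored. A fractional value could be
# computed instead, e.g. (sketch only, not part of the original template):
#   PHD_WALLTIME=$(awk "BEGIN {printf \"%.2f\", $PHD_WALLTIME_HR + $PHD_WALLTIME_MIN / 60.0}")
#   ARGS="-t $PHD_WALLTIME $ARGS"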

if [ -n "$PHD_LOGLEVEL" ]; then
    ARGS="--log-level $PHD_LOGLEVEL --log-file $PHD_JOBNAME.log $ARGS"
fi

# The MPI command to run:
MPICMD="$MPIEXEC --prefix $OPENMPI -x PATH -x LD_LIBRARY_PATH -x OMP_NUM_THREADS -x OMPI_MCA_btl -np $NSLOTS $CMD $ARGS"
echo "Command to run:"
echo "$MPICMD"
echo

exec $MPICMD

exit 0