#!/bin/bash
#
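# Serial GROMACS job: a single core on one node (nodes=1:ppn=1)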
#PBS -r n
#PBS -N Gromacs
#PBS -o stdout
#PBS -e stderr
#PBS -j oe
#PBS -m abe
#PBS -M YOUR_MAIL_ADDRESS_HERE
#PBS -l nodes=1:ppn=1
#PBS -l pmem=200mb
#PBS -l walltime=01:00:00
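# Load the compiler and GROMACS environment modules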
module load intel
module load gromacs
cd $PBS_O_WORKDIR
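# mdrun_d is the serial double-precision binary; it reads its run input from a
# .tpr file produced by grompp (topol.tpr is the GROMACS default name -- replace
# it with the name of your own run input file)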
mdrun_d -s topol.tpr > gromacs.out 2>&1
#!/bin/bash
#
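# Parallel GROMACS job: 8 MPI ranks across 2 nodes (nodes=2:ppn=4)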
#PBS -r n
#PBS -N Gromacs
#PBS -o stdout
#PBS -e stderr
#PBS -j oe
#PBS -m abe
#PBS -M YOUR_MAIL_ADDRESS_HERE
#PBS -l nodes=2:ppn=4
#PBS -l pmem=200mb
#PBS -l walltime=01:00:00
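# Load the compiler, MPI stack, and GROMACS environment modules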
module load intel
module load openmpi
module load gromacs
cd $PBS_O_WORKDIR
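# mpiexec starts one MPI rank per allocated core (2 nodes x 4 ppn = 8 ranks);
# mdrun_mpi_d is the MPI-enabled double-precision binary, reading its run input
# from a .tpr file (topol.tpr is the GROMACS default name -- replace as needed)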
mpiexec mdrun_mpi_d -s topol.tpr > gromacs.out 2>&1
#!/bin/bash
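# GPU-accelerated GROMACS job: 2 CPU cores and 1 GPU, submitted to the gpu queue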
#PBS -N gromacs
#PBS -o stdout
#PBS -m abe
#PBS -M YOUR_MAIL_ADDRESS_HERE
#PBS -j oe
#PBS -l nodes=1:ppn=2:gpus=1
#PBS -l walltime=36:00:00
#PBS -q gpu
module load intel/2013
module load cuda/5.5
module load gromacs/4.6.5
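# Determine which GPUs PBS assigned to this job: $PBS_GPUFILE lists one entry per
# GPU in the form <hostname>-gpu<N>; build a comma-separated list of those entries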
myHostName=`hostname -s`
export PBS_GPUS=`gpus=; while read gpu ; do gpus="$gpus,$gpu"; done < $PBS_GPUFILE; echo $gpus | sed -e "s%^,%%"`
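# Run one OpenMP thread per CPU core allocated to the job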
export OMP_NUM_THREADS=$PBS_NUM_PPN
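# Strip the "<hostname>-gpu" prefix from each entry so CUDA sees only the numeric device IDs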
export CUDA_VISIBLE_DEVICES=`echo $PBS_GPUS | sed -e "s%$myHostName-gpu%%g"`
echo CUDA_VISIBLE_DEVICES = $CUDA_VISIBLE_DEVICES
cd $PBS_O_WORKDIR
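# grompp assembles the run input file (.tpr) from the mdp parameters, starting
# coordinates, checkpoint, and topology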
grompp -f prodmd.mdp -c nptsim1.pdb -t nptsim1.cpt -p topology1.top -o prodmd.tpr
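# -deffnm names all output files prodmd.*; mdrun uses the GPU selected via
# CUDA_VISIBLE_DEVICES above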
mdrun -v -s prodmd.tpr -c prodmd.gro -deffnm prodmd > gromacs.log 2>&1
exit 0