====== Molecular Dynamics ======

COMSOL, LAMMPS, VASP, Wien2k

====== VASP ======

===== VASP =====

==== Modules ====

<code>
module purge
module load autotools
module load gnu7/7.2.0
module load openmpi3/3.0.0
module load openblas/0.2.20
module load scalapack/2.0.2
module load fftw/3.3.6
module load prun
</code>
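
After loading, a quick ''%%module list%%'' confirms that the toolchain above is active (a generic check, not part of the original example):

<code>
module list
# should report gnu7/7.2.0, openmpi3/3.0.0, openblas/0.2.20,
# scalapack/2.0.2 and fftw/3.3.6 among the currently loaded modules
</code>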

----

===== VASP =====

==== vasp.5.lib ====

<code>
ln -s makefile.linux_gfortran makefile
export CC=gcc
make
</code>

----

===== VASP 5.4.1/5.4.4 =====

==== makefile.include ====

<code>
OFLAG      = -O2 -march=broadwell

LIBDIR     = /opt/ohpc/pub/libs/gnu7/openblas/0.2.20/lib

BLAS       = -L$(LIBDIR) -lopenblas
LAPACK     =
BLACS      =
SCALAPACK  = -L$(LIBDIR) -lscalapack $(BLACS)

FFTW       ?= /opt/ohpc/pub/libs/gnu7/openmpi3/fftw/3.3.6
</code>
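
These lines are edits to the ''%%makefile.include%%'' in the top of the VASP source tree. A common starting point is to copy one of the GNU templates shipped with the source and then adjust the variables as shown above (template name is an assumption and may differ between releases):

<code>
# sketch: in the unpacked VASP 5.4.x source directory
cp arch/makefile.include.linux_gnu makefile.include
# then edit OFLAG, LIBDIR, BLAS, SCALAPACK and FFTW as listed above
</code>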

----

===== VASP 5.4.1/5.4.4 =====

==== Compile ====

<code>
make all
</code>

==== If you have to recompile ====

<code>
make veryclean
make all
</code>

==== To build only a specific version ====

<code>
make std
</code>
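
The standard VASP 5.4.x makefile also provides targets for the other two executables, so they can be built individually in the same way:

<code>
make gam   # gamma-point-only binary (vasp_gam)
make ncl   # non-collinear binary (vasp_ncl)
</code>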

----

===== VASP 5.4.1/5.4.4 =====

==== Job script ====

''%%/opt/ohpc/pub/examples/slurm/mul/vasp%%''

<code>
#!/bin/bash
#
#SBATCH -J vasp
#SBATCH -N 2
#SBATCH -o job.%j.out
#SBATCH -p E5-2690v4
#SBATCH -q E5-2690v4-batch
#SBATCH --ntasks-per-node=28
#SBATCH --threads-per-core=1
#SBATCH --mem=16G

export OMP_NUM_THREADS=1

exe=/path/to/my/vasp/vasp.5.4.4/bin/vasp_std

time mpirun -np $SLURM_NPROCS $exe
</code>
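
Assuming the script above is saved as ''%%job.sh%%'' (the file name is arbitrary), it is submitted with ''%%sbatch%%''; the output goes to ''%%job.<jobid>.out%%'' as set by the ''%%-o%%'' line:

<code>
sbatch job.sh
</code>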

----

====== LAMMPS ======

===== LAMMPS =====

''%%/opt/ohpc/pub/examples/slurm/mul/lammps%%''

<code>
#!/bin/bash
#
#SBATCH -J lammps
#SBATCH -N 2
#SBATCH -o job.%j.out
#SBATCH -p E5-2690v4
#SBATCH -q E5-2690v4-batch
#SBATCH --ntasks-per-node=28
#SBATCH --threads-per-core=1
#SBATCH --mem=16G

module purge
module load lammps

mpirun -np $SLURM_NTASKS lmp_mul -in ./in.put
</code>
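
''%%in.put%%'' stands for your LAMMPS input script, and ''%%lmp_mul%%'' is the binary provided by the ''%%lammps%%'' module. A quick check (not part of the original example) shows what the module sets and where the binary lives:

<code>
module show lammps   # display the environment changes made by the module
which lmp_mul        # confirm the LAMMPS binary is on the PATH
</code>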

----

====== Wien2k ======

----

===== Wien2k =====

Example job script in:

<code>
/opt/ohpc/pub/examples/slurm/mul/wien2k
</code>
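
A typical starting point is to copy that example into your own directory and adapt it (a sketch, assuming the path above is a directory containing the script):

<code>
cp -r /opt/ohpc/pub/examples/slurm/mul/wien2k ~/wien2k-example
cd ~/wien2k-example
</code>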

----

====== COMSOL ======

----

===== COMSOL 5.2a =====

==== Module ====

<code>
module load COMSOL/5.2a
</code>
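
If other COMSOL releases are installed, ''%%module avail%%'' lists them:

<code>
module avail COMSOL   # list the COMSOL versions available on the cluster
</code>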

----

===== COMSOL 5.2a =====

==== Job script, threaded version (no MPI) ====

You can use only one node in this case. Create a job file called ''%%job_smp.sh%%'':

''%%/opt/ohpc/pub/examples/slurm/mul/comsol%%''

<code>
#!/bin/bash
#
#SBATCH -J comsol
#SBATCH -N 1
#SBATCH -o job.%j.out
#SBATCH -p E5-2690v4
#SBATCH -q E5-2690v4-batch
#SBATCH --ntasks-per-node=28
#SBATCH --threads-per-core=1
#SBATCH --time=04:00:00
#SBATCH --mem=16G

# Details of your input and output files
INPUTFILE=micromixer_cluster.mph
OUTPUTFILE=micromixer.out

# Load our COMSOL module
module purge
module load COMSOL/5.2a

# Create the temporary directory
TMPDIR="/tmp1/comsol"
mkdir -p $TMPDIR

# Now run COMSOL in batch mode with the input and output detailed above.
comsol batch -np $SLURM_NTASKS -inputfile $INPUTFILE -outputfile $OUTPUTFILE -tmpdir $TMPDIR
</code>

----

===== COMSOL 5.2a =====

==== Job script with MPI ====

You can use more than one node in this case. Create a job file called ''%%job_mpi.sh%%'':

''%%/opt/ohpc/pub/examples/slurm/mul/comsol%%''

<code>
#!/bin/bash
#
#SBATCH -J comsol
#SBATCH -N 2
#SBATCH -o job.%j.out
#SBATCH -p E5-2690v4
#SBATCH -q E5-2690v4-batch
#SBATCH --ntasks-per-node=28
#SBATCH --threads-per-core=1
#SBATCH --time=04:00:00
#SBATCH --mem=16G

# Details of your input and output files
INPUTFILE=micromixer_cluster.mph
OUTPUTFILE=micromixer.out

# Load our COMSOL module
module purge
module load COMSOL/5.2a
module load intel-mpi/2018

# Create the temporary directory
TMPDIR="/tmp1/comsol"
mkdir -p $TMPDIR

# Now run COMSOL in batch mode with the input and output detailed above.
comsol -clustersimple batch \
-inputfile $INPUTFILE \
-outputfile $OUTPUTFILE \
-tmpdir $TMPDIR \
-mpiroot $MPIROOT -mpi intel -mpifabrics shm:dapl
</code>

----

===== COMSOL 5.2a =====

==== Submit job ====

Submit with ''%%sbatch%%'', where ''%%job_xyz.sh%%'' is either ''%%job_smp.sh%%'' or ''%%job_mpi.sh%%'' from above:

<code>
sbatch job_xyz.sh
</code>
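
Job progress can be followed with the usual Slurm commands:

<code>
squeue -u $USER    # is the job pending or running?
scancel <jobid>    # cancel the job if something went wrong
</code>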

----

===== COMSOL 5.2a =====

==== Restarting jobs ====

To continue a calculation from the last saved state, add the options:

<code>
-recover -continue
</code>
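
For example, appended to the batch command of the threaded job script above (a sketch; all other arguments stay as before):

<code>
comsol batch -recover -continue -np $SLURM_NTASKS \
    -inputfile $INPUTFILE -outputfile $OUTPUTFILE -tmpdir $TMPDIR
</code>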