Differences
This shows you the differences between two versions of the page.
Both sides previous revision Previous revision Next revision | Previous revisionLast revisionBoth sides next revision | ||
doku:molecular_dynamics [2020/04/23 08:14] – ir | doku:molecular_dynamics [2020/04/23 19:21] – ir | ||
---|---|---|---|
Line 1: | Line 1: | ||
====== Molecular Dynamics ====== | ====== Molecular Dynamics ====== | ||
- | [[pandoc: | + | COMSOL, LAMMPS, VASP, Wien2k |
+ | |||
+ | ====== VASP ====== | ||
+ | |||
+ | ===== VASP ===== | ||
+ | |||
+ | ==== Modules ==== | ||
+ | |||
+ | < | ||
+ | module purge | ||
+ | module load autotools | ||
+ | module load gnu7/7.2.0 | ||
+ | module load openmpi3/ | ||
+ | module load openblas/ | ||
+ | module load scalapack/ | ||
+ | module load fftw/3.3.6 | ||
+ | module load prun | ||
+ | |||
+ | </ | ||
+ | |||
+ | ---- | ||
+ | |||
+ | ===== VASP ===== | ||
+ | |||
+ | ==== vasp.5.lib ==== | ||
+ | |||
+ | < | ||
+ | ln -s makefile.linux_gfortran makefile | ||
+ | export CC=gcc | ||
+ | make | ||
+ | </ | ||
+ | |||
+ | ---- | ||
+ | |||
+ | ===== VASP 5.4.1/5.4.4 ===== | ||
+ | |||
+ | ==== makefile.include ==== | ||
+ | |||
+ | < | ||
+ | OFLAG = -O2 -march=broadwell | ||
+ | |||
+ | LIBDIR | ||
+ | |||
+ | BLAS = -L$(LIBDIR) -lopenblas | ||
+ | |||
+ | LAPACK | ||
+ | |||
+ | BLACS = | ||
+ | |||
+ | SCALAPACK | ||
+ | |||
+ | FFTW ?= / | ||
+ | |||
+ | </ | ||
+ | |||
+ | ---- | ||
+ | |||
+ | ===== VASP 5.4.1/5.4.4 ===== | ||
+ | |||
+ | ==== Compile ==== | ||
+ | |||
+ | < | ||
+ | make all | ||
+ | </ | ||
+ | ==== If you have to recompile ==== | ||
+ | |||
+ | < | ||
+ | make veryclean | ||
+ | make all | ||
+ | </ | ||
+ | ==== To build only specific version ==== | ||
+ | |||
+ | < | ||
+ | make std | ||
+ | </ | ||
+ | |||
+ | ---- | ||
+ | |||
+ | ===== VASP 5.4.1/5.4.4 ===== | ||
+ | |||
+ | ==== Job script ==== | ||
+ | |||
+ | '' | ||
+ | |||
+ | < | ||
+ | # | ||
+ | # | ||
+ | # | ||
+ | #SBATCH -N 2 | ||
+ | #SBATCH -o job.%j.out | ||
+ | #SBATCH -p E5-2690v4 | ||
+ | #SBATCH -q E5-2690v4-batch | ||
+ | #SBATCH --ntasks-per-node=28 | ||
+ | #SBATCH --threads-per-core=1 | ||
+ | #SBATCH --mem=16G | ||
+ | |||
+ | export OMP_NUM_THREADS=1 | ||
+ | |||
+ | exe=/ | ||
+ | |||
+ | time mpirun -np $SLURM_NPROCS $exe | ||
+ | |||
+ | </ | ||
+ | |||
+ | ---- | ||
+ | |||
+ | ====== LAMMPS ====== | ||
+ | |||
+ | ===== LAMMPS ===== | ||
+ | |||
+ | '' | ||
+ | |||
+ | < | ||
+ | # | ||
+ | # | ||
+ | #SBATCH -J lammps | ||
+ | #SBATCH -N 2 | ||
+ | #SBATCH -o job.%j.out | ||
+ | #SBATCH -p E5-2690v4 | ||
+ | #SBATCH -q E5-2690v4-batch | ||
+ | #SBATCH --ntasks-per-node=28 | ||
+ | #SBATCH --threads-per-core=1 | ||
+ | #SBATCH --mem=16G | ||
+ | |||
+ | module purge | ||
+ | module load lammps | ||
+ | |||
+ | mpirun -np $SLURM_NTASKS lmp_mul -in ./in.put | ||
+ | </ | ||
+ | |||
+ | ---- | ||
+ | |||
+ | ====== Wien2k ====== | ||
+ | |||
+ | |||
+ | ---- | ||
+ | |||
+ | ===== Wien2k ===== | ||
+ | |||
+ | Example job script in: | ||
+ | |||
+ | < | ||
+ | / | ||
+ | </ | ||
+ | |||
+ | ---- | ||
+ | |||
+ | ====== COMSOL ====== | ||
+ | |||
+ | |||
+ | ---- | ||
+ | |||
+ | ===== COMSOL 5.2a ===== | ||
+ | |||
+ | ==== Module ==== | ||
+ | |||
+ | < | ||
+ | module load COMSOL/ | ||
+ | </ | ||
+ | |||
+ | ---- | ||
+ | |||
+ | ===== COMSOL 5.2a ===== | ||
+ | |||
+ | ==== Job script threaded version (no MPI) ==== | ||
+ | |||
+ | You can use only one node in this case. Create a job file called '' | ||
+ | |||
+ | '' | ||
+ | |||
+ | < | ||
+ | # | ||
+ | # | ||
+ | #SBATCH -J comsol | ||
+ | #SBATCH -N 1 | ||
+ | #SBATCH -o job.%j.out | ||
+ | #SBATCH -p E5-2690v4 | ||
+ | #SBATCH -q E5-2690v4-batch | ||
+ | #SBATCH --ntasks-per-node=28 | ||
+ | #SBATCH --threads-per-core=1 | ||
+ | #SBATCH --time=04: | ||
+ | #SBATCH --mem=16G | ||
+ | |||
+ | # Details of your input and output files | ||
+ | INPUTFILE=micromixer_cluster.mph | ||
+ | OUTPUTFILE=micromixer.out | ||
+ | |||
+ | # Load our comsol module | ||
+ | module purge | ||
+ | module load COMSOL/ | ||
+ | |||
+ | # create tmpdir | ||
+ | TMPDIR="/ | ||
+ | mkdir -p $TMPDIR | ||
+ | |||
+ | ## Now, run COMSOL in batch mode with the input and output detailed above. | ||
+ | comsol batch -np $SLURM_NTASKS -inputfile $INPUTFILE -outputfile $OUTPUTFILE -tmpdir $TMPDIR | ||
+ | </ | ||
+ | |||
+ | |||
+ | ---- | ||
+ | |||
+ | ===== COMSOL 5.2a ===== | ||
+ | |||
+ | ==== Job script with MPI ==== | ||
+ | |||
+ | You can use more than one node in this case. Create a job file called '' | ||
+ | |||
+ | '' | ||
+ | |||
+ | < | ||
+ | # | ||
+ | # | ||
+ | #SBATCH -J comsol | ||
+ | #SBATCH -N 2 | ||
+ | #SBATCH -o job.%j.out | ||
+ | #SBATCH -p E5-2690v4 | ||
+ | #SBATCH -q E5-2690v4-batch | ||
+ | #SBATCH --ntasks-per-node=28 | ||
+ | #SBATCH --threads-per-core=1 | ||
+ | #SBATCH --time=04: | ||
+ | #SBATCH --mem=16G | ||
+ | |||
+ | # Details of your input and output files | ||
+ | INPUTFILE=micromixer_cluster.mph | ||
+ | OUTPUTFILE=micromixer.out | ||
+ | |||
+ | # Load our comsol module | ||
+ | module purge | ||
+ | module load COMSOL/ | ||
+ | module load intel-mpi/ | ||
+ | |||
+ | # create tmpdir | ||
+ | TMPDIR="/ | ||
+ | mkdir -p $TMPDIR | ||
+ | |||
+ | ## Now, run COMSOL in batch mode with the input and output detailed above. | ||
+ | comsol -clustersimple batch \ | ||
+ | -inputfile $INPUTFILE \ | ||
+ | -outputfile $OUTPUTFILE \ | ||
+ | -tmpdir $TMPDIR \ | ||
+ | -mpiroot $MPIROOT -mpi intel -mpifabrics shm:dapl | ||
+ | </ | ||
+ | |||
+ | |||
+ | ---- | ||
+ | |||
+ | ===== COMSOL 5.2a ===== | ||
+ | |||
+ | ==== Submit job ==== | ||
+ | |||
+ | < | ||
+ | sbatch job_xyz.sh | ||
+ | </ | ||
+ | |||
+ | ---- | ||
+ | |||
+ | ===== COMSOL 5.2a ===== | ||
+ | |||
+ | ==== Restarting jobs ==== | ||
+ | |||
+ | To continue your calculation from the last saved state onwards, use the options: | ||
+ | |||
+ | < | ||
+ | -recover -continue | ||
+ | </ |