# Singapore gromacs_lignocellulose_job_script

###### tags: `Gromacs`

### gromacs with openmpi 3.1.0, 32 nodes

```=
#!/bin/bash
#PBS -N gromacs_lignocellulose_openmpi3.1.0_normal
#PBS -q normal
#PBS -l select=32:ncpus=24:mpiprocs=24:ompthreads=1:mem=96gb
#PBS -l walltime=00:10:00
#PBS -P 50000033
#PBS -j oe
#PBS -o test1.txt

echo "start"
date

# Clear the default toolchain modules before setting up the custom stack
module unload binutils/2.26 gmp/6.1.0 mpfr/3.1.4 mpc/1.0.3 isl/0.14 gcc/4.9.3 intelmpi gromacs/5.1.2/gcc493/impi
module load gromacs

export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/openmpi3.1.0/install/bin:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/openmpi3.1.0/install/lib:$LD_LIBRARY_PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/bin:$PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$LD_LIBRARY_PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gromacswithopenmpi/bin:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gromacswithopenmpi/lib64:$LD_LIBRARY_PATH

# Load the MKL runtime environment
cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/mkl/oneapi/mkl/2021.3.0/env
source vars.sh

which mpirun
mpirun --version

cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/inputtest/input/lignocellulose/32nodes/run

# Build an Open MPI hostfile with one "host slots=24" line per node,
# sampling every 24th line of the PBS nodefile (one hostname per node)
node=32
ncpu=24
rm -f hostfile1
for (( i=1; i<=$node; i=i+1 ))
do
    S[${i}]=`sed -n "$(($i*$ncpu))p" $PBS_NODEFILE`
    echo "${S[$i]} slots=$ncpu" >> hostfile1
done

# 32 nodes x 24 ranks = 768 MPI ranks, one OpenMP thread each
time mpirun --hostfile hostfile1 -np 768 --bynode mdrun_mpi -v -s lignocellulose-rf.tpr -ntomp 1 -nsteps 100000 -noconfout

date
echo "end"
```
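For reference, the per-node sampling above can also be done without hard-coding the node count. A minimal sketch, assuming PBS Pro's usual nodefile layout where each hostname appears once per allocated core (so `sort -u` yields exactly one line per node):

```=
#!/bin/bash
# Sketch only: build the same "host slots=N" hostfile without a counted loop.
# Assumes $PBS_NODEFILE repeats each node's hostname once per core.
ncpu=24                                   # matches ncpus=24 in the select line
rm -f hostfile1
sort -u "$PBS_NODEFILE" | while read -r host
do
    echo "$host slots=$ncpu" >> hostfile1
done
```

Node order may differ from the counted loop's output, which only affects which host receives the first ranks, not the slot accounting.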
The same launch for the STMV benchmark input:

```=
time mpirun -np 24 --map-by numa mdrun_mpi -v -s stmv.tpr -nsteps 100000 -noconfout
```

### gromacs with openmpi 3.1.0, 16 nodes

```=
#!/bin/bash
#PBS -N gromacs_lignocellulose_openmpi3.1.0_normal
#PBS -q normal
#PBS -l select=16:ncpus=24:mpiprocs=24:ompthreads=1:mem=96gb
#PBS -l walltime=00:10:00
#PBS -P 50000033
#PBS -j oe
#PBS -o pbsout_openmpi3.1.0_16nodes_24_ncp_24mpi_2omp.txt

echo "start"
date

export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/bin:$PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$LD_LIBRARY_PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gromacs_openmpi3.1.0/install/bin:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gromacs_openmpi3.1.0/install/lib64:$LD_LIBRARY_PATH

# Load the MKL runtime environment
cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/mkl/oneapi/mkl/2021.3.0/env
source vars.sh

export PATH=$PATH:/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/openmpi3.1.0/install/bin
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/openmpi3.1.0/install/lib:$LD_LIBRARY_PATH

which mpirun
mpirun --version

cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/inputtest/input/lignocellulose/16nodes/run

# 16 nodes x 24 ranks = 384 MPI ranks
time mpirun -np 384 --map-by numa mdrun_mpi -v -s lignocellulose-rf.tpr -ntomp 1 -nsteps 100000 -noconfout

date
echo "end"
```

### gromacs with hpcx openmpi, 8 nodes

```=
#!/bin/bash
#PBS -N gromacs_lignocellulose_test_normal
#PBS -q normal
#PBS -l select=8:ncpus=24:mpiprocs=24:ompthreads=2
#PBS -l walltime=24:00:00
#PBS -P 50000033
#PBS -j oe
#PBS -o pbsout_gromac_8nodes_24_ncp_24mpi_2omp.txt

echo "start"
date

export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/bin:$PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$LD_LIBRARY_PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/hpcx/hpcx-v2.4.0-gcc-MLNX_OFED_LINUX-4.4-2.0.7.0-redhat6.10-x86_64/sources/openmpi/bin:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/hpcx/hpcx-v2.4.0-gcc-MLNX_OFED_LINUX-4.4-2.0.7.0-redhat6.10-x86_64/sources/openmpi/lib:$LD_LIBRARY_PATH

# Load the MKL runtime environment
cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/mkl/intel/oneapi/mkl/2021.3.0/env
source vars.sh

export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gromacswithopenmpi/install/bin:$PATH

which mpirun
mpirun --version

cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/inputtest/input/lignocellulose/8nodes/run

# 8 nodes x 24 ranks = 192 MPI ranks, 24 ranks per node
time mpirun -np 192 -N 24 mdrun_mpi -v -s lignocellulose-rf.tpr -nsteps 100000 -noconfout

date
echo "end"
```

### gromacs_intelmpi

```=
#!/bin/bash
#PBS -N gromacs_lignocellulose_intelmpi_normal
#PBS -q normal
#PBS -l select=16:ncpus=24:mpiprocs=24:ompthreads=2
#PBS -l walltime=24:00:00
#PBS -P 50000033
#PBS -j oe
#PBS -o pbsout_gromac_4nodes_24ncp_24mpi-2omp.txt

echo "start"
date

module load intel/19.0.0.117

# Intel MPI compiler wrappers (used when building GROMACS with this stack)
export CC=mpiicc
export CXX=mpiicpc
export F77=mpiifort
export F90=mpiifort
export FC=mpiifort

export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/cmake/cmake320/bin:$PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/bin:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$LD_LIBRARY_PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacswithintelcc/bin:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacswithintelcc/lib64:$LD_LIBRARY_PATH

which mpirun
mpirun --version

cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/input/lignocellulose/4nodes/run

# -maxh 0.1 caps the run at six minutes; -resethway resets mdrun's timers
# halfway through for cleaner benchmark numbers
time mpirun mdrun_mpi -v -s lignocellulose-rf.tpr -maxh 0.1 -resethway -nsteps 10000 -noconfout

date
echo "end"
```
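The compiler exports in this script (`CC=mpiicc` and friends) only take effect when GROMACS is configured, not at run time. For context, a minimal configure sketch: `GMX_MPI`, `GMX_BUILD_MDRUN_ONLY`, and `GMX_FFT_LIBRARY` are standard GROMACS 5.x-era CMake options, but the source and install paths below are placeholders, not taken from these scripts:

```=
# Sketch only: configure and build an MPI-enabled mdrun_mpi with the Intel MPI wrappers.
# /path/to/... are placeholders; adjust to the actual source tree and install prefix.
cd /path/to/gromacs-source
mkdir -p build
cd build
CC=mpiicc CXX=mpiicpc cmake .. \
    -DGMX_MPI=ON \
    -DGMX_BUILD_MDRUN_ONLY=ON \
    -DGMX_FFT_LIBRARY=mkl \
    -DCMAKE_INSTALL_PREFIX=/path/to/gromacswithintelcc
make -j 8
make install
```

With `GMX_MPI=ON` and `GMX_BUILD_MDRUN_ONLY=ON`, the 5.x build produces the `mdrun_mpi` binary that the job scripts invoke; `GMX_FFT_LIBRARY=mkl` matches the MKL environment the scripts source.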
### gromacs build with new open mpi

```=
#!/bin/bash
#PBS -N gromacs_lignocellulose_test_normal
#PBS -q normal
#PBS -l select=16:ncpus=24:mpiprocs=24:ompthreads=2
#PBS -l walltime=24:00:00
#PBS -P 50000033
#PBS -j oe
#PBS -o pbsout_gromac_4nodes_16ppn_normal.txt

echo "start"
date

export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/hpcx/hpcx-v2.4.0-gcc-MLNX_OFED_LINUX-4.4-2.0.7.0-redhat6.10-x86_64/sources/openmpi/bin:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/hpcx/hpcx-v2.4.0-gcc-MLNX_OFED_LINUX-4.4-2.0.7.0-redhat6.10-x86_64/sources/openmpi/lib:$LD_LIBRARY_PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/bin:$PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$LD_LIBRARY_PATH

# Load the MKL runtime environment
cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/mkl/intel/oneapi/mkl/2021.3.0/env
source vars.sh

export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gromacswithopenmpi/install/bin:$PATH

which mpirun
mpirun --version

cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/inputtest/input/lignocellulose/16nodes/run

time mpirun mdrun_mpi -v -s lignocellulose-rf.tpr -maxh 0.1 -resethway -nsteps 10000 -noconfout

date
echo "end"
```

### gromacs build with hpcx

```=
#!/bin/bash
#PBS -N gromacs_lignocellulose_test_normal
#PBS -q normal
#PBS -l select=16:ncpus=24:mpiprocs=24:ompthreads=2
#PBS -l walltime=24:00:00
#PBS -P 50000033
#PBS -j oe
#PBS -o pbsout_gromac_4nodes_16ppn_normal.txt

echo "start"
date

export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/bin:$PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$LD_LIBRARY_PATH

# Load HPC-X's Open MPI through its bundled modulefiles
cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/hpcx/hpcx-v2.4.0-gcc-MLNX_OFED_LINUX-4.4-2.0.7.0-redhat6.10-x86_64/modulefiles
module use $PWD
module load hpcx-ompi

# Load the MKL runtime environment
cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/mkl/intel/oneapi/mkl/2021.3.0/env
source vars.sh

export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gromacswithopenmpi/install/bin:$PATH

which mpirun
mpirun --version

cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/inputtest/input/lignocellulose/16nodes/run

# Force the UCX PML and pin traffic to the first Mellanox HCA port
time mpirun -np 384 -mca pml ucx -x UCX_NET_DEVICES=mlx5_0:1 mdrun_mpi -v -s lignocellulose-rf.tpr -nsteps 10000 -noconfout

date
echo "end"
```

### gromacs build with openmpi medium normal

```=
#!/bin/bash
#PBS -N gromacs_lignocellulose_test_normal
#PBS -q normal
#PBS -l select=2:ncpus=16:mpiprocs=1:ompthreads=2
#PBS -l walltime=24:00:00
#PBS -P 50000033
#PBS -j oe
#PBS -o pbsout_gromac_2nodes_16ppn_normal.txt

echo "start"
date

export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/cmake/cmake320/bin:$PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/bin:$PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib:$LD_LIBRARY_PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gcc/lib64:$LD_LIBRARY_PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/openmpi/openmpi401/bin:$PATH
export LD_LIBRARY_PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/openmpi/openmpi401/lib:$LD_LIBRARY_PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/gromacswithopenmpi/install/bin:$PATH
export PATH=/home/users/industry/ai-hpc/apacsc19/scratch/gromacs/ucx/ucx/bin:$PATH

# Load the MKL runtime environment
cd /home/users/industry/ai-hpc/apacsc19/scratch/gromacs/mkl/intel/oneapi/mkl/2021.3.0/env
source vars.sh

which mpirun
mpirun --version

cd

# Launch template (variables left undefined in the original):
# mpirun -n $NPROCS --hostfile $HOSTFILE --map-by ppr:$NUMBER_PROCESSES_PER_NUMA:numa:pe=$NUMBER_THREADS_PER_PROCESS -report-bindings $MPI_EXECUTABLE

time mpirun -np 128 mdrun_mpi -v -s lignocellulose-rf.tpr -nsteps 100000 -noconfout

date
echo "end"
```
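The commented launch template above references variables it never defines. A minimal sketch of one way to populate them from the PBS environment; the variable names come from the template, while every value below is an assumption to be tuned per job:

```=
#!/bin/bash
# Sketch only: fill in the launch-template variables from the PBS environment.
NPROCS=$(wc -l < "$PBS_NODEFILE")         # one rank per nodefile line
HOSTFILE="$PBS_NODEFILE"                  # reuse the PBS nodefile directly
NUMBER_PROCESSES_PER_NUMA=4               # assumed; depends on node topology
NUMBER_THREADS_PER_PROCESS=2              # matches ompthreads=2 in the select line
MPI_EXECUTABLE="mdrun_mpi -v -s lignocellulose-rf.tpr -nsteps 100000 -noconfout"

# -report-bindings prints each rank's core binding so the mapping can be verified
mpirun -n "$NPROCS" --hostfile "$HOSTFILE" \
    --map-by ppr:$NUMBER_PROCESSES_PER_NUMA:numa:pe=$NUMBER_THREADS_PER_PROCESS \
    -report-bindings $MPI_EXECUTABLE
```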