#!/bin/bash
#SBATCH --job-name=TiO_1L_3
#SBATCH --partition=q28,q36,q40,qgpu
#SBATCH --mem=10G
#SBATCH --nodes=1
#SBATCH --time=3:00:00
#SBATCH --ntasks-per-node=5
#SBATCH --cpus-per-task=1
#SBATCH --no-requeue
#SBATCH --array=0-29%30
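# 30 independent array tasks (indices 0-29), each using 5 MPI ranks on one node;
# the %30 limit allows all tasks to run concurrently if the queues have room.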


echo "========= Job started  at `date` =========="
# Go to the directory where this job was submitted
cd $SLURM_SUBMIT_DIR

# Load the GPAW (Python 3), GOFEE and DFTB environments from the user's setup files
source /home/mkb/.gpaw_py3
source /home/mkb/.GOFEE
source /home/mkb/.dftb

# Create a separate run directory for this array task
path0=$SLURM_SUBMIT_DIR/runs3
base_path=$path0/run$SLURM_ARRAY_TASK_ID
mkdir -p "$base_path"

# Copy the search script and input structures into the run directory
cp run_search.py "$base_path"
cp slab.traj "$base_path"
cp init.traj "$base_path"

cd "$base_path"
# Run the search script in parallel with gpaw-python; output is written to search.log in the run directory
mpiexec --mca mpi_warn_on_fork 0 gpaw-python run_search.py > search.log
echo "========= Job finished at `date` =========="