# EXARL on Perlmutter

*Wiki page — last edited by Christine Sweeney, Nov 8, 2023 (5 revisions).*
# Load the site-provided Python module (NERSC Perlmutter).
module load python
Then clone the EXARL repository and set up its conda environment:
# Clone EXARL together with its git submodules (--recursive).
git clone --recursive https://github.com/exalearn/EXARL.git
# Create a dedicated conda environment pinned to Python 3.9 and activate it.
conda create --name exarl python=3.9
source activate exarl
# GPU-enabled TensorFlow from the 'anaconda' channel (-y: answer prompts yes).
conda install -c anaconda tensorflow-gpu -y
# Remaining runtime and development dependencies from conda-forge.
conda install -c conda-forge mpi4py gym ase lmfit scikit-learn pandas numba pybind11 pytest flake8 -y
# plotille: terminal plotting library, installed via pip.
pip install plotille
# Make the EXARL package importable. NOTE: this line is a template, not
# literal shell — replace <EXARL top level dir> with the absolute path of
# the clone created above before running it.
export PYTHONPATH=<EXARL top level dir>:$PYTHONPATH
#!/bin/bash
#
# SLURM batch script: launch a single-task EXARL training run on a
# Perlmutter GPU node. Comments are placed on their own lines because
# sbatch parses everything after "#SBATCH" as options.
#
# Request a GPU node (Perlmutter 'gpu' constraint).
#SBATCH -C gpu
# One task total.
#SBATCH -n 1
# Charge to allocation m3897 (the _g suffix is the GPU sub-allocation).
#SBATCH -A m3897_g
# One GPU per task.
#SBATCH --gpus-per-task=1
# Ten CPU cores per task.
#SBATCH -c 10
# Wall-clock limit: 360 minutes (6 hours).
#SBATCH -t 360
# Job name, and stdout file where %j expands to the job ID.
#SBATCH -J ExaRL-1g-10
#SBATCH -o ExaRL-1g-10.%j.out
# Request the 'cfs' license (NERSC community file system access).
#SBATCH -L cfs
# Bind the task to GPU 0.
#SBATCH --gpu-bind=map_gpu:0

# Trace each command (-x) and abort on the first failing command (-e).
set -xe

# Bind tasks to physical cores.
export SLURM_CPU_BIND="cores"
# Make the EXARL checkout on $SCRATCH importable by Python.
export PYTHONPATH="${PYTHONPATH}:${SCRATCH}/EXARL"

# Run the EXARL driver (python3 on a directory executes its __main__.py):
# 100 episodes of 100 steps with a single learner process.
srun python3 exarl/driver/ --env ExaBoosterDiscrete-v0 --n_episodes 100 --n_steps 100 --learner_procs 1

# NOTE(review): output_dir is assigned after srun and is unused in the
# visible portion of this script — presumably consumed by commands below
# this excerpt; verify against the full page.
output_dir="${PWD}/exarl-output-${SLURM_JOB_ID}"