A semantic segmentation (pixel-wise classification) network for separating cosmic-ray and beam-particle hits in the prototype DUNE (ProtoDUNE) detector.
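For orientation, here is a minimal sketch of what pixel-wise classification looks like in Keras. The 320x320 input size follows the comment below; the layer choices and the three output classes are illustrative assumptions, not this repository's architecture.

# Illustrative only: a toy pixel-wise classifier, NOT the network in this repo.
from keras.models import Model
from keras.layers import Input, Conv2D

inputs = Input(shape=(320, 320, 1))                        # single-channel detector image (assumed)
x = Conv2D(32, (3, 3), padding='same', activation='relu')(inputs)
x = Conv2D(32, (3, 3), padding='same', activation='relu')(x)
outputs = Conv2D(3, (1, 1), activation='softmax')(x)       # per-pixel scores over 3 assumed classes
model = Model(inputs=inputs, outputs=outputs)
model.compile(optimizer='adam', loss='categorical_crossentropy')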
git clone https://github.com/ArbinTimilsina/DeepLearningWithProtoDUNE.git
cd DeepLearningWithProtoDUNE
# 320x320 inputs; min 2000 beam hits
wget -O input_files.zip https://www.dropbox.com/sh/a00fbuye3i1c0sj/AACeI2l-iEpIoeDbDBtogjJKa?dl=1
unzip input_files.zip -d input_files
rm input_files.zip
python train_model.py --help
# Example
python train_model.py -o Development -e 5
Details of the available options can be found in the configuration file.
python analyze_model.py --help
# Example
python analyze_model.py -p 5 -s Development
conda create --name envDeepLearningWithProtoDUNE python=3.5
conda activate envDeepLearningWithProtoDUNE
pip install --upgrade pip
pip install -r requirements/cpu_requirements.txt
conda install pydot graphviz
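To confirm the environment is usable, a quick sanity check (assuming the requirements file installs Keras and TensorFlow):

python -c "import keras, tensorflow; print(keras.__version__, tensorflow.__version__)"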
singularity pull --name DeepLearningWithProtoDUNE.img shub://ArbinTimilsina/Base-Singularity:deeplearningwithprotodune
# If using GPUs, don't forget the --nv option
singularity exec --nv DeepLearningWithProtoDUNE.img python train_model.py -o Development -e 5
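To verify that TensorFlow can actually see the GPU inside the container, a generic check (not specific to this repository) is:

singularity exec --nv DeepLearningWithProtoDUNE.img python -c "from tensorflow.python.client import device_lib; print(device_lib.list_local_devices())"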
KERAS_BACKEND=tensorflow python -c "from keras import backend"
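The import alone reports the active backend on stderr; to print it explicitly:

KERAS_BACKEND=tensorflow python -c "from keras import backend as K; print(K.backend())"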
python calculate_weights.py
It will run over the default training files listed in the configuration. The median weight for each class will be saved to plots/weights_median.pdf.
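For reference, a minimal sketch of median-frequency class weighting, a common recipe for imbalanced segmentation; whether calculate_weights.py uses exactly this formula is an assumption, and the labels below are a toy example.

# Sketch of median-frequency class weighting (assumed, not confirmed from the source).
import numpy as np

labels = np.random.randint(0, 3, size=(10, 320, 320))       # toy per-pixel labels, 3 classes
freq = np.array([(labels == c).mean() for c in range(3)])   # pixel frequency per class
weights = np.median(freq) / freq                            # rarer classes get larger weights
print(weights)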
# For 10 events
python plot_events.py --events 10
# Create an IPython kernel for the environment
python -m ipykernel install --user --name envDeepLearningWithProtoDUNE --display-name "envDeepLearningWithProtoDUNE"
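To confirm the kernel was registered:

jupyter kernelspec list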
# Open the notebook
jupyter notebook miscellaneous/model_creation_playground.ipynb
# Note: Make sure to change the kernel to envDeepLearningWithProtoDUNE using the drop-down menu (Kernel > Change kernel > envDeepLearningWithProtoDUNE)