
Commit

Fix some security issues and add Bandit to our CIs (mne-tools#11937)
Co-authored-by: Eric Larson <[email protected]>
drammock and larsoner authored Sep 2, 2023
1 parent db07d55 commit 4afa3f4
Showing 28 changed files with 113 additions and 87 deletions.
11 changes: 11 additions & 0 deletions .github/workflows/tests.yml
@@ -26,6 +26,17 @@ jobs:
- uses: psf/black@stable
- uses: pre-commit/[email protected]

bandit:
name: Bandit
needs: style
runs-on: ubuntu-latest
steps:
- uses: davidslusser/[email protected]
with:
src: "mne"
options: "-c pyproject.toml -ll -r"
pip_install_command: "pip install bandit[toml]"

pytest:
name: '${{ matrix.os }} / ${{ matrix.kind }} / ${{ matrix.python }}'
needs: style
8 changes: 7 additions & 1 deletion README.rst
@@ -1,6 +1,6 @@
.. -*- mode: rst -*-
|PyPI|_ |conda-forge|_ |Zenodo|_ |Discourse|_ |Codecov|_
|PyPI|_ |conda-forge|_ |Zenodo|_ |Discourse|_ |Codecov|_ |Bandit|_ |OpenSSF|_

|MNE|_

@@ -19,6 +19,12 @@
.. |Codecov| image:: https://img.shields.io/codecov/c/github/mne-tools/mne-python?label=Coverage
.. _Codecov: https://codecov.io/gh/mne-tools/mne-python

.. |Bandit| image:: https://img.shields.io/badge/security-bandit-yellow.svg
.. _Bandit: https://github.com/PyCQA/bandit

.. |OpenSSF| image:: https://www.bestpractices.dev/projects/7783/badge
.. _OpenSSF: https://www.bestpractices.dev/projects/7783

.. |MNE| image:: https://mne.tools/stable/_static/mne_logo.svg
.. _MNE: https://mne.tools/dev/

1 change: 1 addition & 0 deletions doc/changes/devel.rst
@@ -46,3 +46,4 @@ Bugs
API changes
~~~~~~~~~~~
- ``mne.preprocessing.apply_maxfilter`` and ``mne maxfilter`` have been deprecated and will be removed in 1.7. Use :func:`mne.preprocessing.maxwell_filter` (see :ref:`this tutorial <tut-artifact-sss>`) in Python or the command-line utility from MEGIN ``maxfilter`` and :func:`mne.bem.fit_sphere_to_headshape` instead (:gh:`11938` by `Eric Larson`_)
- :func:`mne.io.kit.read_mrk` reading pickled files is deprecated; use something like ``np.savetxt(fid, pts, delimiter="\t", newline="\n")`` to save your points instead (:gh:`11937` by `Eric Larson`_)
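
A rough sketch of the migration that changelog entry suggests (the file names and the round trip through ``read_mrk`` are illustrative assumptions, not part of this diff):

    import numpy as np
    from mne.io.kit import read_mrk

    # Hypothetical paths: read the marker points once from the legacy pickle
    # (this now emits a FutureWarning) and re-save them as plain text.
    pts = read_mrk("legacy_markers.pickled")
    with open("markers.txt", "w") as fid:
        np.savetxt(fid, pts, delimiter="\t", newline="\n")

    # The tab-separated text file can then be read back directly.
    pts_roundtrip = read_mrk("markers.txt")
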
3 changes: 2 additions & 1 deletion environment.yml
@@ -31,7 +31,7 @@ dependencies:
- imageio-ffmpeg>=0.4.1
- vtk>=9.2
- traitlets
- pyvista>=0.32,!=0.35.2,!=0.38.0,!=0.38.1,!=0.38.2,!=0.38.3,!=0.38.4,!=0.38.5,!=0.38.6
- pyvista>=0.32,!=0.35.2,!=0.38.0,!=0.38.1,!=0.38.2,!=0.38.3,!=0.38.4,!=0.38.5,!=0.38.6,!=0.42.0
- pyvistaqt>=0.4
- qdarkstyle
- darkdetect
@@ -58,3 +58,4 @@ dependencies:
- pybv
- mamba
- lazy_loader
- defusedxml
9 changes: 4 additions & 5 deletions mne/_fiff/tests/test_meas_info.py
@@ -3,7 +3,6 @@
#
# License: BSD-3-Clause

import hashlib
import pickle
from datetime import datetime, timedelta, timezone, date
from pathlib import Path
@@ -70,7 +69,7 @@
from mne._fiff import meas_info
from mne._fiff._digitization import _make_dig_points, DigPoint
from mne.transforms import Transform
from mne.utils import catch_logging, assert_object_equal, _record_warnings
from mne.utils import catch_logging, assert_object_equal, _empty_hash, _record_warnings

root_dir = Path(__file__).parent.parent.parent
fiducials_fname = root_dir / "data" / "fsaverage" / "fsaverage-fiducials.fif"
@@ -314,14 +313,14 @@ def test_read_write_info(tmp_path):
assert_array_equal(info["meas_id"]["machid"], meas_id["machid"])

# Test that writing twice produces the same file
m1 = hashlib.md5()
m1 = _empty_hash()
with open(temp_file, "rb") as fid:
m1.update(fid.read())
m1 = m1.hexdigest()
temp_file_2 = tmp_path / "info2.fif"
assert temp_file_2 != temp_file
write_info(temp_file_2, info)
m2 = hashlib.md5()
m2 = _empty_hash()
with open(str(temp_file_2), "rb") as fid:
m2.update(fid.read())
m2 = m2.hexdigest()
@@ -1073,7 +1072,7 @@ def test_pickle(fname_info, unlocked):
assert not info._unlocked
info._unlocked = unlocked
data = pickle.dumps(info)
info_un = pickle.loads(data)
info_un = pickle.loads(data) # nosec B301
assert isinstance(info_un, Info)
assert_object_equal(info, info_un)
assert info_un._unlocked == unlocked
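
The hunks above swap bare ``hashlib.md5()`` calls for a new ``mne.utils._empty_hash`` helper whose implementation is not part of this diff. A plausible reading (an assumption, shown only as a sketch) is that it builds the same digest object while marking it as non-cryptographic, which is what Bandit's weak-hash check (B324) and FIPS-restricted builds care about:

    import hashlib

    def _empty_hash(kind="md5"):
        # usedforsecurity=False (Python 3.9+) documents that the digest is only
        # used to compare file contents, not for anything security-sensitive.
        return getattr(hashlib, kind)(usedforsecurity=False)

    h = _empty_hash()
    h.update(b"some file contents")
    print(h.hexdigest())
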
2 changes: 1 addition & 1 deletion mne/channels/_dig_montage_utils.py
@@ -11,7 +11,7 @@
#
# License: Simplified BSD

import xml.etree.ElementTree as ElementTree
from defusedxml import ElementTree

import numpy as np

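
This one-line import swap repeats in several files below. ``defusedxml.ElementTree`` is intended as a drop-in replacement for the standard-library parser, so the downstream parsing code needs no changes; a minimal illustration (the XML payload here is made up):

    from defusedxml import ElementTree

    # Code written against xml.etree.ElementTree usually works unchanged;
    # defusedxml simply rejects dangerous constructs (entity expansion,
    # external entities) instead of expanding them.
    root = ElementTree.fromstring("<sensors><sensor name='E1'/></sensors>")
    print([el.get("name") for el in root.iter("sensor")])  # ['E1']
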
2 changes: 1 addition & 1 deletion mne/channels/_standard_montage_utils.py
@@ -9,7 +9,7 @@
import numpy as np

from functools import partial
import xml.etree.ElementTree as ElementTree
from defusedxml import ElementTree

from .montage import make_dig_montage
from .._freesurfer import get_mni_fiducials
11 changes: 5 additions & 6 deletions mne/datasets/_fetch.py
@@ -297,13 +297,12 @@ def fetch_dataset(
if check_version and (
_compare_version(data_version, "<", mne_version.strip(".git"))
):
# OK to `nosec` because it's a false positive (misidentified as SQL)
warn(
"The {name} dataset (version {current}) is older than "
"mne-python (version {newest}). If the examples fail, "
"you may need to update the {name} dataset by using "
"mne.datasets.{name}.data_path(force_update=True)".format(
name=name, current=data_version, newest=mne_version
)
f"The {name} dataset (version {data_version}) is older than "
f"mne-python (version {mne_version}). If the examples fail, "
f"you may need to update the {name} dataset by using "
f"mne.datasets.{name}.data_path(force_update=True)" # nosec B608
)
_log_time_size(t0, sz)
return (final_path, data_version) if return_version else final_path
3 changes: 1 addition & 2 deletions mne/html_templates/_templates.py
@@ -7,12 +7,11 @@ def _get_html_templates_env(kind):
assert kind in ("repr", "report"), kind
import jinja2

autoescape = jinja2.select_autoescape(default=True, default_for_string=True)
templates_env = jinja2.Environment(
loader=jinja2.PackageLoader(
package_name="mne.html_templates", package_path=kind
),
autoescape=autoescape,
autoescape=jinja2.select_autoescape(default=True, default_for_string=True),
)
if kind == "report":
templates_env.filters["zip"] = zip
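
The change above only inlines the ``select_autoescape`` call; the security-relevant point is that autoescaping stays enabled for file-based and string templates alike. A small illustration of the effect (not taken from the MNE code base):

    import jinja2

    env = jinja2.Environment(
        autoescape=jinja2.select_autoescape(default=True, default_for_string=True),
    )
    # With autoescaping on, markup smuggled into a template variable is
    # rendered inert rather than as live HTML.
    html = env.from_string("{{ title }}").render(title="<script>alert(1)</script>")
    print(html)  # &lt;script&gt;alert(1)&lt;/script&gt;
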
2 changes: 1 addition & 1 deletion mne/io/egi/egimff.py
@@ -5,7 +5,7 @@
import math
import os.path as op
import re
from xml.dom.minidom import parse
from defusedxml.minidom import parse
from pathlib import Path

import numpy as np
2 changes: 1 addition & 1 deletion mne/io/egi/events.py
@@ -4,7 +4,7 @@
from datetime import datetime
from glob import glob
from os.path import basename, join, splitext
from xml.etree.ElementTree import parse
from defusedxml.ElementTree import parse

import numpy as np

2 changes: 1 addition & 1 deletion mne/io/egi/general.py
@@ -2,7 +2,7 @@
# License: BSD-3-Clause

import os
from xml.dom.minidom import parse
from defusedxml.minidom import parse
import re

import numpy as np
19 changes: 12 additions & 7 deletions mne/io/kit/coreg.py
@@ -40,7 +40,7 @@ def read_mrk(fname):
----------
fname : path-like
Absolute path to Marker file.
File formats allowed: \*.sqd, \*.mrk, \*.txt, \*.pickled.
File formats allowed: \*.sqd, \*.mrk, \*.txt.
Returns
-------
@@ -49,9 +49,9 @@ def read_mrk(fname):
"""
from .kit import _read_dirs

fname = Path(fname)
_check_option("file extension", fname.suffix, (".sqd", ".mrk", ".txt", ".pickled"))
_check_fname(fname, "read", must_exist=True, name="mrk file")
fname = Path(_check_fname(fname, "read", must_exist=True, name="mrk file"))
if fname.suffix != ".pickled":
_check_option("file extension", fname.suffix, (".sqd", ".mrk", ".txt"))
if fname.suffix in (".sqd", ".mrk"):
with open(fname, "rb", buffering=0) as fid:
dirs = _read_dirs(fid)
@@ -70,13 +70,18 @@ def read_mrk(fname):
elif fname.suffix == ".txt":
mrk_points = _read_dig_kit(fname, unit="m")
elif fname.suffix == ".pickled":
warn(
"Reading pickled files is unsafe and not future compatible, save "
"to a standard format (text or FIF) instea, e.g. with:\n"
r"np.savetxt(fid, pts, delimiter=\"\\t\", newline=\"\\n\")",
FutureWarning,
)
with open(fname, "rb") as fid:
food = pickle.load(fid)
food = pickle.load(fid) # nosec B301
try:
mrk_points = food["mrk"]
except Exception:
err = "%r does not contain marker points." % fname
raise ValueError(err)
raise ValueError(f"{fname} does not contain marker points.") from None

# check output
mrk_points = np.asarray(mrk_points)
16 changes: 12 additions & 4 deletions mne/io/kit/tests/test_coreg.py
@@ -27,15 +27,23 @@ def test_io_mrk(tmp_path):
pts_2 = read_mrk(path)
assert_array_equal(pts, pts_2, "read/write mrk to text")

# pickle
# pickle (deprecated)
fname = tmp_path / "mrk.pickled"
with open(fname, "wb") as fid:
pickle.dump(dict(mrk=pts), fid)
pts_2 = read_mrk(fname)
with pytest.warns(FutureWarning, match="unsafe"):
pts_2 = read_mrk(fname)
assert_array_equal(pts_2, pts, "pickle mrk")
with open(fname, "wb") as fid:
pickle.dump(dict(), fid)
pytest.raises(ValueError, read_mrk, fname)
with pytest.warns(FutureWarning, match="unsafe"):
with pytest.raises(ValueError, match="does not contain"):
read_mrk(fname)

# unsupported extension
pytest.raises(ValueError, read_mrk, "file.ext")
fname = tmp_path / "file.ext"
with pytest.raises(FileNotFoundError, match="does not exist"):
read_mrk(fname)
fname.write_text("")
with pytest.raises(ValueError, match="file extension"):
read_mrk(fname)
2 changes: 1 addition & 1 deletion mne/io/nedf/nedf.py
@@ -2,7 +2,7 @@

from copy import deepcopy
from datetime import datetime, timezone
from xml.etree import ElementTree
from defusedxml import ElementTree

import numpy as np

3 changes: 2 additions & 1 deletion mne/preprocessing/maxfilter.py
@@ -217,7 +217,8 @@ def apply_maxfilter(

logger.info("Running MaxFilter: %s " % cmd)
if os.getenv("_MNE_MAXFILTER_TEST", "") != "true": # fake maxfilter
st = os.system(cmd)
# OK to `nosec` because it's deprecated / will be removed
st = os.system(cmd) # nosec B605
else:
print(cmd) # we can check the output
st = 0
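
Here the deprecated code path keeps ``os.system`` and suppresses Bandit's B605 warning inline, since the whole function is slated for removal. For code that is staying around, the usual Bandit-friendly alternative is to avoid the shell entirely; a hedged sketch of that pattern (not what this diff does, and the command line is hypothetical):

    import shlex
    import subprocess

    cmd = "maxfilter -f raw.fif -o raw_sss.fif"  # hypothetical command line
    # Splitting the command and running it without a shell sidesteps the
    # shell-injection warning; check=True raises on a nonzero exit status.
    subprocess.run(shlex.split(cmd), check=True)
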
2 changes: 1 addition & 1 deletion mne/report/tests/test_report.py
@@ -865,7 +865,7 @@ def test_survive_pickle(tmp_path):
# Pickle report object to simulate multiprocessing with joblib
report = Report(info_fname=raw_fname_new)
pickled_report = pickle.dumps(report)
report = pickle.loads(pickled_report)
report = pickle.loads(pickled_report) # nosec B301

# Just test if no errors occur
report.parse_folder(tmp_path, render_bem=False)
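
The same ``# nosec B301`` marker appears on several pickle round-trips in the tests (here and in test_label.py and test_csd.py below). B301 is Bandit's blanket warning about pickle deserialization; the inline marker limits the suppression to a single line that loads bytes the test itself just produced. The pattern, reduced to a minimal sketch:

    import pickle

    payload = pickle.dumps({"answer": 42})  # data produced in the same test
    obj = pickle.loads(payload)  # nosec B301 -- trusted, locally created bytes
    assert obj == {"answer": 42}
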
2 changes: 1 addition & 1 deletion mne/tests/test_label.py
@@ -352,7 +352,7 @@ def test_label_io(tmp_path):
with open(dest, "wb") as fid:
pickle.dump(label, fid, pickle.HIGHEST_PROTOCOL)
with open(dest, "rb") as fid:
label2 = pickle.load(fid)
label2 = pickle.load(fid) # nosec B301
assert_labels_equal(label, label2)


2 changes: 1 addition & 1 deletion mne/time_frequency/tests/test_csd.py
@@ -264,7 +264,7 @@ def test_csd_pickle(tmp_path):
with open(fname, "wb") as f:
pickle.dump(csd, f)
with open(fname, "rb") as f:
csd2 = pickle.load(f)
csd2 = pickle.load(f) # nosec B301
assert_array_equal(csd._data, csd2._data)
assert csd.tmin == csd2.tmin
assert csd.tmax == csd2.tmax
1 change: 1 addition & 0 deletions mne/utils/__init__.py
@@ -107,6 +107,7 @@
"_clean_names",
"pformat",
"_file_like",
"_empty_hash",
"_explain_exception",
"_get_argvalues",
"sizeof_fmt",
(Diffs for the remaining changed files are not shown here.)
