
Commit

fix: pillow vulnerability and other + formatting (#89)
## 📥 Pull Request Description

Fixes the Pillow vulnerability and other dependency vulnerabilities, and runs several pre-commit hooks to reformat the codebase.

## 📝 Checklist

Please make sure you've completed the following tasks before submitting
this pull request:

- [x] Pre-commit hooks were executed (not all green)
- [x] Changes have been reviewed by at least one other developer
- [ ] Tests have been added or updated to cover the changes (only
necessary if the changes affect the executable code)
- [x] All tests ran successfully
- [x] All merge conflicts are resolved
- [ ] Documentation has been updated to reflect the changes
- [ ] Any necessary migrations have been run

---------

Co-authored-by: Nils <[email protected]>
dstalzjohn and aiakide committed Nov 28, 2023
1 parent 77d3dc9 commit 3d3e6af
Showing 36 changed files with 233 additions and 210 deletions.
@@ -5,4 +5,4 @@ target_image_size:
_target_: niceml.utilities.imagesize.ImageSize
width: 10
height: 10
use_dirs_as_subsets: True
use_dirs_as_subsets: True
2 changes: 1 addition & 1 deletion docs/hydra-dagster.md
@@ -137,4 +137,4 @@ configured you can find either in the source code or at our ops reference page.
### Conclusion

In this tutorial, we've seen how to use Hydra with Dagster to configure a machine learning pipeline. Hydra allows for a hierarchical set of configuration files that can be used to define settings for different parts of the pipeline.
The `defaults` section in the YAML file allows us to import the content of other YAML files, making it easy to reuse common settings across different parts of the pipeline.
The `defaults` section in the YAML file allows us to import the content of other YAML files, making it easy to reuse common settings across different parts of the pipeline.
1 change: 0 additions & 1 deletion niceml/dagster/ops/splitdata.py
@@ -44,7 +44,6 @@
}
)
def split_data(context: OpExecutionContext, input_location: dict):

"""Splits the data in input_location into subsets (set_infos)"""
op_config = json.loads(json.dumps(context.op_config))

4 changes: 3 additions & 1 deletion niceml/dashboard/cam.py
@@ -12,7 +12,9 @@
load_img,
)

from niceml.experiments.expdatalocalstorageloader import create_expdata_from_local_storage
from niceml.experiments.expdatalocalstorageloader import (
create_expdata_from_local_storage,
)
from niceml.experiments.experimentdata import ExperimentData


1 change: 0 additions & 1 deletion niceml/dashboard/imagenetdataloggerviscomponent.py
@@ -56,7 +56,6 @@ def _render(
def _load_net_data(
*args, # pylint: disable = unused-argument
) -> List[np.ndarray]:

return [
self.image_loader(filepath=net_data_path)
for net_data_path in net_data_paths[: self.max_output]
2 changes: 1 addition & 1 deletion niceml/data/netdataloggers/netdatalogger.py
@@ -17,6 +17,7 @@ class NetDataLogger(ABC):
"""Abstract implementation of an NetDataLogger"""

def __init__(self):
"""Initializes the NetDataLogger with default values"""
self.data_description = None
self.exp_context = None
self.set_name = None
@@ -28,7 +29,6 @@ def initialize(
exp_context: ExperimentContext,
set_name: str,
):

"""Method to initialize the NetDataLogger"""
self.data_description = data_description
self.exp_context = exp_context
@@ -1,3 +1,4 @@
"""module for categorical focal loss"""
from typing import List, Union

import numpy as np
@@ -7,6 +8,8 @@

@tf.keras.utils.register_keras_serializable()
class CategoricalFocalLoss(tf.keras.losses.Loss):
"""class for categorical focal loss"""

def __init__(self, alpha: Union[float, List[float]], gamma: float = 2.0, **kwargs):
"""
Inspired by https://github.com/umbertogriffo/focal-loss-keras
@@ -28,7 +31,7 @@ def __init__(self, alpha: Union[float, List[float]], gamma: float = 2.0, **kwarg
self.gamma = gamma

def __call__(self, y_true, y_pred, sample_weight=None):

"""Call method for loss"""
# Clip the prediction value to prevent NaN's and Inf's
epsilon = kb.epsilon()
y_pred = kb.clip(y_pred, epsilon, 1.0 - epsilon)
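Note on the hunk above: the new docstrings label this class as a categorical focal loss (the code credits umbertogriffo/focal-loss-keras). For reference, a minimal NumPy sketch of the standard formula — illustrative only, not the repository's Keras implementation:

```python
# Minimal NumPy sketch of categorical focal loss (Lin et al. 2017), for
# reference only; the class in this diff implements the formula against the
# Keras Loss interface with its own clipping and reduction.
import numpy as np

def categorical_focal_loss(y_true, y_pred, alpha=0.25, gamma=2.0):
    """y_true: one-hot targets, y_pred: softmax probabilities, both shaped (N, C)."""
    eps = 1e-7
    y_pred = np.clip(y_pred, eps, 1.0 - eps)        # avoid log(0), like the clip in the diff
    cross_entropy = -y_true * np.log(y_pred)        # per-class cross entropy
    focal_weight = alpha * (1.0 - y_pred) ** gamma  # down-weights easy, confident examples
    return float(np.mean(np.sum(focal_weight * cross_entropy, axis=-1)))

# An easy, confident prediction contributes almost nothing to the loss
print(categorical_focal_loss(np.array([[0.0, 1.0]]), np.array([[0.05, 0.95]])))  # ≈ 3e-5
```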
10 changes: 8 additions & 2 deletions niceml/dlframeworks/tensorflow/metrics/objdetmetrics.py
@@ -13,10 +13,11 @@ class AvgPosPredObjDet: # pylint: disable=too-few-public-methods
"""Positive Classification Values for object detection"""

def __init__(self, name: str = "avg_pos_pred"):

"""Initializes the AvgPosPredObjDet with the given name"""
self.__name__ = name

def __call__(self, y_true, y_pred):
"""Call method is used as a default interface for the metric"""
y_pred = tf.cast(y_pred, dtype=tf.float32)

cls_predictions = y_pred[:, :, 4:]
@@ -35,10 +36,11 @@ class AvgNegPredObjDet: # pylint: disable=too-few-public-methods
"""Negative Classification Values for object detection"""

def __init__(self, name: str = "avg_neg_pred"):

"""Initializes the AvgNegPredObjDet with the given name"""
self.__name__ = name

def __call__(self, y_true, y_pred):
"""Call method is used as a default interface for the metric"""
y_pred = tf.cast(y_pred, dtype=tf.float32)

cls_predictions = y_pred[:, :, 4:]
@@ -67,9 +69,11 @@ class AvgPosTargetCountObjDet: # pylint: disable=too-few-public-methods
"""Average positive target count for one image in object detection"""

def __init__(self, name: str = "avg_pos_target_count"):
"""Initializes the AvgPosTargetCountObjDet"""
self.__name__ = name

def __call__(self, y_true, y_pred):
"""Call method is used as a default interface for the metric"""
positive_mask = tf.cast(
tf.equal(y_true[:, :, 4], POSITIVE_MASK_VALUE), dtype=tf.float32
)
@@ -82,9 +86,11 @@ class AvgNegTargetCountObjDet: # pylint: disable=too-few-public-methods
"""Average negative target count for one image in object detection"""

def __init__(self, name: str = "avg_neg_target_count"):
"""Initializes the AvgNegTargetCountObjDet"""
self.__name__ = name

def __call__(self, y_true, y_pred):
"""Call method is used as a default interface for the metric"""
negative_mask = tf.cast(
tf.equal(y_true[:, :, 4], NEGATIVE_MASK_VALUE), dtype=tf.float32
)
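These metric classes only gain docstrings in this commit. For orientation, a hedged NumPy sketch of what a metric like `AvgPosTargetCountObjDet` measures — the average number of anchors per image whose target flag (channel 4 of `y_true`) equals the positive mask value; the mask constant's value here is an assumption:

```python
# Hedged sketch of the quantity a metric like AvgPosTargetCountObjDet reports;
# POSITIVE_MASK_VALUE's actual value in the repository is not shown in this
# diff, so 1.0 is an assumption for illustration.
import numpy as np

POSITIVE_MASK_VALUE = 1.0

def avg_pos_target_count(y_true):
    """y_true: (batch, num_anchors, channels) target tensor; channel 4 holds the pos/neg flag."""
    positive_mask = (y_true[:, :, 4] == POSITIVE_MASK_VALUE).astype(np.float32)
    return float(np.mean(np.sum(positive_mask, axis=1)))  # mean positive anchors per image
```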
6 changes: 5 additions & 1 deletion niceml/dlframeworks/tensorflow/models/mlp.py
@@ -4,7 +4,9 @@
from tensorflow.keras import Sequential, layers # pylint: disable=import-error

from niceml.data.datadescriptions.datadescription import DataDescription
from niceml.data.datadescriptions.inputdatadescriptions import InputVectorDataDescription
from niceml.data.datadescriptions.inputdatadescriptions import (
InputVectorDataDescription,
)
from niceml.data.datadescriptions.outputdatadescriptions import (
OutputVectorDataDescription,
)
@@ -22,12 +24,14 @@ def __init__(
final_activation: str = "linear",
do_summary: bool = True,
):
"""Initializes the OwnMLP model factory"""
self.hidden_layers = hidden_layers
self.activation = activation
self.do_summary = do_summary
self.final_activation = final_activation

def create_model(self, data_description: DataDescription) -> Any:
"""Creates the mlp model"""
input_dd: InputVectorDataDescription = check_instance(
data_description, InputVectorDataDescription
)
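The factory above only gains docstrings and a wrapped import here. As orientation, a hedged sketch of the kind of Sequential MLP such a factory builds from `hidden_layers`, `activation`, and `final_activation` — the real `create_model` derives input and output sizes from the data description, which is omitted below:

```python
# Hedged sketch of an MLP factory in the spirit of OwnMLP.create_model;
# input_size/output_size are passed directly instead of being read from a
# DataDescription, so this is illustrative rather than the repository code.
from tensorflow.keras import Input, Sequential, layers

def build_mlp(input_size, output_size, hidden_layers, activation="relu",
              final_activation="linear", do_summary=True):
    model = Sequential()
    model.add(Input(shape=(input_size,)))
    for units in hidden_layers:                      # e.g. hidden_layers=[64, 32]
        model.add(layers.Dense(units, activation=activation))
    model.add(layers.Dense(output_size, activation=final_activation))
    if do_summary:
        model.summary()
    return model

model = build_mlp(input_size=16, output_size=3, hidden_layers=[64, 32])
```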
4 changes: 3 additions & 1 deletion niceml/experiments/experimenttests/validateexps.py
@@ -7,7 +7,9 @@

from niceml.data.storages.fsfilesystemstorage import FsFileSystemStorage
from niceml.data.storages.fsspecstorage import FSSpecStorage
from niceml.experiments.expdatalocalstorageloader import create_expdata_from_local_storage
from niceml.experiments.expdatalocalstorageloader import (
create_expdata_from_local_storage,
)
from niceml.experiments.experimenterrors import EmptyExperimentError
from niceml.experiments.experimenttests.exptests import (
ExperimentTest,
1 change: 0 additions & 1 deletion niceml/experiments/expoutinitializer.py
@@ -35,7 +35,6 @@ def __init__(
self.exp_type: str = exp_type or ""

def __call__(self, exp_context: ExperimentContext):

produce_git_version_yaml(
exp_context,
ExperimentFilenames.GIT_VERSIONS,
2 changes: 0 additions & 2 deletions niceml/mlcomponents/objdet/anchorencoding.py
@@ -53,13 +53,11 @@ def encode_anchors( # pylint: disable=too-many-locals
encoded_feature_list: List[List[float]] = []

for anchor in anchor_list:

if len(gt_labels) == 0:
target_bbox = anchor
target_label = None
prediction_flag = NEGATIVE_MASK_VALUE
else:

max_iou = 0
target_bbox = gt_labels[0].bounding_box
target_label = None
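The two blank-line removals above sit inside `encode_anchors`, which matches each anchor against the ground-truth box with the highest IoU and flags it positive or negative. A simplified sketch of that matching idea, assuming a hypothetical `iou(box_a, box_b)` helper and threshold; the label and offset encoding the real method performs is left out:

```python
# Simplified anchor-to-ground-truth matching by IoU; iou() and pos_threshold
# are assumptions for illustration, and the label/offset encoding done by
# encode_anchors in the repository is omitted.
POSITIVE_MASK_VALUE = 1.0
NEGATIVE_MASK_VALUE = 0.0

def match_anchors(anchor_list, gt_boxes, iou, pos_threshold=0.5):
    matches = []
    for anchor in anchor_list:
        if len(gt_boxes) == 0:                   # no ground truth: every anchor is negative
            matches.append((anchor, NEGATIVE_MASK_VALUE))
            continue
        best_gt = max(gt_boxes, key=lambda gt: iou(anchor, gt))
        if iou(anchor, best_gt) >= pos_threshold:
            matches.append((best_gt, POSITIVE_MASK_VALUE))
        else:
            matches.append((anchor, NEGATIVE_MASK_VALUE))
    return matches
```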
@@ -29,15 +29,15 @@
class ObjDetPredictionHandler(PredictionHandler):
"""Prediction handler for object detection predictions (BoundingBox, class prediction)"""

def __init__( # pylint: disable=too-many-arguments
def __init__( # noqa: PLR0913
self,
prediction_filter: PredictionFilter,
prediction_prefix: str = "pred",
pred_identifier: str = "image_location",
detection_idx_col: str = DETECTION_INDEX_COLUMN_NAME,
apply_sigmoid: bool = True,
):

"""Initializes the ObjDetPredictionHandler"""
super().__init__()
self.prediction_filter = prediction_filter
self.prediction_prefix = prediction_prefix
@@ -53,6 +53,7 @@ def __init__( # pylint: disable=too-many-arguments
self.anchor_array = None

def initialize(self):
"""Initializes the prediction handler"""
self.anchors: List[BoundingBox] = self.anchor_generator.generate_anchors(
data_description=self.data_description
)
@@ -74,7 +74,6 @@ def __exit__(self, exc_type, exc_value, exc_traceback):
),
"wb",
) as file:

np.savez_compressed(file, **self.data)


@@ -5,7 +5,9 @@

import numpy as np

from niceml.data.datadescriptions.outputdatadescriptions import OutputImageDataDescription
from niceml.data.datadescriptions.outputdatadescriptions import (
OutputImageDataDescription,
)
from niceml.mlcomponents.resultanalyzers.instancefinders.instancecontour import (
InstanceContour,
)
@@ -11,7 +11,9 @@
from niceml.mlcomponents.resultanalyzers.instancefinders.instancefinder import (
InstanceFinder,
)
from niceml.mlcomponents.resultanalyzers.instancefinders.maskinstance import MaskInstance
from niceml.mlcomponents.resultanalyzers.instancefinders.maskinstance import (
MaskInstance,
)
from niceml.mlcomponents.resultanalyzers.tensors.semsegdataiterator import (
SemSegPredictionContainer,
)
@@ -27,7 +27,6 @@ def __init__(
self.anchors = None

def get_net_targets(self, data_list: List[ObjDetData]) -> np.ndarray:

if self.anchors is None:
if isinstance(self.data_description, OutputObjDetDataDescription):
self.anchors = self.anchor_generator.generate_anchors(
@@ -4,7 +4,9 @@
import numpy as np

from niceml.data.datadescriptions.inputdatadescriptions import InputImageDataDescription
from niceml.data.datadescriptions.outputdatadescriptions import OutputImageDataDescription
from niceml.data.datadescriptions.outputdatadescriptions import (
OutputImageDataDescription,
)
from niceml.data.datainfos.semsegdatainfo import SemSegData
from niceml.mlcomponents.targettransformer.targettransformer import NetTargetTransformer
from niceml.utilities.commonutils import check_instance
6 changes: 4 additions & 2 deletions niceml/utilities/boundingboxes/bboxdrawing.py
@@ -42,7 +42,9 @@ def draw_bounding_box_on_image(
font = get_font("OpenSans-Regular.ttf", font_size=font_size)

text = f"{label.class_name}: {label.score:.2f}" if label.score else label.class_name
text_width, text_height = font.getsize(text)
(left, top, right, bottom) = font.getbbox(text)
text_width = right - left
text_height = bottom - top
x_1, y_1, x_2, y_2 = label.bounding_box.get_absolute_ullr()
draw.rectangle(
(x_1 - line_width, y_1 - line_width, x_2 + line_width, y_2 + line_width),
@@ -69,7 +71,7 @@ def draw_bounding_box_on_image(
return image


def draw_labels_on_image( # pylint: disable=too-many-arguments
def draw_labels_on_image( # noqa: PLR0913
image: ImageType,
pred_bbox_label_list: List[ObjDetInstanceLabel],
gt_bbox_label_list: List[ObjDetInstanceLabel],
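The `getsize` → `getbbox` switch above is the Pillow-facing part of this commit: `ImageFont.getsize` was removed in Pillow 10, so text width and height are now derived from the bounding box. A small standalone sketch of the same migration, using Pillow's built-in default font instead of the repository's OpenSans helper:

```python
# Standalone sketch of the text-measurement migration shown above;
# uses Pillow's default font rather than get_font("OpenSans-Regular.ttf").
from PIL import ImageFont

font = ImageFont.load_default()
text = "person: 0.87"

# Old (removed in Pillow 10): text_width, text_height = font.getsize(text)
left, top, right, bottom = font.getbbox(text)   # bounding box of the rendered text
text_width = right - left
text_height = bottom - top
print(text_width, text_height)
```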
2 changes: 0 additions & 2 deletions niceml/utilities/boundingboxes/bboxencoding.py
@@ -5,7 +5,6 @@
def encode_boxes(
anchor_boxes_xywh: np.ndarray, gt_boxes_xywh: np.ndarray, box_variances: np.ndarray
) -> np.ndarray:

"""
Encodes the anchor boxes to a numpy array
@@ -48,7 +47,6 @@ def decode_boxes(
encoded_array_xywh: np.ndarray,
box_variances: np.ndarray,
) -> np.ndarray:

"""
Decodes the incoming array to target boxes
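Only the stray blank lines before the docstrings are removed here; the bodies of `encode_boxes`/`decode_boxes` are not part of this diff. For orientation, a common SSD-style center-size encoding with variances looks like the sketch below — illustrative, not necessarily the exact formula niceml uses:

```python
# Hedged sketch of a common SSD-style box encoding with variances; the actual
# encode_boxes/decode_boxes bodies are not shown in this diff, so this is
# illustrative rather than the repository's exact formula.
import numpy as np

def encode_boxes_sketch(anchors_xywh, gt_xywh, variances):
    """anchors_xywh, gt_xywh: (N, 4) arrays of (cx, cy, w, h)."""
    offsets = (gt_xywh[:, :2] - anchors_xywh[:, :2]) / anchors_xywh[:, 2:]
    scales = np.log(gt_xywh[:, 2:] / anchors_xywh[:, 2:])
    return np.concatenate([offsets, scales], axis=-1) / variances

def decode_boxes_sketch(anchors_xywh, encoded, variances):
    encoded = encoded * variances
    centers = encoded[:, :2] * anchors_xywh[:, 2:] + anchors_xywh[:, :2]
    sizes = np.exp(encoded[:, 2:]) * anchors_xywh[:, 2:]
    return np.concatenate([centers, sizes], axis=-1)

# Round trip: decoding the encoded boxes recovers the ground truth
anchors = np.array([[50.0, 50.0, 20.0, 40.0]])
gt = np.array([[55.0, 48.0, 22.0, 36.0]])
var = np.array([0.1, 0.1, 0.2, 0.2])
assert np.allclose(decode_boxes_sketch(anchors, encode_boxes_sketch(anchors, gt, var), var), gt)
```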
5 changes: 4 additions & 1 deletion niceml/utilities/boundingboxes/bboxlabeling.py
@@ -4,7 +4,10 @@
from attr import define, field

from niceml.utilities.boundingboxes.bboxconversion import dict_to_bounding_box
from niceml.utilities.boundingboxes.boundingbox import BoundingBox, bounding_box_from_ullr
from niceml.utilities.boundingboxes.boundingbox import (
BoundingBox,
bounding_box_from_ullr,
)
from niceml.utilities.imagesize import ImageSize
from niceml.utilities.instancelabeling import InstanceLabel

5 changes: 4 additions & 1 deletion niceml/utilities/boundingboxes/filtering/nmsfilter.py
@@ -4,7 +4,10 @@
import numpy as np
from attrs import define

from niceml.utilities.boundingboxes.bboxconversion import convert_to_ullr, convert_to_xywh
from niceml.utilities.boundingboxes.bboxconversion import (
convert_to_ullr,
convert_to_xywh,
)
from niceml.utilities.boundingboxes.filtering.predictionfilter import PredictionFilter
from niceml.utilities.ioumatrix import compute_iou_matrix

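This hunk only rewraps imports. Since the surrounding class is an NMS-based prediction filter, here is a generic greedy non-maximum-suppression sketch for orientation — the repository's `NmsFilter` works on a precomputed IoU matrix and its own box/score layout, so treat this as illustrative:

```python
# Generic greedy NMS sketch, illustrative only; the NmsFilter in this
# repository uses compute_iou_matrix and its own prediction layout.
import numpy as np

def iou_ullr(box_a, box_b):
    """IoU of two boxes given as (x1, y1, x2, y2)."""
    x1, y1 = max(box_a[0], box_b[0]), max(box_a[1], box_b[1])
    x2, y2 = min(box_a[2], box_b[2]), min(box_a[3], box_b[3])
    inter = max(0.0, x2 - x1) * max(0.0, y2 - y1)
    area_a = (box_a[2] - box_a[0]) * (box_a[3] - box_a[1])
    area_b = (box_b[2] - box_b[0]) * (box_b[3] - box_b[1])
    return inter / (area_a + area_b - inter + 1e-9)

def nms(boxes, scores, iou_threshold=0.5):
    """Keep the highest-scoring boxes, dropping overlaps above iou_threshold."""
    order = list(np.argsort(scores)[::-1])
    keep = []
    while order:
        best = order.pop(0)
        keep.append(int(best))
        order = [i for i in order if iou_ullr(boxes[best], boxes[i]) < iou_threshold]
    return keep

boxes = np.array([[0, 0, 10, 10], [1, 1, 10, 10], [20, 20, 30, 30]], dtype=float)
scores = np.array([0.9, 0.8, 0.7])
print(nms(boxes, scores))  # [0, 2] — the near-duplicate box 1 is suppressed
```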
5 changes: 4 additions & 1 deletion niceml/utilities/boundingboxes/filtering/unifiedboxfilter.py
@@ -4,7 +4,10 @@
import numpy as np
from attrs import define

from niceml.utilities.boundingboxes.bboxconversion import convert_to_ullr, convert_to_xywh
from niceml.utilities.boundingboxes.bboxconversion import (
convert_to_ullr,
convert_to_xywh,
)
from niceml.utilities.boundingboxes.filtering.predictionfilter import PredictionFilter
from niceml.utilities.ioumatrix import compute_iou_matrix

8 changes: 4 additions & 4 deletions niceml/utilities/commonutils.py
@@ -97,12 +97,12 @@ def human_readable_size(obj: Any) -> str:
size = asizeof.asizeof(obj)
if size < 1024:
return f"{size} bytes"
if 1024 <= size < 1024 ** 2:
if 1024 <= size < 1024**2:
size_kb = size / 1024
return f"{size_kb:.2f} KB"
if 1024 ** 2 <= size < 1024 ** 3:
size_mb = size / 1024 ** 2
if 1024**2 <= size < 1024**3:
size_mb = size / 1024**2
return f"{size_mb:.2f} MB"

size_gb = size / 1024 ** 3
size_gb = size / 1024**3
return f"{size_gb:.2f} GB"
7 changes: 6 additions & 1 deletion niceml/utilities/imageutils.py
@@ -121,7 +121,12 @@ def binarize_multichannel_image(
mask including information where the prediction is above threshold
"""
binary_multichannel_images: Dict[str, np.ndarray] = {}
scores_mask = cv2.threshold(image_scores, threshold, 1, cv2.THRESH_BINARY,)[
scores_mask = cv2.threshold(
image_scores,
threshold,
1,
cv2.THRESH_BINARY,
)[
1
].astype(np.uint8)

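The `cv2.threshold` call is only reflowed by the formatter here. For reference, a minimal sketch of what that call does — binarize a float score map into a uint8 mask — with a random score map standing in for a real prediction:

```python
# Minimal sketch of binarizing a float score map with cv2.threshold, as the
# reflowed call above does; the per-channel logic of
# binarize_multichannel_image is omitted.
import cv2
import numpy as np

image_scores = np.random.rand(64, 64).astype(np.float32)  # stand-in prediction scores
threshold = 0.5

# cv2.threshold returns (retval, thresholded_image); index [1] keeps the mask
scores_mask = cv2.threshold(image_scores, threshold, 1, cv2.THRESH_BINARY)[1].astype(np.uint8)
print(scores_mask.dtype, scores_mask.sum())
```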
1 change: 0 additions & 1 deletion niceml/utilities/instancelabelmatching.py
@@ -45,7 +45,6 @@ def get_kind_of_label_match(
pred_label.color = Color.BLUE
pred_label.active = True
for gt_label in gt_label_list:

iou = pred_label.calc_iou(other=gt_label)

pred_label, gt_label = get_kind_of_instance_label_match(
2 changes: 1 addition & 1 deletion niceml/utilities/splitutils.py
@@ -114,7 +114,7 @@ def create_copy_files_container(
identifier = "".join(
[char for char in cur_basename if char in ALPHANUMERICLIST]
)
cur_seed = int(identifier, base=len(ALPHANUMERICLIST)) % (2 ** 32 - 1)
cur_seed = int(identifier, base=len(ALPHANUMERICLIST)) % (2**32 - 1)
rng = np.random.default_rng(seed=cur_seed)
drawn_set = rng.choice(set_list, 1, p=prob_list)[0]
output_file = join(drawn_set, file)
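The seed expression above only loses its exponent spacing. The surrounding logic derives a deterministic per-file seed from the alphanumeric characters of the basename, so the same file always lands in the same split. A hedged sketch of that idea, using base-36 digits; the repository's `ALPHANUMERICLIST` may differ:

```python
# Hedged sketch of deterministic split assignment from a filename, in the
# spirit of create_copy_files_container above; ALPHANUMERIC here is an
# assumption standing in for the repository's ALPHANUMERICLIST.
import numpy as np

ALPHANUMERIC = "0123456789abcdefghijklmnopqrstuvwxyz"

def draw_split(basename, set_list, prob_list):
    identifier = "".join(c for c in basename.lower() if c in ALPHANUMERIC)
    cur_seed = int(identifier, base=len(ALPHANUMERIC)) % (2**32 - 1)
    rng = np.random.default_rng(seed=cur_seed)          # seeded per file, so reproducible
    return rng.choice(set_list, 1, p=prob_list)[0]

# The same filename always maps to the same split
print(draw_split("img_0001.png", ["train", "validation", "test"], [0.8, 0.1, 0.1]))
```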