diff --git a/.github/workflows/fetch-oas.yml b/.github/workflows/fetch-oas.yml
index 44692ddb5c..0dd32805b5 100644
--- a/.github/workflows/fetch-oas.yml
+++ b/.github/workflows/fetch-oas.yml
@@ -10,6 +10,9 @@ on:
This will override any version calculated by the release-drafter.
required: true
+env:
+ release_version: ${{ github.event.inputs.version || github.event.inputs.release_number }}
+
jobs:
oas_fetch:
name: Fetch OpenAPI Specifications
@@ -21,19 +24,19 @@ jobs:
- name: Checkout
uses: actions/checkout@v4
with:
- ref: ${{ github.event.inputs.version }}
+ ref: release/${{ env.release_version }}
- name: Load docker images
run: |-
- docker pull defectdojo/defectdojo-django:${{ github.event.inputs.version }}-alpine
- docker pull defectdojo/defectdojo-nginx:${{ github.event.inputs.version }}-alpine
+ docker pull defectdojo/defectdojo-django:${{ env.release_version }}-alpine
+ docker pull defectdojo/defectdojo-nginx:${{ env.release_version }}-alpine
docker images
- name: Start Dojo
run: docker-compose --profile postgres-redis --env-file ./docker/environments/postgres-redis.env up --no-deps -d postgres nginx uwsgi
env:
- DJANGO_VERSION: ${{ github.event.inputs.version }}-alpine
- NGINX_VERSION: ${{ github.event.inputs.version }}-alpine
+ DJANGO_VERSION: ${{ env.release_version }}-alpine
+ NGINX_VERSION: ${{ env.release_version }}-alpine
- name: Download OpenAPI Specifications
run: |-
diff --git a/components/package.json b/components/package.json
index 6bff77e4dc..4c9fc573d8 100644
--- a/components/package.json
+++ b/components/package.json
@@ -1,6 +1,6 @@
{
"name": "defectdojo",
- "version": "2.31.0",
+ "version": "2.31.1",
"license" : "BSD-3-Clause",
"private": true,
"dependencies": {
diff --git a/dojo/__init__.py b/dojo/__init__.py
index c79303a1f2..174901e835 100644
--- a/dojo/__init__.py
+++ b/dojo/__init__.py
@@ -4,6 +4,6 @@
# Django starts so that shared_task will use this app.
from .celery import app as celery_app # noqa
-__version__ = '2.31.0'
+__version__ = '2.31.1'
__url__ = 'https://github.com/DefectDojo/django-DefectDojo'
__docs__ = 'https://documentation.defectdojo.com'
diff --git a/dojo/api_v2/serializers.py b/dojo/api_v2/serializers.py
index 45d2707a6e..2d12611508 100644
--- a/dojo/api_v2/serializers.py
+++ b/dojo/api_v2/serializers.py
@@ -1133,6 +1133,14 @@ class Meta:
model = Tool_Type
fields = "__all__"
+ def validate(self, data):
+ if self.context["request"].method == "POST":
+ name = data.get("name")
+ # Make sure this will not create a duplicate tool type
+ if Tool_Type.objects.filter(name=name).count() > 0:
+ raise serializers.ValidationError('A Tool Type with this name already exists')
+ return data
+
class RegulationSerializer(serializers.ModelSerializer):
class Meta:
diff --git a/dojo/db_migrations/0201_populate_finding_sla_expiration_date.py b/dojo/db_migrations/0201_populate_finding_sla_expiration_date.py
new file mode 100644
index 0000000000..4b886301de
--- /dev/null
+++ b/dojo/db_migrations/0201_populate_finding_sla_expiration_date.py
@@ -0,0 +1,133 @@
+from django.db import migrations
+from django.utils import timezone
+from datetime import datetime
+from django.conf import settings
+from dateutil.relativedelta import relativedelta
+import logging
+
+from dojo.utils import get_work_days
+
+logger = logging.getLogger(__name__)
+
+
+def calculate_sla_expiration_dates(apps, schema_editor):
+ System_Settings = apps.get_model('dojo', 'System_Settings')
+
+ ss, _ = System_Settings.objects.get_or_create()
+ if not ss.enable_finding_sla:
+ return
+
+ logger.info('Calculating SLA expiration dates for all findings')
+
+ SLA_Configuration = apps.get_model('dojo', 'SLA_Configuration')
+ Finding = apps.get_model('dojo', 'Finding')
+
+ findings = Finding.objects.filter(sla_expiration_date__isnull=True).order_by('id').only('id', 'sla_start_date', 'date', 'severity', 'test', 'mitigated')
+
+ page_size = 1000
+ total_count = findings.count()
+ logger.info('Found %d findings to be updated', total_count)
+
+ i = 0
+ batch = []
+ last_id = 0
+ total_pages = (total_count // page_size) + 2
+ for p in range(1, total_pages):
+ page = findings.filter(id__gt=last_id)[:page_size]
+ for find in page:
+ i += 1
+ last_id = find.id
+
+ start_date = find.sla_start_date if find.sla_start_date else find.date
+
+ sla_config = SLA_Configuration.objects.filter(id=find.test.engagement.product.sla_configuration_id).first()
+ sla_period = getattr(sla_config, find.severity.lower(), None)
+
+ days = None
+ if settings.SLA_BUSINESS_DAYS:
+ if find.mitigated:
+ days = get_work_days(find.date, find.mitigated.date())
+ else:
+ days = get_work_days(find.date, timezone.now().date())
+ else:
+ if isinstance(start_date, datetime):
+ start_date = start_date.date()
+
+ if find.mitigated:
+ days = (find.mitigated.date() - start_date).days
+ else:
+ days = (timezone.now().date() - start_date).days
+
+ days = days if days > 0 else 0
+
+ days_remaining = None
+ if sla_period:
+ days_remaining = sla_period - days
+
+ if days_remaining:
+ if find.mitigated:
+ find.sla_expiration_date = find.mitigated.date() + relativedelta(days=days_remaining)
+ else:
+ find.sla_expiration_date = timezone.now().date() + relativedelta(days=days_remaining)
+
+ batch.append(find)
+
+ if (i > 0 and i % page_size == 0):
+ Finding.objects.bulk_update(batch, ['sla_expiration_date'])
+ batch = []
+ logger.info('%s out of %s findings processed...', i, total_count)
+
+ Finding.objects.bulk_update(batch, ['sla_expiration_date'])
+ batch = []
+ logger.info('%s out of %s findings processed...', i, total_count)
+
+
+def reset_sla_expiration_dates(apps, schema_editor):
+ System_Settings = apps.get_model('dojo', 'System_Settings')
+
+ ss, _ = System_Settings.objects.get_or_create()
+ if not ss.enable_finding_sla:
+ return
+
+ logger.info('Resetting SLA expiration dates for all findings')
+
+ Finding = apps.get_model('dojo', 'Finding')
+
+ findings = Finding.objects.filter(sla_expiration_date__isnull=False).order_by('id').only('id')
+
+ page_size = 1000
+ total_count = findings.count()
+ logger.info('Found %d findings to be reset', total_count)
+
+ i = 0
+ batch = []
+ last_id = 0
+ total_pages = (total_count // page_size) + 2
+ for p in range(1, total_pages):
+ page = findings.filter(id__gt=last_id)[:page_size]
+ for find in page:
+ i += 1
+ last_id = find.id
+
+ find.sla_expiration_date = None
+ batch.append(find)
+
+ if (i > 0 and i % page_size == 0):
+ Finding.objects.bulk_update(batch, ['sla_expiration_date'])
+ batch = []
+ logger.info('%s out of %s findings processed...', i, total_count)
+
+ Finding.objects.bulk_update(batch, ['sla_expiration_date'])
+ batch = []
+ logger.info('%s out of %s findings processed...', i, total_count)
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('dojo', '0200_finding_sla_expiration_date_product_async_updating_and_more'),
+ ]
+
+ operations = [
+ migrations.RunPython(calculate_sla_expiration_dates, reset_sla_expiration_dates),
+ ]
diff --git a/dojo/filters.py b/dojo/filters.py
index 51279d76a9..723c52337f 100644
--- a/dojo/filters.py
+++ b/dojo/filters.py
@@ -11,6 +11,7 @@
from django.conf import settings
import six
from django.utils.translation import gettext_lazy as _
+from django.utils import timezone
from django_filters import FilterSet, CharFilter, OrderingFilter, \
ModelMultipleChoiceFilter, ModelChoiceFilter, MultipleChoiceFilter, \
BooleanFilter, NumberFilter, DateFilter
@@ -148,16 +149,12 @@ def any(self, qs, name):
return qs
def sla_satisfied(self, qs, name):
- for finding in qs:
- if finding.violates_sla:
- qs = qs.exclude(id=finding.id)
- return qs
+ # return findings that have an sla expiration date after today or no sla expiration date
+ return qs.filter(Q(sla_expiration_date__isnull=True) | Q(sla_expiration_date__gt=timezone.now().date()))
def sla_violated(self, qs, name):
- for finding in qs:
- if not finding.violates_sla:
- qs = qs.exclude(id=finding.id)
- return qs
+ # return active findings that have an sla expiration date before today
+ return qs.filter(Q(active=True) & Q(sla_expiration_date__lt=timezone.now().date()))
options = {
None: (_('Any'), any),
@@ -184,13 +181,13 @@ def any(self, qs, name):
def sla_satisifed(self, qs, name):
for product in qs:
- if product.violates_sla:
+ if product.violates_sla():
qs = qs.exclude(id=product.id)
return qs
def sla_violated(self, qs, name):
for product in qs:
- if not product.violates_sla:
+ if not product.violates_sla():
qs = qs.exclude(id=product.id)
return qs
diff --git a/dojo/forms.py b/dojo/forms.py
index 558c09ae69..27a1fb0c28 100755
--- a/dojo/forms.py
+++ b/dojo/forms.py
@@ -2388,6 +2388,23 @@ class Meta:
model = Tool_Type
exclude = ['product']
+ def __init__(self, *args, **kwargs):
+ instance = kwargs.get('instance', None)
+ self.newly_created = True
+ if instance is not None:
+ self.newly_created = instance.pk is None
+ super().__init__(*args, **kwargs)
+
+ def clean(self):
+ form_data = self.cleaned_data
+ if self.newly_created:
+ name = form_data.get("name")
+ # Make sure this will not create a duplicate tool type
+ if Tool_Type.objects.filter(name=name).count() > 0:
+ raise forms.ValidationError('A Tool Type with this name already exists')
+
+ return form_data
+
class RegulationForm(forms.ModelForm):
class Meta:
diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py
index 8a8b208d45..ecd5da084f 100644
--- a/dojo/jira_link/helper.py
+++ b/dojo/jira_link/helper.py
@@ -1036,28 +1036,28 @@ def get_issuetype_fields(
else:
try:
- issuetypes = jira.createmeta_issuetypes(project_key)
+ issuetypes = jira.project_issue_types(project_key)
except JIRAError as e:
e.text = f"Jira API call 'createmeta/issuetypes' failed with status: {e.status_code} and message: {e.text}. Project misconfigured or no permissions in Jira ?"
raise e
issuetype_id = None
- for it in issuetypes['values']:
- if it['name'] == issuetype_name:
- issuetype_id = it['id']
+ for it in issuetypes:
+ if it.name == issuetype_name:
+ issuetype_id = it.id
break
if not issuetype_id:
raise JIRAError("Issue type ID can not be matched. Misconfigured default issue type ?")
try:
- issuetype_fields = jira.createmeta_fieldtypes(project_key, issuetype_id)
+ issuetype_fields = jira.project_issue_fields(project_key, issuetype_id)
except JIRAError as e:
e.text = f"Jira API call 'createmeta/fieldtypes' failed with status: {e.status_code} and message: {e.text}. Misconfigured project or default issue type ?"
raise e
try:
- issuetype_fields = [f['fieldId'] for f in issuetype_fields['values']]
+ issuetype_fields = [f.fieldId for f in issuetype_fields]
except Exception:
raise JIRAError("Misconfigured default issue type ?")
diff --git a/dojo/jira_link/views.py b/dojo/jira_link/views.py
index e05ea5ce21..a1a73f0b01 100644
--- a/dojo/jira_link/views.py
+++ b/dojo/jira_link/views.py
@@ -1,7 +1,7 @@
# Standard library imports
import json
import logging
-
+import datetime
# Third party imports
from django.contrib import messages
from django.contrib.admin.utils import NestedObjects
@@ -105,97 +105,13 @@ def webhook(request, secret=None):
if findings:
for finding in findings:
jira_helper.process_resolution_from_jira(finding, resolution_id, resolution_name, assignee_name, jira_now, jissue)
+ # Check for any comment that could have come along with the resolution
+ if (error_response := check_for_and_create_comment(parsed)) is not None:
+ return error_response
if parsed.get('webhookEvent') == 'comment_created':
- """
- example incoming requests from JIRA Server 8.14.0
- {
- "timestamp":1610269967824,
- "webhookEvent":"comment_created",
- "comment":{
- "self":"https://jira.host.com/rest/api/2/issue/115254/comment/466578",
- "id":"466578",
- "author":{
- "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo",
- "name":"defect.dojo",
- "key":"defect.dojo", # seems to be only present on JIRA Server, not on Cloud
- "avatarUrls":{
- "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48",
- "24x24":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24",
- "16x16":"https://www.gravatar.com/avatar9637bfb970eff6176357df615f548f1c?d=mm&s=16",
- "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32"
- },
- "displayName":"Defect Dojo",
- "active":true,
- "timeZone":"Europe/Amsterdam"
- },
- "body":"(Valentijn Scholten):test4",
- "updateAuthor":{
- "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo",
- "name":"defect.dojo",
- "key":"defect.dojo",
- "avatarUrls":{
- "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48",
- "24x24""https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24",
- "16x16":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=16",
- "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32"
- },
- "displayName":"Defect Dojo",
- "active":true,
- "timeZone":"Europe/Amsterdam"
- },
- "created":"2021-01-10T10:12:47.824+0100",
- "updated":"2021-01-10T10:12:47.824+0100"
- }
- }
- """
-
- comment_text = parsed['comment']['body']
- commentor = ''
- if 'name' in parsed['comment']['updateAuthor']:
- commentor = parsed['comment']['updateAuthor']['name']
- elif 'emailAddress' in parsed['comment']['updateAuthor']:
- commentor = parsed['comment']['updateAuthor']['emailAddress']
- else:
- logger.debug('Could not find the author of this jira comment!')
- commentor_display_name = parsed['comment']['updateAuthor']['displayName']
- # example: body['comment']['self'] = "http://www.testjira.com/jira_under_a_path/rest/api/2/issue/666/comment/456843"
- jid = parsed['comment']['self'].split('/')[-3]
- jissue = get_object_or_404(JIRA_Issue, jira_id=jid)
- logging.info(f"Received issue comment for {jissue.jira_key}")
- logger.debug('jissue: %s', vars(jissue))
-
- jira_usernames = JIRA_Instance.objects.values_list('username', flat=True)
- for jira_userid in jira_usernames:
- # logger.debug('incoming username: %s jira config username: %s', commentor.lower(), jira_userid.lower())
- if jira_userid.lower() == commentor.lower():
- logger.debug('skipping incoming JIRA comment as the user id of the comment in JIRA (%s) matches the JIRA username in DefectDojo (%s)', commentor.lower(), jira_userid.lower())
- return HttpResponse('')
-
- findings = None
- if jissue.finding:
- findings = [jissue.finding]
- create_notification(event='other', title=f'JIRA incoming comment - {jissue.finding}', finding=jissue.finding, url=reverse("view_finding", args=(jissue.finding.id,)), icon='check')
-
- elif jissue.finding_group:
- findings = [jissue.finding_group.findings.all()]
- create_notification(event='other', title=f'JIRA incoming comment - {jissue.finding}', finding=jissue.finding, url=reverse("view_finding_group", args=(jissue.finding_group.id,)), icon='check')
-
- elif jissue.engagement:
- return HttpResponse('Comment for engagement ignored')
- else:
- raise Http404(f'No finding or engagement found for JIRA issue {jissue.jira_key}')
-
- for finding in findings:
- # logger.debug('finding: %s', vars(jissue.finding))
- new_note = Notes()
- new_note.entry = f'({commentor_display_name} ({commentor})): {comment_text}'
- new_note.author, created = User.objects.get_or_create(username='JIRA')
- new_note.save()
- finding.notes.add(new_note)
- finding.jira_issue.jira_change = timezone.now()
- finding.jira_issue.save()
- finding.save()
+ if (error_response := check_for_and_create_comment(parsed)) is not None:
+ return error_response
if parsed.get('webhookEvent') not in ['comment_created', 'jira:issue_updated']:
logger.info(f"Unrecognized JIRA webhook event received: {parsed.get('webhookEvent')}")
@@ -203,6 +119,7 @@ def webhook(request, secret=None):
except Exception as e:
if isinstance(e, Http404):
logger.warning('404 error processing JIRA webhook')
+ logger.warning(str(e))
else:
logger.exception(e)
@@ -218,6 +135,112 @@ def webhook(request, secret=None):
return HttpResponse('')
+def check_for_and_create_comment(parsed_json):
+ """
+ example incoming requests from JIRA Server 8.14.0
+ {
+ "timestamp":1610269967824,
+ "webhookEvent":"comment_created",
+ "comment":{
+ "self":"https://jira.host.com/rest/api/2/issue/115254/comment/466578",
+ "id":"466578",
+ "author":{
+ "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo",
+ "name":"defect.dojo",
+ "key":"defect.dojo", # seems to be only present on JIRA Server, not on Cloud
+ "avatarUrls":{
+ "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48",
+ "24x24":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24",
+ "16x16":"https://www.gravatar.com/avatar9637bfb970eff6176357df615f548f1c?d=mm&s=16",
+ "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32"
+ },
+ "displayName":"Defect Dojo",
+ "active":true,
+ "timeZone":"Europe/Amsterdam"
+ },
+ "body":"(Valentijn Scholten):test4",
+ "updateAuthor":{
+ "self":"https://jira.host.com/rest/api/2/user?username=defect.dojo",
+ "name":"defect.dojo",
+ "key":"defect.dojo",
+ "avatarUrls":{
+ "48x48":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=48",
+ "24x24""https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=24",
+ "16x16":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=16",
+ "32x32":"https://www.gravatar.com/avatar/9637bfb970eff6176357df615f548f1c?d=mm&s=32"
+ },
+ "displayName":"Defect Dojo",
+ "active":true,
+ "timeZone":"Europe/Amsterdam"
+ },
+ "created":"2021-01-10T10:12:47.824+0100",
+ "updated":"2021-01-10T10:12:47.824+0100"
+ }
+ }
+ """
+ comment = parsed_json.get("comment", None)
+ if comment is None:
+ return
+
+ comment_text = comment.get('body')
+ commenter = ''
+ if 'name' in comment.get('updateAuthor'):
+ commenter = comment.get('updateAuthor', {}).get('name')
+ elif 'emailAddress' in comment.get('updateAuthor'):
+ commenter = comment.get('updateAuthor', {}).get('emailAddress')
+ else:
+ logger.debug('Could not find the author of this jira comment!')
+ commenter_display_name = comment.get('updateAuthor', {}).get('displayName')
+ # example: body['comment']['self'] = "http://www.testjira.com/jira_under_a_path/rest/api/2/issue/666/comment/456843"
+ jid = comment.get('self', '').split('/')[-3]
+ jissue = get_object_or_404(JIRA_Issue, jira_id=jid)
+ logger.info(f"Received issue comment for {jissue.jira_key}")
+ logger.debug('jissue: %s', vars(jissue))
+
+ jira_usernames = JIRA_Instance.objects.values_list('username', flat=True)
+ for jira_user_id in jira_usernames:
+ # logger.debug('incoming username: %s jira config username: %s', commenter.lower(), jira_user_id.lower())
+ if jira_user_id.lower() == commenter.lower():
+ logger.debug('skipping incoming JIRA comment as the user id of the comment in JIRA (%s) matches the JIRA username in DefectDojo (%s)', commenter.lower(), jira_user_id.lower())
+ return HttpResponse('')
+
+ findings = None
+ if jissue.finding:
+ findings = [jissue.finding]
+ create_notification(event='other', title=f'JIRA incoming comment - {jissue.finding}', finding=jissue.finding, url=reverse("view_finding", args=(jissue.finding.id,)), icon='check')
+
+ elif jissue.finding_group:
+ findings = jissue.finding_group.findings.all()
+ create_notification(event='other', title=f'JIRA incoming comment - {jissue.finding_group}', url=reverse("view_finding_group", args=(jissue.finding_group.id,)), icon='check')
+
+ elif jissue.engagement:
+ return HttpResponse('Comment for engagement ignored')
+ else:
+ raise Http404(f'No finding or engagement found for JIRA issue {jissue.jira_key}')
+
+ # Set the fields for the notes
+ author, _ = User.objects.get_or_create(username='JIRA')
+ entry = f'({commenter_display_name} ({commenter})): {comment_text}'
+ # Iterate (potentially) over each of the findings the note should be added to
+ for finding in findings:
+ # Determine if this exact note was created within the last 30 seconds to avoid duplicate notes
+ existing_notes = finding.notes.filter(
+ entry=entry,
+ author=author,
+ date__gte=(timezone.now() - datetime.timedelta(seconds=30)),
+ )
+ # Check the query for any hits
+ if existing_notes.count() == 0:
+ new_note = Notes()
+ new_note.entry = entry
+ new_note.author = author
+ new_note.save()
+ finding.notes.add(new_note)
+ finding.jira_issue.jira_change = timezone.now()
+ finding.jira_issue.save()
+ finding.save()
+
+
def get_custom_field(jira, label):
url = jira._options["server"].strip('/') + '/rest/api/2/field'
response = jira._session.get(url).json()
diff --git a/dojo/models.py b/dojo/models.py
index 7bda3997c0..45d522963e 100755
--- a/dojo/models.py
+++ b/dojo/models.py
@@ -1102,7 +1102,7 @@ def findings_active_verified_count(self):
@cached_property
def endpoint_host_count(self):
# active_endpoints is (should be) prefetched
- endpoints = self.active_endpoints
+ endpoints = getattr(self, 'active_endpoints', None)
hosts = []
for e in endpoints:
@@ -1116,7 +1116,10 @@ def endpoint_host_count(self):
@cached_property
def endpoint_count(self):
# active_endpoints is (should be) prefetched
- return len(self.active_endpoints)
+ endpoints = getattr(self, 'active_endpoints', None)
+ if endpoints is not None:
+ return len(self.active_endpoints)
+ return None
def open_findings(self, start_date=None, end_date=None):
if start_date is None or end_date is None:
@@ -1192,13 +1195,11 @@ def get_absolute_url(self):
from django.urls import reverse
return reverse('view_product', args=[str(self.id)])
- @property
def violates_sla(self):
- findings = Finding.objects.filter(test__engagement__product=self, active=True)
- for f in findings:
- if f.violates_sla:
- return True
- return False
+ findings = Finding.objects.filter(test__engagement__product=self,
+ active=True,
+ sla_expiration_date__lt=timezone.now().date())
+ return findings.count() > 0
class Product_Member(models.Model):
@@ -2887,20 +2888,19 @@ def set_sla_expiration_date(self):
self.sla_expiration_date = get_current_date() + relativedelta(days=days_remaining)
def sla_days_remaining(self):
- sla_calculation = None
- sla_period = self.get_sla_period()
- if sla_period:
- sla_calculation = sla_period - self.sla_age
- return sla_calculation
-
- def sla_deadline(self):
- days_remaining = self.sla_days_remaining()
- if days_remaining:
+ if self.sla_expiration_date:
if self.mitigated:
- return self.mitigated.date() + relativedelta(days=days_remaining)
- return get_current_date() + relativedelta(days=days_remaining)
+ mitigated_date = self.mitigated
+ if isinstance(mitigated_date, datetime):
+ mitigated_date = self.mitigated.date()
+ return (self.sla_expiration_date - mitigated_date).days
+ else:
+ return (self.sla_expiration_date - get_current_date()).days
return None
+ def sla_deadline(self):
+ return self.sla_expiration_date
+
def github(self):
try:
return self.github_issue
@@ -3294,8 +3294,7 @@ def inherit_tags(self, potentially_existing_tags):
@property
def violates_sla(self):
- days_remaining = self.sla_days_remaining()
- return days_remaining < 0 if days_remaining else False
+ return (self.sla_expiration_date and self.sla_expiration_date < timezone.now().date())
class FindingAdmin(admin.ModelAdmin):
diff --git a/dojo/templates/dojo/dashboard.html b/dojo/templates/dojo/dashboard.html
index 8d3227f975..8e04908609 100644
--- a/dojo/templates/dojo/dashboard.html
+++ b/dojo/templates/dojo/dashboard.html
@@ -207,7 +207,7 @@
{% else %}
{% trans "View Responses" %}
- {% trans "Create Engagement" %}
+ {% trans "Create Engagement" %}
{% endif %}
diff --git a/dojo/tools/github_vulnerability/parser.py b/dojo/tools/github_vulnerability/parser.py
index 15bf37606c..3c134342d2 100644
--- a/dojo/tools/github_vulnerability/parser.py
+++ b/dojo/tools/github_vulnerability/parser.py
@@ -66,6 +66,9 @@ def get_findings(self, filename, test):
if "vulnerableManifestPath" in alert:
finding.file_path = alert["vulnerableManifestPath"]
+ if "vulnerableRequirements" in alert and alert["vulnerableRequirements"].startswith("= "):
+ finding.component_version = alert["vulnerableRequirements"][2:]
+
if "createdAt" in alert:
finding.date = dateutil.parser.parse(alert["createdAt"])
diff --git a/dojo/tools/sarif/parser.py b/dojo/tools/sarif/parser.py
index 14d8184957..e7963612b4 100644
--- a/dojo/tools/sarif/parser.py
+++ b/dojo/tools/sarif/parser.py
@@ -77,7 +77,10 @@ def __get_last_invocation_date(self, data):
def get_rules(run):
rules = {}
- for item in run["tool"]["driver"].get("rules", []):
+ rules_array = run["tool"]["driver"].get("rules", [])
+ if len(rules_array) == 0 and run["tool"].get("extensions") is not None:
+ rules_array = run["tool"]["extensions"][0].get("rules", [])
+ for item in rules_array:
rules[item["id"]] = item
return rules
diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml
index 26edb33e5e..0af7d7c32b 100644
--- a/helm/defectdojo/Chart.yaml
+++ b/helm/defectdojo/Chart.yaml
@@ -1,8 +1,8 @@
apiVersion: v2
-appVersion: "2.31.0"
+appVersion: "2.31.1"
description: A Helm chart for Kubernetes to install DefectDojo
name: defectdojo
-version: 1.6.108
+version: 1.6.109
icon: https://www.defectdojo.org/img/favicon.ico
maintainers:
- name: madchap
diff --git a/unittests/scans/github_vulnerability/github-vuln-version.json b/unittests/scans/github_vulnerability/github-vuln-version.json
new file mode 100644
index 0000000000..e80afe7e58
--- /dev/null
+++ b/unittests/scans/github_vulnerability/github-vuln-version.json
@@ -0,0 +1,106 @@
+{
+ "data": {
+ "repository": {
+ "vulnerabilityAlerts": {
+ "nodes": [
+ {
+ "id": "RVA_kwDOLJyUo88AAAABQUWapw",
+ "createdAt": "2024-01-26T02:42:32Z",
+ "vulnerableManifestPath": "sompath/pom.xml",
+ "securityVulnerability": {
+ "severity": "CRITICAL",
+ "updatedAt": "2022-12-09T22:02:22Z",
+ "package": {
+ "name": "org.springframework:spring-web",
+ "ecosystem": "MAVEN"
+ },
+ "firstPatchedVersion": {
+ "identifier": "6.0.0"
+ },
+ "vulnerableVersionRange": "< 6.0.0",
+ "advisory": {
+ "description": "Pivotal Spring Framework before 6.0.0 suffers from a potential remote code execution (RCE) issue if used for Java deserialization of untrusted data. Depending on how the library is implemented within a product, this issue may or not occur, and authentication may be required.\n\nMaintainers recommend investigating alternative components or a potential mitigating control. Version 4.2.6 and 3.2.17 contain [enhanced documentation](https://github.com/spring-projects/spring-framework/commit/5cbe90b2cd91b866a5a9586e460f311860e11cfa) advising users to take precautions against unsafe Java deserialization, version 5.3.0 [deprecate the impacted classes](https://github.com/spring-projects/spring-framework/issues/25379) and version 6.0.0 [removed it entirely](https://github.com/spring-projects/spring-framework/issues/27422).",
+ "summary": "Pivotal Spring Framework contains unsafe Java deserialization methods",
+ "identifiers": [
+ {
+ "value": "GHSA-4wrc-f8pq-fpqp",
+ "type": "GHSA"
+ },
+ {
+ "value": "CVE-2016-1000027",
+ "type": "CVE"
+ }
+ ],
+ "references": [
+ {
+ "url": "https://nvd.nist.gov/vuln/detail/CVE-2016-1000027"
+ },
+ {
+ "url": "https://bugzilla.redhat.com/show_bug.cgi?id=CVE-2016-1000027"
+ },
+ {
+ "url": "https://security-tracker.debian.org/tracker/CVE-2016-1000027"
+ },
+ {
+ "url": "https://www.tenable.com/security/research/tra-2016-20"
+ },
+ {
+ "url": "https://github.com/spring-projects/spring-framework/issues/24434"
+ },
+ {
+ "url": "https://github.com/spring-projects/spring-framework/issues/24434#issuecomment-1231625331"
+ },
+ {
+ "url": "https://github.com/spring-projects/spring-framework/commit/5cbe90b2cd91b866a5a9586e460f311860e11cfa"
+ },
+ {
+ "url": "https://support.contrastsecurity.com/hc/en-us/articles/4402400830612-Spring-web-Java-Deserialization-CVE-2016-1000027"
+ },
+ {
+ "url": "https://github.com/spring-projects/spring-framework/issues/21680"
+ },
+ {
+ "url": "https://github.com/spring-projects/spring-framework/commit/2b051b8b321768a4cfef83077db65c6328ffd60f"
+ },
+ {
+ "url": "https://jira.spring.io/browse/SPR-17143?redirect=false"
+ },
+ {
+ "url": "https://github.com/spring-projects/spring-framework/issues/24434#issuecomment-579669626"
+ },
+ {
+ "url": "https://github.com/spring-projects/spring-framework/issues/24434#issuecomment-582313417"
+ },
+ {
+ "url": "https://github.com/spring-projects/spring-framework/issues/24434#issuecomment-744519525"
+ },
+ {
+ "url": "https://security.netapp.com/advisory/ntap-20230420-0009/"
+ },
+ {
+ "url": "https://spring.io/blog/2022/05/11/spring-framework-5-3-20-and-5-2-22-available-now"
+ },
+ {
+ "url": "https://github.com/advisories/GHSA-4wrc-f8pq-fpqp"
+ }
+ ],
+ "cvss": {
+ "vectorString": "CVSS:3.1/AV:N/AC:L/PR:N/UI:N/S:U/C:H/I:H/A:H"
+ }
+ }
+ },
+ "state": "OPEN",
+ "vulnerableManifestFilename": "pom.xml",
+ "vulnerableRequirements": "= 5.3.29",
+ "number": 1,
+ "dependencyScope": "RUNTIME",
+ "dismissComment": null,
+ "dismissReason": null,
+ "dismissedAt": null,
+ "fixedAt": null
+ }
+ ]
+ }
+ }
+ }
+}
diff --git a/unittests/test_swagger_schema.py b/unittests/test_swagger_schema.py
index 9f1316b4d2..b126335937 100644
--- a/unittests/test_swagger_schema.py
+++ b/unittests/test_swagger_schema.py
@@ -785,6 +785,9 @@ def __init__(self, *args, **kwargs):
self.viewset = ToolTypesViewSet
self.model = Tool_Type
self.serializer = ToolTypeSerializer
+ self.field_transformers = {
+ "name": lambda v: v + "_new"
+ }
class UserTest(BaseClass.SchemaTest):
diff --git a/unittests/tools/test_github_vulnerability_parser.py b/unittests/tools/test_github_vulnerability_parser.py
index acc955e349..1453c02a39 100644
--- a/unittests/tools/test_github_vulnerability_parser.py
+++ b/unittests/tools/test_github_vulnerability_parser.py
@@ -251,3 +251,18 @@ def test_parse_state(self):
self.assertEqual(finding.file_path, "apache/cxf/cxf-shiro/pom.xml")
self.assertEqual(finding.active, False)
self.assertEqual(finding.is_mitigated, True)
+
+ def test_parser_version(self):
+ testfile = open("unittests/scans/github_vulnerability/github-vuln-version.json")
+ parser = GithubVulnerabilityParser()
+ findings = parser.get_findings(testfile, Test())
+ self.assertEqual(1, len(findings))
+ for finding in findings:
+ finding.clean()
+
+ with self.subTest(i=0):
+ finding = findings[0]
+ self.assertEqual(finding.title, "Pivotal Spring Framework contains unsafe Java deserialization methods")
+ self.assertEqual(finding.severity, "Critical")
+ self.assertEqual(finding.component_name, "org.springframework:spring-web")
+ self.assertEqual(finding.component_version, "5.3.29")