From eec7d35e26edb3c3aaf2ed7ba6c47654bd7ec21d Mon Sep 17 00:00:00 2001 From: DefectDojo release bot Date: Mon, 4 Mar 2024 20:59:35 +0000 Subject: [PATCH 01/11] Update versions in application files --- components/package.json | 2 +- dojo/__init__.py | 2 +- helm/defectdojo/Chart.yaml | 4 ++-- 3 files changed, 4 insertions(+), 4 deletions(-) diff --git a/components/package.json b/components/package.json index 5e46baad2d..5528766c57 100644 --- a/components/package.json +++ b/components/package.json @@ -1,6 +1,6 @@ { "name": "defectdojo", - "version": "2.32.0", + "version": "2.33.0-dev", "license" : "BSD-3-Clause", "private": true, "dependencies": { diff --git a/dojo/__init__.py b/dojo/__init__.py index 1ee2891686..61db2f0d7a 100644 --- a/dojo/__init__.py +++ b/dojo/__init__.py @@ -4,6 +4,6 @@ # Django starts so that shared_task will use this app. from .celery import app as celery_app # noqa: F401 -__version__ = '2.32.0' +__version__ = '2.33.0-dev' __url__ = 'https://github.com/DefectDojo/django-DefectDojo' __docs__ = 'https://documentation.defectdojo.com' diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml index 82d01cfa8c..e5eb9b0e92 100644 --- a/helm/defectdojo/Chart.yaml +++ b/helm/defectdojo/Chart.yaml @@ -1,8 +1,8 @@ apiVersion: v2 -appVersion: "2.32.0" +appVersion: "2.33.0-dev" description: A Helm chart for Kubernetes to install DefectDojo name: defectdojo -version: 1.6.114 +version: 1.6.115-dev icon: https://www.defectdojo.org/img/favicon.ico maintainers: - name: madchap From 370cffb289848da35b2d75632c31238ccac56bdb Mon Sep 17 00:00:00 2001 From: Andreas Reichert <70580399+reichertan@users.noreply.github.com> Date: Wed, 6 Mar 2024 02:28:45 +0100 Subject: [PATCH 02/11] Bugfix: checkmarx parser - datetime is no longer put into the Finding.date field (#9570) * Checkmarx parser: datetime is no longer put into the Finding.date field * Conversion of the init and teardown methods to functions has been revoked. 
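The fix is a one-step truncation: both report formats are still parsed as before, but only the calendar date of the parsed timestamp is stored on the finding. A minimal sketch of the two conversions the patch touches (the sample timestamp and epoch values are illustrative):

    from dateutil import parser
    import datetime

    # XML reports: the ScanStart attribute carries a full timestamp.
    scan_start = parser.parse("2018-02-25T11:35:52")  # datetime.datetime
    finding_date = scan_start.date()                  # datetime.date(2018, 2, 25)

    # JSON reports: the date can also arrive as an epoch value under a "seconds" key.
    value = {"seconds": 1651795200}
    finding_date = datetime.datetime.utcfromtimestamp(value["seconds"]).date()  # datetime.date(2022, 5, 6)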
--- dojo/tools/checkmarx/parser.py | 6 +-- unittests/tools/test_checkmarx_parser.py | 54 +++++++++++++++++------- 2 files changed, 41 insertions(+), 19 deletions(-) diff --git a/dojo/tools/checkmarx/parser.py b/dojo/tools/checkmarx/parser.py index d8be5b8b68..4f1f07d725 100755 --- a/dojo/tools/checkmarx/parser.py +++ b/dojo/tools/checkmarx/parser.py @@ -58,7 +58,7 @@ def _get_findings_xml(self, filename, test): language = "" findingdetail = "" group = "" - find_date = parser.parse(root.get("ScanStart")) + find_date = parser.parse(root.get("ScanStart")).date() if query.get("Language") is not None: language = query.get("Language") @@ -389,9 +389,9 @@ def get_findings(self, file, test): def _parse_date(self, value): if isinstance(value, str): - return parser.parse(value) + return parser.parse(value).date() elif isinstance(value, dict) and isinstance(value.get("seconds"), int): - return datetime.datetime.utcfromtimestamp(value.get("seconds")) + return datetime.datetime.utcfromtimestamp(value.get("seconds")).date() else: return None diff --git a/unittests/tools/test_checkmarx_parser.py b/unittests/tools/test_checkmarx_parser.py index c43e24fb57..f09e7d7da1 100644 --- a/unittests/tools/test_checkmarx_parser.py +++ b/unittests/tools/test_checkmarx_parser.py @@ -203,8 +203,8 @@ def check_parse_file_with_single_vulnerability_has_single_finding(self, findings item.file_path, ) # ScanStart - self.assertEqual(datetime.datetime, type(item.date)) - self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), item.date) + self.assertEqual(datetime.date, type(item.date)) + self.assertEqual(datetime.date(2018, 2, 25), item.date) self.assertEqual(bool, type(item.static_finding)) self.assertEqual(True, item.static_finding) @@ -293,7 +293,7 @@ def test_file_name_aggregated_parse_file_with_multiple_vulnerabilities_has_multi finding = findings[0] self.assertEqual("SQL Injection (Assignment5.java)", finding.title) self.assertEqual("High", finding.severity) - self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), finding.date) + self.assertEqual(datetime.date(2018, 2, 25), finding.date) self.assertEqual(True, finding.static_finding) self.assertEqual("WebGoat/webgoat-lessons/challenge/src/main/java/org/owasp/webgoat/plugin/challenge5/challenge6/Assignment5.java", finding.file_path) @@ -312,7 +312,7 @@ def test_detailed_parse_file_with_multiple_vulnerabilities_has_multiple_findings finding = findings[0] self.assertEqual("SQL Injection (Assignment5.java)", finding.title) self.assertEqual("High", finding.severity) - self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), finding.date) + self.assertEqual(datetime.date(2018, 2, 25), finding.date) self.assertEqual(True, finding.static_finding) self.assertEqual("WebGoat/webgoat-lessons/challenge/src/main/java/org/owasp/webgoat/plugin/challenge5/challenge6/Assignment5.java", finding.file_path) self.assertEqual(50, finding.line) @@ -516,8 +516,8 @@ def check_parse_file_with_utf8_replacement_char(self, findings): item.file_path, ) # ScanStart - self.assertEqual(datetime.datetime, type(item.date)) - self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), item.date) + self.assertEqual(datetime.date, type(item.date)) + self.assertEqual(datetime.date(2018, 2, 25), item.date) self.assertEqual(bool, type(item.static_finding)) self.assertEqual(True, item.static_finding) @@ -665,8 +665,8 @@ def check_parse_file_with_utf8_various_non_ascii_char(self, findings): item.file_path, ) # ScanStart - self.assertEqual(datetime.datetime, type(item.date)) - 
self.assertEqual(datetime.datetime(2018, 2, 25, 11, 35, 52), item.date) + self.assertEqual(datetime.date, type(item.date)) + self.assertEqual(datetime.date(2018, 2, 25), item.date) self.assertEqual(bool, type(item.static_finding)) self.assertEqual(True, item.static_finding) @@ -685,8 +685,8 @@ def test_file_with_multiple_findings_is_aggregated_with_query_id(self, mock): # ScanStart self.assertEqual("Client Potential ReDoS In Match (prettify.js)", finding.title) self.assertEqual("Low", finding.severity) - self.assertEqual(datetime.datetime, type(finding.date)) - self.assertEqual(datetime.datetime(2021, 11, 17, 13, 50, 45), finding.date) + self.assertEqual(datetime.date, type(finding.date)) + self.assertEqual(datetime.date(2021, 11, 17), finding.date) self.assertEqual(bool, type(finding.static_finding)) self.assertEqual(True, finding.static_finding) @@ -705,8 +705,8 @@ def test_file_with_empty_filename(self, mock): # ScanStart self.assertEqual("Missing HSTS Header", finding.title) self.assertEqual("Medium", finding.severity) - self.assertEqual(datetime.datetime, type(finding.date)) - self.assertEqual(datetime.datetime(2021, 12, 24, 9, 12, 14), finding.date) + self.assertEqual(datetime.date, type(finding.date)) + self.assertEqual(datetime.date(2021, 12, 24), finding.date) self.assertEqual(bool, type(finding.static_finding)) self.assertEqual(True, finding.static_finding) @@ -791,7 +791,7 @@ def test_file_issue6956(self, mock): self.assertEqual(89, finding.cwe) self.assertEqual("/webgoat-lessons/challenge/src/main/java/org/owasp/webgoat/challenges/challenge5/Assignment5.java", finding.file_path) self.assertEqual(61, finding.line) - self.assertEqual(datetime.date(2022, 5, 6), finding.date.date()) + self.assertEqual(datetime.date(2022, 5, 6), finding.date) if finding.unique_id_from_tool == "SYlu22e7ZQydKJFOlC/o1EsyixQ=": with self.subTest(i="SYlu22e7ZQydKJFOlC/o1EsyixQ="): self.assertEqual("SQL Injection", finding.title) @@ -799,7 +799,7 @@ def test_file_issue6956(self, mock): self.assertEqual(89, finding.cwe) self.assertEqual("/webgoat-lessons/sql-injection/src/main/java/org/owasp/webgoat/sql_injection/introduction/SqlInjectionLesson5.java", finding.file_path) self.assertEqual(72, finding.line) - self.assertEqual(datetime.date(2022, 5, 6), finding.date.date()) + self.assertEqual(datetime.date(2022, 5, 6), finding.date) # test one in SCA part if finding.unique_id_from_tool == "GkVx1zoIKcd1EF72zqWrGzeVTmo=": with self.subTest(i="GkVx1zoIKcd1EF72zqWrGzeVTmo="): @@ -812,7 +812,7 @@ def test_file_issue6956(self, mock): self.assertTrue(finding.active) self.assertFalse(finding.verified) self.assertIsNone(finding.line) - self.assertEqual(datetime.date(2022, 5, 6), finding.date.date()) + self.assertEqual(datetime.date(2022, 5, 6), finding.date) # test one in KICS part if finding.unique_id_from_tool == "eZrh18HAPbe2LbDAprSPrwncAC0=": with self.subTest(i="eZrh18HAPbe2LbDAprSPrwncAC0="): @@ -822,4 +822,26 @@ def test_file_issue6956(self, mock): self.assertTrue(finding.active) self.assertFalse(finding.verified) self.assertEqual("/webgoat-server/Dockerfile", finding.file_path) - self.assertEqual(datetime.date(2022, 5, 6), finding.date.date()) + self.assertEqual(datetime.date(2022, 5, 6), finding.date) + + @patch('dojo.tools.checkmarx.parser.add_language') + def test_finding_date_should_be_date_xml(self, mock): + my_file_handle, product, engagement, test = self.init( + get_unit_tests_path() + "/scans/checkmarx/single_finding.xml" + ) + parser = CheckmarxParser() + parser.set_mode('detailed') + findings = 
parser.get_findings(my_file_handle, test) + self.teardown(my_file_handle) + self.assertEqual(findings[0].date, datetime.date(2018, 2, 25)) + + @patch('dojo.tools.checkmarx.parser.add_language') + def test_finding_date_should_be_date_json(self, mock): + my_file_handle, product, engagement, test = self.init( + get_unit_tests_path() + "/scans/checkmarx/multiple_findings.json" + ) + parser = CheckmarxParser() + parser.set_mode('detailed') + findings = parser.get_findings(my_file_handle, test) + self.teardown(my_file_handle) + self.assertEqual(findings[0].date, datetime.date(2022, 2, 25)) From f5769f82a101479d15e4e0d923077fe045dce02a Mon Sep 17 00:00:00 2001 From: Felix Hernandez Date: Mon, 11 Mar 2024 12:52:23 -0600 Subject: [PATCH 03/11] Added crunch42 parser (#9714) --- .../en/integrations/parsers/file/crunch42.md | 8 + dojo/settings/settings.dist.py | 1 + dojo/tools/crunch42/__init__.py | 0 dojo/tools/crunch42/parser.py | 88 ++++ .../crunch42/crunch42_many_findings.json | 251 ++++++++++ .../crunch42/crunch42_many_findings2.json | 442 ++++++++++++++++++ unittests/tools/test_crunch42_parser.py | 32 ++ 7 files changed, 822 insertions(+) create mode 100644 docs/content/en/integrations/parsers/file/crunch42.md create mode 100644 dojo/tools/crunch42/__init__.py create mode 100644 dojo/tools/crunch42/parser.py create mode 100644 unittests/scans/crunch42/crunch42_many_findings.json create mode 100644 unittests/scans/crunch42/crunch42_many_findings2.json create mode 100644 unittests/tools/test_crunch42_parser.py diff --git a/docs/content/en/integrations/parsers/file/crunch42.md b/docs/content/en/integrations/parsers/file/crunch42.md new file mode 100644 index 0000000000..e8aa1b1e55 --- /dev/null +++ b/docs/content/en/integrations/parsers/file/crunch42.md @@ -0,0 +1,8 @@ +--- +title: "Crunch42 Scan" +toc_hide: true +--- +Import JSON findings from Crunch42 vulnerability scan tool. + +### Sample Scan Data +Sample Crunch42 Scan scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/crunch42). \ No newline at end of file diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py index a970de1cc3..28a56dede6 100644 --- a/dojo/settings/settings.dist.py +++ b/dojo/settings/settings.dist.py @@ -1366,6 +1366,7 @@ def saml2_attrib_map_format(dict): 'Codechecker Report native': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Coverity API': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Cobalt.io API': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, + 'Crunch42 Scan': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, 'Dependency Track Finding Packaging Format (FPF) Export': DEDUPE_ALGO_HASH_CODE, 'Mobsfscan Scan': DEDUPE_ALGO_HASH_CODE, 'SonarQube Scan detailed': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL, diff --git a/dojo/tools/crunch42/__init__.py b/dojo/tools/crunch42/__init__.py new file mode 100644 index 0000000000..e69de29bb2 diff --git a/dojo/tools/crunch42/parser.py b/dojo/tools/crunch42/parser.py new file mode 100644 index 0000000000..e1a841e29a --- /dev/null +++ b/dojo/tools/crunch42/parser.py @@ -0,0 +1,88 @@ +import json +from dojo.models import Finding + + +class Crunch42Parser(object): + + def get_scan_types(self): + return ["Crunch42 Scan"] + + def get_label_for_scan_types(self, scan_type): + return "Crunch42 Scan" + + def get_description_for_scan_types(self, scan_type): + return "Import JSON output of Crunch42 scan report." 
+ + def parse_json(self, json_output): + try: + data = json_output.read() + try: + tree = json.loads(str(data, "utf-8")) + except Exception: + tree = json.loads(data) + except Exception: + raise ValueError("Invalid format") + + return tree + + def process_tree(self, tree, test): + return list(self.get_items(tree, test)) if tree else [] + + def get_findings(self, filename, test): + reportTree = self.parse_json(filename) + + if isinstance(reportTree, list): + temp = [] + for moduleTree in reportTree: + temp += self.process_tree(moduleTree, test) + return temp + else: + return self.process_tree(reportTree, test) + + def get_items(self, tree, test): + items = {} + iterator = 0 + if "report" in tree and tree["report"].get("security"): + results = tree["report"].get("security").get("issues") + for key, node in results.items(): + for issue in node["issues"]: + item = self.get_item( + issue, key, test + ) + items[iterator] = item + iterator += 1 + return list(items.values()) + + def get_item(self, issue, title, test): + fingerprint = issue["fingerprint"] + pointer = issue["pointer"] + message = issue["specificDescription"] if 'specificDescription' in issue else title + score = issue["score"] + criticality = issue["criticality"] + if criticality == 1: + severity = "Info" + elif criticality == 2: + severity = "Low" + elif criticality == 3: + severity = "Medium" + elif criticality <= 4: + severity = "High" + else: + severity = "Critical" + # create the finding object + finding = Finding( + unique_id_from_tool=fingerprint, + title=title, + test=test, + severity=severity, + description="**fingerprint**: " + str(fingerprint) + "\n" + + "**pointer**: " + str(pointer) + "\n" + + "**message**: " + str(message) + "\n" + + "**score**: " + str(score) + "\n", + false_p=False, + duplicate=False, + out_of_scope=False, + static_finding=True, + dynamic_finding=False, + ) + return finding diff --git a/unittests/scans/crunch42/crunch42_many_findings.json b/unittests/scans/crunch42/crunch42_many_findings.json new file mode 100644 index 0000000000..1ea3aca89f --- /dev/null +++ b/unittests/scans/crunch42/crunch42_many_findings.json @@ -0,0 +1,251 @@ +{ + "end": "1709535630", + "report": { + "index": [ + "/components/security/ApiKey", + "/paths/~1integration-test~1generate/post/security/0/ApiKeyAuth", + "/paths/~1integration-test~1health/get/security", + "/paths/~1integration-test~1invalidate/delete/security/0/ApiKeyAuth", + "/paths/~1integration-test~1ping/get/security", + "/paths/~1integration-test~1refresh/get/security/0/ApiKeyAuth", + "/paths/~1integration-test~1refresh/put/security/0/ApiKeyAuth", + "/paths/~1integration-test~1verify/get/security/0/ApiKeyAuth" + ], + "assessmentVersion": "3.1.6", + "assessmentReportVersion": "1.0.1", + "commit": "ahso2mom3neiviungoh4ENgahXie2Aer4ain5oba-E", + "oasVersion": "3.0.0", + "apiVersion": "1.0.0", + "fileId": "c65d4166-ddf7-11ee-a7f6-bf9763730afb", + "apiId": "", + "openapiState": "valid", + "score": 82.86, + "valid": true, + "criticality": 4, + "issueCounter": 8, + "minimalReport": false, + "maxEntriesPerIssue": 30, + "maxImpactedPerEntry": 30, + "security": { + "issueCounter": 8, + "score": 12.86, + "criticality": 4, + "issues": { + "v3-global-securityscheme-apikey-inheader": { + "description": "Transporting API keys in a header over network allowed", + "issues": [ + { + "score": 0, + "pointer": 0, + "tooManyImpacted": false, + "criticality": 1, + "request": true, + "fingerprint": "teephei0aes4ohxur7Atie6zuiCh9weeshue0kai" + } + ], + "issueCounter": 1, + "score": 0, + 
"criticality": 1, + "tooManyError": false + }, + "v3-operation-securityrequirement-apikey-inheader": { + "description": "Operation accepts API keys transported in a header over network", + "issues": [ + { + "score": -2.14, + "pointer": 1, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "Iibooquavie0hah0quoh7thooghiith7utoow6th" + }, + { + "score": -2.14, + "pointer": 3, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "roz6Iph0eiPaih1shooPi1geiyuziitei0aiGhed" + }, + { + "score": -2.14, + "pointer": 5, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "lae4iet6XeiyiSheeZof3sheik9lahdaiph7edah" + }, + { + "score": -2.14, + "pointer": 6, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "oNgie5Ieke9fiep6yochaT2ain8oona4xeiphiCh" + }, + { + "score": -2.14, + "pointer": 7, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "aiShievooyi1Gohn1aeque5Mae3aiBoh8oquaphe" + } + ], + "issueCounter": 5, + "score": -10.71, + "criticality": 3, + "tooManyError": false + }, + "v3-operation-securityrequirement-emptyarray": { + "description": "The security section contains an empty array", + "issues": [ + { + "specificDescription": "The security section of the operation 'get' contains an empty array", + "score": -3.21, + "pointer": 2, + "tooManyImpacted": false, + "criticality": 4, + "request": true, + "fingerprint": "oofushaeQuiev6Shegai2roh0ceighae5Daij7pi" + }, + { + "specificDescription": "The security section of the operation 'get' contains an empty array", + "score": -3.21, + "pointer": 4, + "tooManyImpacted": false, + "criticality": 4, + "request": true, + "fingerprint": "Eife6Tu5liequiec8AhZ6booGheegh5oShues2bi" + } + ], + "issueCounter": 2, + "score": -6.43, + "criticality": 4, + "tooManyError": false + } + }, + "subgroupIssueCounter": { + "authentication": { + "none": 0, + "info": 1, + "low": 0, + "medium": 5, + "high": 2, + "critical": 0 + }, + "authorization": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "transport": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + } + } + }, + "data": { + "issueCounter": 0, + "score": 70, + "criticality": 0, + "issues": {}, + "subgroupIssueCounter": { + "parameters": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "responseHeader": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "responseDefinition": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "schema": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "paths": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + } + } + }, + "issuesKey": [ + "v3-operation-securityrequirement-emptyarray", + "v3-global-securityscheme-apikey-inheader", + "v3-operation-securityrequirement-apikey-inheader" + ], + "summary": { + "oasVersion": "3.0.0", + "apiVersion": "1.0.0", + "basepath": "", + "apiName": "Example Authentication Service", + "description": "Authentication Service", + "endpoints": [ + "https://auth-dev-internal.example.com/", + "https://auth-dev-internal.example.com/" + ], + "pathCounter": 1, + "operationCounter": 7, + "parameterCounter": 4, + "requestBodyCounter": 0, + "schemesCounter": { + "https": 7 + }, + "requestContentType": {}, + "responseContentType": { + 
"application/json": 19 + }, + "securitySchemes": { + "ApiKeyAuth": { + "counterInsecure": 0, + "counterSecure": 5, + "type": "apiKey", + "apiKeyIn": "header", + "apiKeyName": "X-API-Key" + } + }, + "componentsSchemasCounter": 6, + "componentsResponsesCounter": 0, + "componentsParametersCounter": 2, + "componentsExamplesCounter": 0, + "componentsRequestBodiesCounter": 0, + "componentsHeadersCounter": 0, + "componentsSecuritySchemesCounter": 1, + "componentsLinksCounter": 0, + "componentsCallbacksCounter": 0 + } + }, + "start": "1702028474", + "taskId": "0ccd5572-ddf9-11ee-935d-d7d416afd73f" +} \ No newline at end of file diff --git a/unittests/scans/crunch42/crunch42_many_findings2.json b/unittests/scans/crunch42/crunch42_many_findings2.json new file mode 100644 index 0000000000..b9aa1f75fa --- /dev/null +++ b/unittests/scans/crunch42/crunch42_many_findings2.json @@ -0,0 +1,442 @@ +{ + "end": "2131451849", + "report": { + "index": [ + "/definitions/Objects/additionalProperties", + "/definitions/Objects/properties/all_objects/items", + "/definitions/ObjectsList/additionalProperties", + "/definitions/auth_claims", + "/definitions/auth_claims/additionalProperties", + "/definitions/auth_claims/properties/level/format", + "/paths/~1admin~1all_objects/get/parameters/0", + "/paths/~1admin~1all_objects/get/responses/403", + "/paths/~1admin~1all_objects/get/security/0/access-token", + "/paths/~1admin~1objects~1search/get/parameters/0", + "/paths/~1admin~1objects~1search/get/parameters/1", + "/paths/~1admin~1objects~1search/get/responses/403", + "/paths/~1admin~1objects~1search/get/security/0/access-token", + "/paths/~1login/post", + "/paths/~1login/post/parameters/0", + "/paths/~1login/post/parameters/1", + "/paths/~1register/post", + "/paths/~1object~1edit_info/put/parameters/1", + "/paths/~1object~1edit_info/put/responses/403", + "/paths/~1object~1edit_info/put/security/0/access-token", + "/paths/~1object~1info/get/security/0/access-token", + "/securityDefinitions/access-token" + ], + "assessmentVersion": "3.1.6", + "assessmentReportVersion": "1.0.1", + "commit": "theePhohphooQuoh6ii3naiS1Goalee9Chooghei-N", + "oasVersion": "2.0", + "apiVersion": "UAT-JWT-Validation", + "fileId": "2eeb479e-ddfa-11ee-9768-bb6e68d5b5fa", + "apiId": "", + "openapiState": "valid", + "score": 79.94, + "valid": true, + "criticality": 3, + "issueCounter": 13, + "warnings": { + "issues": { + "warning-global-schema-unused": { + "description": "Reusable schema definition is not used in the OpenAPI definition", + "totalIssues": 1, + "issues": [ + { + "pointer": 3, + "specificDescription": "The reusable schema definition 'acme_claims' is not used in the OpenAPI definition", + "fingerprint": "ahthi2Ahshaeghah2iewoo0aiF4quoath5Iej0ku" + } + ], + "tooManyError": false + }, + "warning-sample-undefined": { + "description": "No sample values or examples were provided for API Conformance Scan", + "totalIssues": 5, + "issues": [ + { + "pointer": 17, + "specificDescription": "No sample defined in the 'Parameter' object", + "fingerprint": "aereePheeb0puh5tahwoshi8Yei9woophahr7koh" + }, + { + "pointer": 9, + "specificDescription": "No sample defined in the 'Parameter' object", + "fingerprint": "aiseiquohNaik9aThae9oshu8te8ree9Yayie7Ha" + }, + { + "pointer": 10, + "specificDescription": "No sample defined in the 'Parameter' object", + "fingerprint": "thuf5Imiefe3aeTee4soh8quae8ahtho0ap8wen4" + }, + { + "pointer": 6, + "specificDescription": "No sample defined in the 'Parameter' object", + "fingerprint": 
"faeti4aide0ahTho0shiixo5cheipha9Eigahr3s" + }, + { + "pointer": 14, + "specificDescription": "No sample defined in the 'Parameter' object", + "fingerprint": "Dei9Ahraer7iech8iuk6eeyeero8quea3nahc8ah" + } + ], + "tooManyError": false + }, + "warning-schema-additionalproperties-boolean": { + "description": "Schema defines additionalProperties as a boolean value", + "totalIssues": 3, + "issues": [ + { + "pointer": 2, + "specificDescription": "", + "fingerprint": "shoo1diedoh2aex6mivi9geab9saeyoo7Dae6oth" + }, + { + "pointer": 4, + "specificDescription": "", + "fingerprint": "ooreiz0gepaeSephah6ToN8eC7tioseez4auQu3U" + }, + { + "pointer": 0, + "specificDescription": "", + "fingerprint": "aedaal8uu5aabuohuoSheidoonohSheef2iquee6" + } + ], + "tooManyError": false + }, + "warning-schema-format-improper": { + "description": "Schema format is not applicable to the schema's type", + "totalIssues": 1, + "issues": [ + { + "pointer": 5, + "specificDescription": "The format 'int32' of the schema is not applicable to the schema's type 'number'", + "fingerprint": "va8Lieweu5SieTh1ahcoole0Nahhai5ivaechith" + } + ], + "tooManyError": false + } + } + }, + "operationsNoAuthentication": [ + 13, + 16 + ], + "minimalReport": false, + "maxEntriesPerIssue": 30, + "maxImpactedPerEntry": 30, + "security": { + "issueCounter": 5, + "score": 20, + "criticality": 3, + "issues": { + "global-securityscheme-apikey-inheader": { + "description": "Transporting API keys in a header over network allowed", + "issues": [ + { + "score": 0, + "pointer": 21, + "tooManyImpacted": false, + "criticality": 1, + "request": true, + "fingerprint": "auCh0yi8sheumohruegh7of4EiT0ahngooK1aeje" + } + ], + "issueCounter": 1, + "score": 0, + "criticality": 1, + "tooManyError": false + }, + "operation-securityrequirement-apikey-inheader": { + "description": "Operation accepts API keys transported in a header over network", + "issues": [ + { + "score": -2.5, + "pointer": 8, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "Eima0iu4xaatoh1lohboophohpheiBai1iR0opei" + }, + { + "score": -2.5, + "pointer": 12, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "Ud1ohcetah5iongai8yee0veishogai2vuQuu7me" + }, + { + "score": -2.5, + "pointer": 19, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "wooN7xoof5bieChie9Aech5ohm4eerae1enu6ohr" + }, + { + "score": -2.5, + "pointer": 20, + "tooManyImpacted": false, + "criticality": 3, + "request": true, + "fingerprint": "eeliequooliexohfookosang7hooruR4pae9Aiph" + } + ], + "issueCounter": 4, + "score": -10, + "criticality": 3, + "tooManyError": false + } + }, + "subgroupIssueCounter": { + "authentication": { + "none": 0, + "info": 1, + "low": 0, + "medium": 4, + "high": 0, + "critical": 0 + }, + "authorization": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "transport": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + } + } + }, + "data": { + "issueCounter": 8, + "score": 59.94, + "criticality": 3, + "issues": { + "parameter-string-maxlength": { + "description": "String parameter has no maximum length defined", + "issues": [ + { + "specificDescription": "String parameter 'user' has no maximum length defined", + "score": -1.87, + "pointer": 14, + "tooManyImpacted": false, + "pointersAffected": [ + 13 + ], + "criticality": 3, + "request": true, + "fingerprint": "eeT0queiSahchohc5meik9Zoomoolah6Weo3phes" + }, + { + 
"specificDescription": "String parameter 'pass' has no maximum length defined", + "score": -1.87, + "pointer": 15, + "tooManyImpacted": false, + "pointersAffected": [ + 13 + ], + "criticality": 3, + "request": true, + "fingerprint": "ohvieX1AhzuphoocheeVoi0echoGh9coo7thai1o" + } + ], + "issueCounter": 2, + "score": -3.73, + "criticality": 3, + "tooManyError": false + }, + "parameter-string-pattern": { + "description": "String parameter has no pattern defined", + "issues": [ + { + "specificDescription": "String parameter 'user' has no pattern defined", + "score": -2.8, + "pointer": 14, + "tooManyImpacted": false, + "pointersAffected": [ + 13 + ], + "criticality": 3, + "request": true, + "fingerprint": "oveedeisohwahThae4Ier5oghaebaingai5iqueS" + }, + { + "specificDescription": "String parameter 'pass' has no pattern defined", + "score": -2.8, + "pointer": 15, + "tooManyImpacted": false, + "pointersAffected": [ + 13 + ], + "criticality": 3, + "request": true, + "fingerprint": "Iyung2laiGaish6kos6quiedeiX5uob3Bozee3mu" + } + ], + "issueCounter": 2, + "score": -5.6, + "criticality": 3, + "tooManyError": false + }, + "response-schema-undefined": { + "description": "Response that should contain a body has no schema defined", + "issues": [ + { + "score": -0.18, + "pointer": 7, + "tooManyImpacted": false, + "criticality": 3, + "response": true, + "fingerprint": "aeVahquu6chai1beaf9neithu8epha0Ohsh6echi" + }, + { + "score": -0.18, + "pointer": 11, + "tooManyImpacted": false, + "criticality": 3, + "response": true, + "fingerprint": "ai8Meishei0oHixuSucaiceL0aqu8uocahyahG6l" + }, + { + "score": -0.18, + "pointer": 18, + "tooManyImpacted": false, + "criticality": 3, + "response": true, + "fingerprint": "euN9zohhohPeesoY8ahbaichae6Ood0nohbio5ke" + } + ], + "issueCounter": 3, + "score": -0.53, + "criticality": 3, + "tooManyError": false + }, + "schema-response-object-without-properties": { + "description": "Schema of a JSON object in a response has no properties defined", + "issues": [ + { + "score": -0.2, + "pointer": 1, + "tooManyImpacted": false, + "criticality": 3, + "response": true, + "fingerprint": "ufuPheiyaelaePood3AeW8ooc3pooj2AiwaiCeil" + } + ], + "issueCounter": 1, + "score": -0.2, + "criticality": 3, + "tooManyError": false + } + }, + "subgroupIssueCounter": { + "parameters": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "responseHeader": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + }, + "responseDefinition": { + "none": 0, + "info": 0, + "low": 0, + "medium": 3, + "high": 0, + "critical": 0 + }, + "schema": { + "none": 0, + "info": 0, + "low": 0, + "medium": 1, + "high": 0, + "critical": 0 + }, + "paths": { + "none": 0, + "info": 0, + "low": 0, + "medium": 0, + "high": 0, + "critical": 0 + } + } + }, + "issuesKey": [ + "schema-response-object-without-properties", + "warning-schema-additionalproperties-boolean", + "parameter-string-pattern", + "parameter-string-maxlength", + "global-securityscheme-apikey-inheader", + "operation-securityrequirement-apikey-inheader", + "response-schema-undefined", + "warning-schema-format-improper", + "warning-sample-undefined", + "warning-global-schema-unused" + ], + "summary": { + "oasVersion": "2.0", + "apiVersion": "UAT-JWT-Validation", + "basepath": "", + "apiName": "Example App API", + "description": "Example Sharing API", + "endpoints": [ + "https//example.asia-1.cloud.provider.com/api" + ], + "pathCounter": 6, + "operationCounter": 6, + "parameterCounter": 4, + 
"requestBodyCounter": 3, + "schemesCounter": { + "https": 6 + }, + "requestContentType": { + "application/json": 2, + "application/x-www-form-urlencoded": 1 + }, + "responseContentType": { + "application/json": 16 + }, + "securitySchemes": { + "access-token": { + "counterInsecure": 0, + "counterSecure": 4, + "type": "apiKey", + "apiKeyIn": "header", + "apiKeyName": "x-access-token" + } + }, + "componentsSchemasCounter": 6, + "componentsResponsesCounter": 0, + "componentsParametersCounter": 0, + "componentsExamplesCounter": 0, + "componentsRequestBodiesCounter": 0, + "componentsHeadersCounter": 0, + "componentsSecuritySchemesCounter": 0, + "componentsLinksCounter": 0, + "componentsCallbacksCounter": 0 + } + }, + "start": "1693265564", + "taskId": "970e33ac-ddfc-11ee-a42e-af596b69b8f4" +} \ No newline at end of file diff --git a/unittests/tools/test_crunch42_parser.py b/unittests/tools/test_crunch42_parser.py new file mode 100644 index 0000000000..ea5188d303 --- /dev/null +++ b/unittests/tools/test_crunch42_parser.py @@ -0,0 +1,32 @@ +from ..dojo_test_case import DojoTestCase +from dojo.models import Test +from dojo.tools.crunch42.parser import Crunch42Parser + + +class TestCrunch42Parser(DojoTestCase): + + def test_crunch42parser_single_has_many_findings(self): + testfile = open("unittests/scans/crunch42/crunch42_many_findings.json") + parser = Crunch42Parser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(8, len(findings)) + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("teephei0aes4ohxur7Atie6zuiCh9weeshue0kai", finding.unique_id_from_tool) + self.assertEqual("Info", finding.severity) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) + + def test_crunch42parser_single_has_many_findings2(self): + testfile = open("unittests/scans/crunch42/crunch42_many_findings2.json") + parser = Crunch42Parser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(5, len(findings)) + with self.subTest(i=0): + finding = findings[0] + self.assertEqual("auCh0yi8sheumohruegh7of4EiT0ahngooK1aeje", finding.unique_id_from_tool) + self.assertEqual("Info", finding.severity) + self.assertIsNotNone(finding.description) + self.assertGreater(len(finding.description), 0) From 0d35b18ed837ef86656f7bef9665bd14981bd21d Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 11 Mar 2024 13:52:50 -0500 Subject: [PATCH 04/11] Jira: Improve alerting on a per step basis (#9691) --- dojo/jira_link/helper.py | 190 +++++++++++++++++++++++---------------- 1 file changed, 115 insertions(+), 75 deletions(-) diff --git a/dojo/jira_link/helper.py b/dojo/jira_link/helper.py index 5318aa0e3e..9d560d89dc 100644 --- a/dojo/jira_link/helper.py +++ b/dojo/jira_link/helper.py @@ -1,4 +1,5 @@ import logging +from typing import Any from dojo.utils import add_error_message_to_response, get_system_setting, to_str_typed import os import io @@ -695,6 +696,13 @@ def prepare_jira_issue_fields( def add_jira_issue(obj, *args, **kwargs): + def failure_to_add_message(message: str, exception: Exception, object: Any) -> bool: + if exception: + logger.exception(exception) + logger.error(message) + log_jira_alert(message, obj) + return False + logger.info('trying to create a new jira issue for %d:%s', obj.id, to_str_typed(obj)) if not is_jira_enabled(): @@ -702,9 +710,7 @@ def add_jira_issue(obj, *args, **kwargs): if not is_jira_configured_and_enabled(obj): message = 
'Object %s cannot be pushed to JIRA as there is no JIRA configuration for %s.' % (obj.id, to_str_typed(obj)) - logger.error(message) - log_jira_alert(message, obj) - return False + return failure_to_add_message(message, None, obj) jira_project = get_jira_project(obj) jira_instance = get_jira_instance(obj) @@ -719,19 +725,23 @@ def add_jira_issue(obj, *args, **kwargs): logger.warning("The JIRA issue will NOT be created.") return False logger.debug('Trying to create a new JIRA issue for %s...', to_str_typed(obj)) - meta = None + # Attempt to get the jira connection try: JIRAError.log_to_tempfile = False jira = get_jira_connection(jira_instance) - - labels = get_labels(obj) + get_tags(obj) - if labels: - labels = list(dict.fromkeys(labels)) # de-dup - - duedate = None - if System_Settings.objects.get().enable_finding_sla: - duedate = obj.sla_deadline() - + except Exception as e: + message = f"The following jira instance could not be connected: {jira_instance} - {e.text}" + return failure_to_add_message(message, e, obj) + # Set the list of labels to set on the jira issue + labels = get_labels(obj) + get_tags(obj) + if labels: + labels = list(dict.fromkeys(labels)) # de-dup + # Determine what due date to set on the jira issue + duedate = None + if System_Settings.objects.get().enable_finding_sla: + duedate = obj.sla_deadline() + # Set the fields that will compose the jira issue + try: issuetype_fields = get_issuetype_fields(jira, jira_project.project_key, jira_instance.default_issue_type) fields = prepare_jira_issue_fields( project_key=jira_project.project_key, @@ -747,16 +757,40 @@ def add_jira_issue(obj, *args, **kwargs): duedate=duedate, issuetype_fields=issuetype_fields, default_assignee=jira_project.default_assignee) - + except TemplateDoesNotExist as e: + message = f"Failed to find a jira issue template to be used - {e}" + return failure_to_add_message(message, e, obj) + except Exception as e: + message = f"Failed to fetch fields for {jira_instance.default_issue_type} under project {jira_project.project_key} - {e}" + return failure_to_add_message(message, e, obj) + # Create a new issue in Jira with the fields set in the last step + try: logger.debug('sending fields to JIRA: %s', fields) new_issue = jira.create_issue(fields) + logger.debug('saving JIRA_Issue for %s finding %s', new_issue.key, obj.id) + j_issue = JIRA_Issue(jira_id=new_issue.id, jira_key=new_issue.key, jira_project=jira_project) + j_issue.set_obj(obj) + j_issue.jira_creation = timezone.now() + j_issue.jira_change = timezone.now() + j_issue.save() + jira.issue(new_issue.id) + logger.info('Created the following jira issue for %d:%s', obj.id, to_str_typed(obj)) + except Exception as e: + message = f"Failed to create jira issue with the following payload: {fields} - {e}" + return failure_to_add_message(message, e, obj) + # Attempt to set a default assignee + try: if jira_project.default_assignee: created_assignee = str(new_issue.get_field('assignee')) logger.debug("new issue created with assignee %s", created_assignee) if created_assignee != jira_project.default_assignee: jira.assign_issue(new_issue.key, jira_project.default_assignee) - - # Upload dojo finding screenshots to Jira + except Exception as e: + message = f"Failed to assign the default user: {jira_project.default_assignee} - {e}" + # Do not return here as this should be a soft failure that should be logged + failure_to_add_message(message, e, obj) + # Upload dojo finding screenshots to Jira + try: findings = [obj] if isinstance(obj, Finding_Group): findings = 
obj.findings.all() @@ -771,7 +805,22 @@ def add_jira_issue(obj, *args, **kwargs): settings.MEDIA_ROOT + '/' + pic) except FileNotFoundError as e: logger.info(e) - + except Exception as e: + message = f"Failed to attach attachments to the jira issue: {e}" + # Do not return here as this should be a soft failure that should be logged + failure_to_add_message(message, e, obj) + # Add any notes that already exist in the finding to the JIRA + try: + for find in findings: + if find.notes.all(): + for note in find.notes.all().reverse(): + add_comment(obj, note) + except Exception as e: + message = f"Failed to add notes to the jira ticket: {e}" + # Do not return here as this should be a soft failure that should be logged + failure_to_add_message(message, e, obj) + # Determine whether to assign this new jira issue to a mapped epic + try: if jira_project.enable_engagement_epic_mapping: eng = obj.test.engagement logger.debug('Adding to EPIC Map: %s', eng.name) @@ -780,36 +829,11 @@ def add_jira_issue(obj, *args, **kwargs): add_issues_to_epic(jira, obj, epic_id=epic.jira_id, issue_keys=[str(new_issue.id)], ignore_epics=True) else: logger.info('The following EPIC does not exist: %s', eng.name) + except Exception as e: + message = f"Failed to assign jira issue to existing epic: {e}" + return failure_to_add_message(message, e, obj) - # only link the new issue if it was successfully created, incl attachments and epic link - logger.debug('saving JIRA_Issue for %s finding %s', new_issue.key, obj.id) - j_issue = JIRA_Issue( - jira_id=new_issue.id, jira_key=new_issue.key, jira_project=jira_project) - j_issue.set_obj(obj) - - j_issue.jira_creation = timezone.now() - j_issue.jira_change = timezone.now() - j_issue.save() - jira.issue(new_issue.id) - - logger.info('Created the following jira issue for %d:%s', obj.id, to_str_typed(obj)) - - # Add any notes that already exist in the finding to the JIRA - for find in findings: - if find.notes.all(): - for note in find.notes.all().reverse(): - add_comment(obj, note) - - return True - except TemplateDoesNotExist as e: - logger.exception(e) - log_jira_alert(str(e), obj) - return False - except JIRAError as e: - logger.exception(e) - logger.error("jira_meta for project: %s and url: %s meta: %s", jira_project.project_key, jira_project.jira_instance.url, json.dumps(meta, indent=4)) # this is None safe - log_jira_alert(e.text, obj) - return False + return True # we need two separate celery tasks due to the decorators we're using to map to/from ids @@ -831,6 +855,13 @@ def update_jira_issue_for_finding_group(finding_group, *args, **kwargs): def update_jira_issue(obj, *args, **kwargs): + def failure_to_update_message(message: str, exception: Exception, obj: Any) -> bool: + if exception: + logger.exception(exception) + logger.error(message) + log_jira_alert(message, obj) + return False + logger.debug('trying to update a linked jira issue for %d:%s', obj.id, to_str_typed(obj)) if not is_jira_enabled(): @@ -841,21 +872,22 @@ def update_jira_issue(obj, *args, **kwargs): if not is_jira_configured_and_enabled(obj): message = 'Object %s cannot be pushed to JIRA as there is no JIRA configuration for %s.' 
% (obj.id, to_str_typed(obj)) - logger.error(message) - log_jira_alert(message, obj) - return False + return failure_to_update_message(message, None, obj) j_issue = obj.jira_issue - meta = None try: JIRAError.log_to_tempfile = False jira = get_jira_connection(jira_instance) issue = jira.issue(j_issue.jira_id) - - labels = get_labels(obj) + get_tags(obj) - if labels: - labels = list(dict.fromkeys(labels)) # de-dup - + except Exception as e: + message = f"The following jira instance could not be connected: {jira_instance} - {e}" + return failure_to_update_message(message, e, obj) + # Set the list of labels to set on the jira issue + labels = get_labels(obj) + get_tags(obj) + if labels: + labels = list(dict.fromkeys(labels)) # de-dup + # Set the fields that will compose the jira issue + try: issuetype_fields = get_issuetype_fields(jira, jira_project.project_key, jira_instance.default_issue_type) fields = prepare_jira_issue_fields( project_key=jira_project.project_key, @@ -868,26 +900,38 @@ def update_jira_issue(obj, *args, **kwargs): # Do not update the priority in jira after creation as this could have changed in jira, but should not change in dojo # priority_name=jira_priority(obj), issuetype_fields=issuetype_fields) - + except Exception as e: + message = f"Failed to fetch fields for {jira_instance.default_issue_type} under project {jira_project.project_key} - {e}" + return failure_to_update_message(message, e, obj) + # Update the issue in jira + try: logger.debug('sending fields to JIRA: %s', fields) - issue.update( summary=fields['summary'], description=fields['description'], # Do not update the priority in jira after creation as this could have changed in jira, but should not change in dojo # priority=fields['priority'], fields=fields) - + j_issue.jira_change = timezone.now() + j_issue.save() + except Exception as e: + message = f"Failed to update the jira issue with the following payload: {fields} - {e}" + return failure_to_update_message(message, e, obj) + # Update the status in jira + try: push_status_to_jira(obj, jira_instance, jira, issue) - - # Upload dojo finding screenshots to Jira + except Exception as e: + message = f"Failed to update the jira issue status - {e}" + return failure_to_update_message(message, e, obj) + # Upload dojo finding screenshots to Jira + try: findings = [obj] if isinstance(obj, Finding_Group): findings = obj.findings.all() for find in findings: for pic in get_file_images(find): - # It doesn't look like the celery cotainer has anything in the media + # It doesn't look like the celery container has anything in the media # folder. Has this feature ever worked? 
try: jira_attachment( @@ -895,7 +939,12 @@ def update_jira_issue(obj, *args, **kwargs): settings.MEDIA_ROOT + '/' + pic) except FileNotFoundError as e: logger.info(e) - + except Exception as e: + message = f"Failed to attach attachments to the jira issue: {e}" + # Do not return here as this should be a soft failure that should be logged + failure_to_update_message(message, e, obj) + # Determine whether to assign this new jira issue to a mapped epic + try: if jira_project.enable_engagement_epic_mapping: eng = find.test.engagement logger.debug('Adding to EPIC Map: %s', eng.name) @@ -904,20 +953,11 @@ def update_jira_issue(obj, *args, **kwargs): add_issues_to_epic(jira, obj, epic_id=epic.jira_id, issue_keys=[str(j_issue.jira_id)], ignore_epics=True) else: logger.info('The following EPIC does not exist: %s', eng.name) + except Exception as e: + message = f"Failed to assign jira issue to existing epic: {e}" + return failure_to_update_message(message, e, obj) - j_issue.jira_change = timezone.now() - j_issue.save() - - logger.debug('Updated the following linked jira issue for %d:%s', find.id, find.title) - return True - - except JIRAError as e: - logger.exception(e) - logger.error("jira_meta for project: %s and url: %s meta: %s", jira_project.project_key, jira_project.jira_instance.url, json.dumps(meta, indent=4)) # this is None safe - if issue_from_jira_is_active(issue): - # Only alert if the upstream JIRA is active, we don't care about closed issues - log_jira_alert(e.text, obj) - return False + return True def get_jira_issue_from_jira(find): From 434d231fec1223d51c4c208002580f6bd2511862 Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 11 Mar 2024 13:55:35 -0500 Subject: [PATCH 05/11] Product Metrics: Correct week to week charts (#9695) * Metrics: Sane ordering of product metric charts * Separate closed metric calculation from open * Reorder some things * Removing old code comments --- dojo/product/views.py | 151 +++++++++++++++++---------------- dojo/static/dojo/js/metrics.js | 6 -- 2 files changed, 77 insertions(+), 80 deletions(-) diff --git a/dojo/product/views.py b/dojo/product/views.py index ee7c3b35e8..59edb5b075 100755 --- a/dojo/product/views.py +++ b/dojo/product/views.py @@ -323,15 +323,15 @@ def finding_querys(request, prod): end_date = timezone.now() week = end_date - timedelta(days=7) # seven days and /newer are considered "new" - filters['accepted'] = findings_qs.filter(finding_helper.ACCEPTED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) + filters['accepted'] = findings_qs.filter(finding_helper.ACCEPTED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") filters['verified'] = findings_qs.filter(finding_helper.VERIFIED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") filters['new_verified'] = findings_qs.filter(finding_helper.VERIFIED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") - filters['open'] = findings_qs.filter(finding_helper.OPEN_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) - filters['inactive'] = findings_qs.filter(finding_helper.INACTIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) - filters['closed'] = findings_qs.filter(finding_helper.CLOSED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) - filters['false_positive'] = findings_qs.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) - filters['out_of_scope'] = 
findings_qs.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]) - filters['all'] = findings_qs + filters['open'] = findings_qs.filter(finding_helper.OPEN_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") + filters['inactive'] = findings_qs.filter(finding_helper.INACTIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") + filters['closed'] = findings_qs.filter(finding_helper.CLOSED_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") + filters['false_positive'] = findings_qs.filter(finding_helper.FALSE_POSITIVE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") + filters['out_of_scope'] = findings_qs.filter(finding_helper.OUT_OF_SCOPE_FINDINGS_QUERY).filter(date__range=[start_date, end_date]).order_by("date") + filters['all'] = findings_qs.order_by("date") filters['open_vulns'] = findings_qs.filter(finding_helper.OPEN_FINDINGS_QUERY).filter( cwe__isnull=False, ).order_by('cwe').values( @@ -476,6 +476,7 @@ def view_product_metrics(request, pid): add_breadcrumb(parent=prod, top_level=False, request=request) + # An ordered dict does not make sense here. open_close_weekly = OrderedDict() severity_weekly = OrderedDict() critical_weekly = OrderedDict() @@ -483,81 +484,83 @@ def view_product_metrics(request, pid): medium_weekly = OrderedDict() open_objs_by_severity = get_zero_severity_level() + closed_objs_by_severity = get_zero_severity_level() accepted_objs_by_severity = get_zero_severity_level() - for v in filters.get('open', None): - iso_cal = v.date.isocalendar() - x = iso_to_gregorian(iso_cal[0], iso_cal[1], 1) - y = x.strftime("%m/%d
%Y
") - x = (tcalendar.timegm(x.timetuple()) * 1000) - if x not in critical_weekly: - critical_weekly[x] = {'count': 0, 'week': y} - if x not in high_weekly: - high_weekly[x] = {'count': 0, 'week': y} - if x not in medium_weekly: - medium_weekly[x] = {'count': 0, 'week': y} - - if x in open_close_weekly: - if v.mitigated: - open_close_weekly[x]['closed'] += 1 + for finding in filters.get("all", []): + iso_cal = finding.date.isocalendar() + date = iso_to_gregorian(iso_cal[0], iso_cal[1], 1) + html_date = date.strftime("%m/%d
%Y
") + unix_timestamp = (tcalendar.timegm(date.timetuple()) * 1000) + + # Open findings + if finding in filters.get("open", []): + if unix_timestamp not in critical_weekly: + critical_weekly[unix_timestamp] = {'count': 0, 'week': html_date} + if unix_timestamp not in high_weekly: + high_weekly[unix_timestamp] = {'count': 0, 'week': html_date} + if unix_timestamp not in medium_weekly: + medium_weekly[unix_timestamp] = {'count': 0, 'week': html_date} + + if unix_timestamp in open_close_weekly: + open_close_weekly[unix_timestamp]['open'] += 1 else: - open_close_weekly[x]['open'] += 1 - else: - if v.mitigated: - open_close_weekly[x] = {'closed': 1, 'open': 0, 'accepted': 0} - else: - open_close_weekly[x] = {'closed': 0, 'open': 1, 'accepted': 0} - open_close_weekly[x]['week'] = y + open_close_weekly[unix_timestamp] = {'closed': 0, 'open': 1, 'accepted': 0} + open_close_weekly[unix_timestamp]['week'] = html_date - if view == 'Finding': - severity = v.severity - elif view == 'Endpoint': - severity = v.finding.severity + if view == 'Finding': + severity = finding.severity + elif view == 'Endpoint': + severity = finding.finding.severity - if x in severity_weekly: - if severity in severity_weekly[x]: - severity_weekly[x][severity] += 1 + if unix_timestamp in severity_weekly: + if severity in severity_weekly[unix_timestamp]: + severity_weekly[unix_timestamp][severity] += 1 + else: + severity_weekly[unix_timestamp][severity] = 1 else: - severity_weekly[x][severity] = 1 - else: - severity_weekly[x] = get_zero_severity_level() - severity_weekly[x][severity] = 1 - severity_weekly[x]['week'] = y + severity_weekly[unix_timestamp] = get_zero_severity_level() + severity_weekly[unix_timestamp][severity] = 1 + severity_weekly[unix_timestamp]['week'] = html_date - if severity == 'Critical': - if x in critical_weekly: - critical_weekly[x]['count'] += 1 - else: - critical_weekly[x] = {'count': 1, 'week': y} - elif severity == 'High': - if x in high_weekly: - high_weekly[x]['count'] += 1 + if severity == 'Critical': + if unix_timestamp in critical_weekly: + critical_weekly[unix_timestamp]['count'] += 1 + else: + critical_weekly[unix_timestamp] = {'count': 1, 'week': html_date} + elif severity == 'High': + if unix_timestamp in high_weekly: + high_weekly[unix_timestamp]['count'] += 1 + else: + high_weekly[unix_timestamp] = {'count': 1, 'week': html_date} + elif severity == 'Medium': + if unix_timestamp in medium_weekly: + medium_weekly[unix_timestamp]['count'] += 1 + else: + medium_weekly[unix_timestamp] = {'count': 1, 'week': html_date} + # Optimization: count severity level on server side + if open_objs_by_severity.get(finding.severity) is not None: + open_objs_by_severity[finding.severity] += 1 + # Close findings + if finding in filters.get("closed", []): + if unix_timestamp in open_close_weekly: + open_close_weekly[unix_timestamp]['closed'] += 1 else: - high_weekly[x] = {'count': 1, 'week': y} - elif severity == 'Medium': - if x in medium_weekly: - medium_weekly[x]['count'] += 1 + open_close_weekly[unix_timestamp] = {'closed': 1, 'open': 0, 'accepted': 0} + open_close_weekly[unix_timestamp]['week'] = html_date + # Optimization: count severity level on server side + if closed_objs_by_severity.get(finding.severity) is not None: + closed_objs_by_severity[finding.severity] += 1 + # Risk Accepted findings + if finding in filters.get("accepted", []): + if unix_timestamp in open_close_weekly: + open_close_weekly[unix_timestamp]['accepted'] += 1 else: - medium_weekly[x] = {'count': 1, 'week': y} - - # Optimization: count 
severity level on server side - if open_objs_by_severity.get(v.severity) is not None: - open_objs_by_severity[v.severity] += 1 - - for a in filters.get('accepted', None): - iso_cal = a.date.isocalendar() - x = iso_to_gregorian(iso_cal[0], iso_cal[1], 1) - y = x.strftime("%m/%d
%Y
") - x = (tcalendar.timegm(x.timetuple()) * 1000) - - if x in open_close_weekly: - open_close_weekly[x]['accepted'] += 1 - else: - open_close_weekly[x] = {'closed': 0, 'open': 0, 'accepted': 1} - open_close_weekly[x]['week'] = y - - if accepted_objs_by_severity.get(a.severity) is not None: - accepted_objs_by_severity[a.severity] += 1 + open_close_weekly[unix_timestamp] = {'closed': 0, 'open': 0, 'accepted': 1} + open_close_weekly[unix_timestamp]['week'] = html_date + # Optimization: count severity level on server side + if accepted_objs_by_severity.get(finding.severity) is not None: + accepted_objs_by_severity[finding.severity] += 1 test_data = {} for t in tests: @@ -584,7 +587,7 @@ def view_product_metrics(request, pid): 'inactive_objs': filters.get('inactive', None), 'inactive_objs_by_severity': sum_by_severity_level(filters.get('inactive')), 'closed_objs': filters.get('closed', None), - 'closed_objs_by_severity': sum_by_severity_level(filters.get('closed')), + 'closed_objs_by_severity': closed_objs_by_severity, 'false_positive_objs': filters.get('false_positive', None), 'false_positive_objs_by_severity': sum_by_severity_level(filters.get('false_positive')), 'out_of_scope_objs': filters.get('out_of_scope', None), diff --git a/dojo/static/dojo/js/metrics.js b/dojo/static/dojo/js/metrics.js index 392ad2ac6f..2e95555d37 100644 --- a/dojo/static/dojo/js/metrics.js +++ b/dojo/static/dojo/js/metrics.js @@ -1618,8 +1618,6 @@ function open_close_weekly(opened, closed, accepted, ticks) { var options = { xaxes: [{ ticks: ticks, - transform: function(v) { return -v; }, - inverseTransform: function(v) { return -v; } }], yaxes: [{ min: 0 @@ -1661,8 +1659,6 @@ function severity_weekly(critical, high, medium, low, info, ticks) { var options = { xaxes: [{ ticks: ticks, - transform: function(v) { return -v; }, - inverseTransform: function(v) { return -v; } }], yaxes: [{ min: 0 @@ -1713,8 +1709,6 @@ function severity_counts_weekly(critical, high, medium, ticks) { var options = { xaxes: [{ ticks: ticks, - transform: function(v) { return -v; }, - inverseTransform: function(v) { return -v; } }], yaxes: [{ min: 0 From 34ff4527d9240a0c8c83f8846bc85c919fcb714c Mon Sep 17 00:00:00 2001 From: manuelsommer <47991713+manuel-sommer@users.noreply.github.com> Date: Mon, 11 Mar 2024 19:56:30 +0100 Subject: [PATCH 06/11] remove flotaxis, #9700 (#9709) --- components/yarn.lock | 4 ---- 1 file changed, 4 deletions(-) diff --git a/components/yarn.lock b/components/yarn.lock index ffe72a3aaf..d3d65c363f 100644 --- a/components/yarn.lock +++ b/components/yarn.lock @@ -538,10 +538,6 @@ fast-levenshtein@~2.0.6: resolved "https://registry.yarnpkg.com/fast-levenshtein/-/fast-levenshtein-2.0.6.tgz#3d8a5c66883a16a30ca8643e851f19baa7797917" integrity sha512-DCXu6Ifhqcks7TZKY3Hxp3y6qphY5SJZmrWMDrKcERSOXWQdMhU9Ig/PYrzyw/ul9jOIyh0N4M0tbC5hodg8dw== -flot-axis@markrcote/flot-axislabels#*: - version "0.0.0" - resolved "https://codeload.github.com/markrcote/flot-axislabels/tar.gz/a181e09d04d120d05e5bc2baaa8738b5b3670428" - flot@flot/flot#~0.8.3: version "0.8.3" resolved "https://codeload.github.com/flot/flot/tar.gz/453b017cc5acfd75e252b93e8635f57f4196d45d" From 63e96d26a754781584d279bb265ace2adbde99df Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 11 Mar 2024 15:31:09 -0500 Subject: [PATCH 07/11] Endpoints: Remove "verified" query on listing pages (#9717) * Endpoints: Remove "verified" query on listing pages * Rename active findings to open findings in the view host page * Update 
endpoint / hosts counts --- dojo/endpoint/views.py | 18 ++++-------------- dojo/models.py | 2 +- dojo/product/views.py | 7 +++++-- dojo/templates/base.html | 6 ++---- dojo/templates/dojo/endpoints.html | 15 ++++++--------- dojo/templates/dojo/product.html | 4 ---- dojo/templates/dojo/view_endpoint.html | 8 ++++---- dojo/utils.py | 7 ++++++- 8 files changed, 28 insertions(+), 39 deletions(-) diff --git a/dojo/endpoint/views.py b/dojo/endpoint/views.py index c2b491eb1a..0f5b7676c7 100644 --- a/dojo/endpoint/views.py +++ b/dojo/endpoint/views.py @@ -33,12 +33,6 @@ def process_endpoints_view(request, host_view=False, vulnerable=False): if vulnerable: endpoints = Endpoint.objects.filter( - finding__active=True, - finding__verified=True, - finding__out_of_scope=False, - finding__mitigated__isnull=True, - finding__false_p=False, - finding__duplicate=False, status_endpoint__mitigated=False, status_endpoint__false_positive=False, status_endpoint__out_of_scope=False, @@ -124,12 +118,12 @@ def process_endpoint_view(request, eid, host_view=False): endpoints = endpoint.host_endpoints() endpoint_metadata = None all_findings = endpoint.host_findings() - active_verified_findings = endpoint.host_active_verified_findings() + active_findings = endpoint.host_active_findings() else: endpoints = None endpoint_metadata = dict(endpoint.endpoint_meta.values_list('name', 'value')) all_findings = endpoint.findings.all() - active_verified_findings = endpoint.active_verified_findings() + active_findings = endpoint.active_findings() if all_findings: start_date = timezone.make_aware(datetime.combine(all_findings.last().date, datetime.min.time())) @@ -148,12 +142,8 @@ def process_endpoint_view(request, eid, host_view=False): monthly_counts = get_period_counts(all_findings, closed_findings, None, months_between, start_date, relative_delta='months') - paged_findings = get_page_items(request, active_verified_findings, 25) - - vulnerable = False - - if active_verified_findings.count() != 0: - vulnerable = True + paged_findings = get_page_items(request, active_findings, 25) + vulnerable = active_findings.count() != 0 product_tab = Product_Tab(endpoint.product, "Host" if host_view else "Endpoint", tab="endpoints") return render(request, diff --git a/dojo/models.py b/dojo/models.py index 98922853f4..be7aa51f9e 100755 --- a/dojo/models.py +++ b/dojo/models.py @@ -1124,7 +1124,7 @@ def endpoint_count(self): endpoints = getattr(self, 'active_endpoints', None) if endpoints: return len(self.active_endpoints) - return None + return 0 def open_findings(self, start_date=None, end_date=None): if start_date is None or end_date is None: diff --git a/dojo/product/views.py b/dojo/product/views.py index 59edb5b075..6291540342 100755 --- a/dojo/product/views.py +++ b/dojo/product/views.py @@ -112,8 +112,11 @@ def prefetch_for_product(prods): prefetched_prods = prefetched_prods.prefetch_related('members') prefetched_prods = prefetched_prods.prefetch_related('prod_type__members') active_endpoint_query = Endpoint.objects.filter( - finding__active=True, - finding__mitigated__isnull=True).distinct() + status_endpoint__mitigated=False, + status_endpoint__false_positive=False, + status_endpoint__out_of_scope=False, + status_endpoint__risk_accepted=False, + ).distinct() prefetched_prods = prefetched_prods.prefetch_related( Prefetch('endpoint_set', queryset=active_endpoint_query, to_attr='active_endpoints')) prefetched_prods = prefetched_prods.prefetch_related('tags') diff --git a/dojo/templates/base.html b/dojo/templates/base.html index 
f4043d42e3..2f1cace966 100644 --- a/dojo/templates/base.html +++ b/dojo/templates/base.html @@ -765,10 +765,8 @@

diff --git a/dojo/templates/dojo/endpoints.html b/dojo/templates/dojo/endpoints.html index ecaaef6d52..6597e1f747 100644 --- a/dojo/templates/dojo/endpoints.html +++ b/dojo/templates/dojo/endpoints.html @@ -87,7 +87,7 @@

{% comment %} The display field is translated in the function. No need to translate here as well{% endcomment %} {% dojo_sort request 'Product' 'product' 'asc' %} {% endif %} - Active Verified Findings + Active (Verified) Findings Status @@ -117,13 +117,10 @@

{% endif %} {% if host_view %} - {{ e.host_active_verified_findings_count }} + {{ e.host_active_findings_count }} ({{ e.host_active_verified_findings_count }}) {% else %} - {% if e.active_verified_findings_count > 0 %} - {{ e.active_verified_findings_count }} - {% else %} - 0 - {% endif %} + {{ e.active_findings_count }} + ({{ e.active_verified_findings_count }}) {% endif %} @@ -133,10 +130,10 @@

{% if e.mitigated %} Mitigated {% else %} - {% if e.active_verified_findings_count > 0 %} + {% if e.active_findings_count > 0 %} Vulnerable {% else %} - No active verified findings + No active findings {% endif %} {% endif %} {% endif %} diff --git a/dojo/templates/dojo/product.html b/dojo/templates/dojo/product.html index e328557c87..d022812de8 100644 --- a/dojo/templates/dojo/product.html +++ b/dojo/templates/dojo/product.html @@ -248,12 +248,8 @@

{% endif %} - {% if prod.endpoint_count %} {{ prod.endpoint_host_count }} / {{ prod.endpoint_count }} - {% else %} - 0 - {% endif %} {% if prod.product_manager %} diff --git a/dojo/templates/dojo/view_endpoint.html b/dojo/templates/dojo/view_endpoint.html index 30d974b8a6..d09261e5ec 100644 --- a/dojo/templates/dojo/view_endpoint.html +++ b/dojo/templates/dojo/view_endpoint.html @@ -103,7 +103,7 @@

  - Finding Age ({{ all_findings|length|apnumber }} verified + Finding Age ({{ all_findings|length|apnumber }} finding{{ all_findings|length|pluralize }})
@@ -178,9 +178,9 @@

{% if item %} {% if item.vulnerable %} - + {% else %} - + {% endif %}  {{ item|url_shortner }}{% if endpoint.is_broken %} 🚩{% endif %} {% endif %} @@ -248,7 +248,7 @@

Additional Information
-

Active Verified Findings

+

Open Findings

{% if findings %}
diff --git a/dojo/utils.py b/dojo/utils.py
index 89519e7562..e3f52ca224 100644
--- a/dojo/utils.py
+++ b/dojo/utils.py
@@ -1576,7 +1576,12 @@ def __init__(self, product, title=None, tab=None):
             active=True,
             mitigated__isnull=True).count()
         active_endpoints = Endpoint.objects.filter(
-            product=self.product, finding__active=True, finding__mitigated__isnull=True)
+            product=self.product,
+            status_endpoint__mitigated=False,
+            status_endpoint__false_positive=False,
+            status_endpoint__out_of_scope=False,
+            status_endpoint__risk_accepted=False,
+        )
         self.endpoints_count = active_endpoints.distinct().count()
         self.endpoint_hosts_count = active_endpoints.values('host').distinct().count()
         self.benchmark_type = Benchmark_Type.objects.filter(

From 9b2ef5093bcd4d5d6cc7ccf0b7aa9952369369d2 Mon Sep 17 00:00:00 2001
From: Felix Hernandez
Date: Mon, 11 Mar 2024 14:33:52 -0600
Subject: [PATCH 08/11] Added Checkmarx One Parser (#9715)

* Added Checkmarx One Parser
* added cweID
* changing cve by cwe
* replace cve by cwe

---
 .../parsers/file/checkmarx_one.md             |   8 +
 dojo/settings/settings.dist.py                |   1 +
 dojo/tools/checkmarx_one/__init__.py          |   0
 dojo/tools/checkmarx_one/parser.py            | 110 +++++++
 .../scans/checkmarx_one/checkmarx_one.json    | 284 ++++++++++++++++++
 .../scans/checkmarx_one/many_findings.json    | 258 ++++++++++++++++
 .../scans/checkmarx_one/no_findings.json      |   6 +
 unittests/tools/test_checkmarx_one_parser.py  |  47 +++
 8 files changed, 714 insertions(+)
 create mode 100644 docs/content/en/integrations/parsers/file/checkmarx_one.md
 create mode 100644 dojo/tools/checkmarx_one/__init__.py
 create mode 100644 dojo/tools/checkmarx_one/parser.py
 create mode 100644 unittests/scans/checkmarx_one/checkmarx_one.json
 create mode 100644 unittests/scans/checkmarx_one/many_findings.json
 create mode 100644 unittests/scans/checkmarx_one/no_findings.json
 create mode 100644 unittests/tools/test_checkmarx_one_parser.py

diff --git a/docs/content/en/integrations/parsers/file/checkmarx_one.md b/docs/content/en/integrations/parsers/file/checkmarx_one.md
new file mode 100644
index 0000000000..1d5a07f0ca
--- /dev/null
+++ b/docs/content/en/integrations/parsers/file/checkmarx_one.md
@@ -0,0 +1,8 @@
+---
+title: "Checkmarx One Scan"
+toc_hide: true
+---
+Import JSON Checkmarx One scanner reports
+
+### Sample Scan Data
+Sample Checkmarx One scans can be found [here](https://github.com/DefectDojo/django-DefectDojo/tree/master/unittests/scans/checkmarx_one).
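+
+### Importing via the API
+A report can be uploaded through DefectDojo's generic import-scan endpoint like any other scan type; a minimal sketch (host, API key and engagement id below are placeholders):
+
+```python
+import requests
+
+# Upload a Checkmarx One JSON report to DefectDojo
+with open("checkmarx_one.json", "rb") as report:
+    response = requests.post(
+        "https://defectdojo.example.com/api/v2/import-scan/",
+        headers={"Authorization": "Token <your-api-key>"},
+        data={"scan_type": "Checkmarx One Scan", "engagement": 1},
+        files={"file": report},
+    )
+response.raise_for_status()
+```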
\ No newline at end of file
diff --git a/dojo/settings/settings.dist.py b/dojo/settings/settings.dist.py
index 28a56dede6..a5be3612dd 100644
--- a/dojo/settings/settings.dist.py
+++ b/dojo/settings/settings.dist.py
@@ -1362,6 +1362,7 @@ def saml2_attrib_map_format(dict):
     'CargoAudit Scan': DEDUPE_ALGO_HASH_CODE,
     'Checkmarx Scan detailed': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL,
     'Checkmarx Scan': DEDUPE_ALGO_HASH_CODE,
+    'Checkmarx One Scan': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL,
     'Checkmarx OSA': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL_OR_HASH_CODE,
     'Codechecker Report native': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL,
     'Coverity API': DEDUPE_ALGO_UNIQUE_ID_FROM_TOOL,
diff --git a/dojo/tools/checkmarx_one/__init__.py b/dojo/tools/checkmarx_one/__init__.py
new file mode 100644
index 0000000000..e69de29bb2
diff --git a/dojo/tools/checkmarx_one/parser.py b/dojo/tools/checkmarx_one/parser.py
new file mode 100644
index 0000000000..699ac64e42
--- /dev/null
+++ b/dojo/tools/checkmarx_one/parser.py
@@ -0,0 +1,110 @@
+import datetime
+import json
+from dateutil import parser
+from dojo.models import Finding
+
+
+class CheckmarxOneParser(object):
+    def get_scan_types(self):
+        return ["Checkmarx One Scan"]
+
+    def get_label_for_scan_types(self, scan_type):
+        return scan_type
+
+    def get_description_for_scan_types(self, scan_type):
+        return "Checkmarx One Scan"
+
+    def _parse_date(self, value):
+        if isinstance(value, str):
+            return parser.parse(value)
+        elif isinstance(value, dict) and isinstance(value.get("seconds"), int):
+            return datetime.datetime.utcfromtimestamp(value.get("seconds"))
+        else:
+            return None
+
+    def get_findings(self, file, test):
+        data = json.load(file)
+        findings = []
+        if "vulnerabilities" in data:
+            results = data.get("vulnerabilities", [])
+            for result in results:
+                id = result.get("identifiers")[0].get("value")
+                cwe = None
+                if 'vulnerabilityDetails' in result:
+                    cwe = result.get("vulnerabilityDetails").get("cweId")
+                severity = result.get("severity")
+                locations_uri = result.get("location").get("file")
+                locations_startLine = result.get("location").get("start_line")
+                locations_endLine = result.get("location").get("end_line")
+                finding = Finding(
+                    unique_id_from_tool=id,
+                    file_path=locations_uri,
+                    title=id + "_" + locations_uri,
+                    test=test,
+                    cwe=cwe,
+                    severity=severity,
+                    description="**id**: " + str(id) + "\n"
+                    + "**uri**: " + locations_uri + "\n"
+                    + "**startLine**: " + str(locations_startLine) + "\n"
+                    + "**endLine**: " + str(locations_endLine) + "\n",
+                    false_p=False,
+                    duplicate=False,
+                    out_of_scope=False,
+                    static_finding=True,
+                    dynamic_finding=False,
+                )
+                findings.append(finding)
+        elif "results" in data:
+            results = data.get("results", [])
+            for vulnerability in results:
+                result_type = vulnerability.get("type")
+                date = self._parse_date(vulnerability.get("firstFoundAt"))
+                cwe = None
+                if 'vulnerabilityDetails' in vulnerability:
+                    cwe = vulnerability.get("vulnerabilityDetails", {}).get("cweId")
+                if result_type == "sast":
+                    descriptionDetails = vulnerability.get("description")
+                    file_path = vulnerability.get("data").get("nodes")[0].get("fileName")
+                    finding = Finding(
+                        description=descriptionDetails,
+                        title=descriptionDetails,
+                        file_path=file_path,
+                        date=date,
+                        cwe=cwe,
+                        severity=vulnerability.get("severity").title(),
+                        test=test,
+                        static_finding=True,
+                    )
+                    if vulnerability.get("id"):
+                        finding.unique_id_from_tool = (
+                            vulnerability.get("id")
+                        )
+                    else:
+                        finding.unique_id_from_tool = str(
+                            vulnerability.get("similarityId")
+                        )
+                    findings.append(finding)
+                if result_type == "kics":
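+                    # kics (IaC) results carry their location in data.filename
+                    # (sast uses data.nodes[0].fileName); any state other than
+                    # "TO_VERIFY" is treated as verified further down.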
description = vulnerability.get("description") + file_path = vulnerability.get("data").get("filename") + finding = Finding( + title=f'{description}', + description=description, + date=date, + cwe=cwe, + severity=vulnerability.get("severity").title(), + verified=vulnerability.get("state") != "TO_VERIFY", + file_path=file_path, + test=test, + static_finding=True, + ) + if vulnerability.get("id"): + finding.unique_id_from_tool = vulnerability.get( + "id" + ) + else: + finding.unique_id_from_tool = str( + vulnerability.get("similarityId") + ) + findings.append(finding) + return findings diff --git a/unittests/scans/checkmarx_one/checkmarx_one.json b/unittests/scans/checkmarx_one/checkmarx_one.json new file mode 100644 index 0000000000..a9e432abf6 --- /dev/null +++ b/unittests/scans/checkmarx_one/checkmarx_one.json @@ -0,0 +1,284 @@ +{ + "scan": { + "end_time": "2024-01-18T09:12:43", + "analyzer": { + "id": "CxOne-SAST", + "name": "Checkmarx", + "url": "https://checkmarx.com/", + "vendor": { + "name": "Checkmarx" + }, + "version": "2.0.63" + }, + "scanner": { + "id": "CxOne-SAST", + "name": "Checkmarx", + "vendor": { + "name": "Checkmarx" + }, + "version": "2.0.63" + }, + "start_time": "2024-01-18T09:12:43", + "status": "success", + "type": "sast" + }, + "schema": "https://gitlab.com/gitlab-org/gitlab/-/raw/master/lib/gitlab/ci/parsers/security/validators/schemas/15.0.0/sast-report-format.json", + "version": "15.0.0", + "vulnerabilities": [ + { + "id": "Client_HTML5_Store_Sensitive_data_In_Web_Storage:/src/helpers/Constants.ts:450", + "category": "Checkmarx-sast", + "name": "Client_HTML5_Store_Sensitive_data_In_Web_Storage", + "message": "Client_HTML5_Store_Sensitive_data_In_Web_Storage@/src/helpers/Constants.ts:450", + "description": "The application stores data makeKey on the client, in an insecure manner, at line 115 of /src/helpers/Utility.ts.", + "cve": "Client_HTML5_Store_Sensitive_data_In_Web_Storage:/src/helpers/Constants.ts:450", + "severity": "Medium", + "confidence": "Medium", + "solution": "", + "scanner": { + "id": "Checkmarx-sast", + "name": "Checkmarx-sast", + "vendor": { + "name": "" + }, + "version": "" + }, + "identifiers": [ + { + "type": "cxOneScan", + "name": "CxOne Scan", + "url": "https://ast.checkmarx.net/projects/4c5703d8-dddf-11ee-8275-bb5b871f4ca1/scans?id=56efc3de-dddf-11ee-91f7-17d54222fb10\u0026branch=release%2FRC-6", + "value": "511341974" + } + ], + "links": [], + "tracking": { + "type": "source", + "items": [ + { + "signatures": [ + { + "algorithm": "sast-Algorithm ", + "value": "NA" + } + ], + "file": "/src/helpers/Constants.ts", + "end_line": 451, + "start_line": 450 + } + ] + }, + "flags": [], + "location": { + "file": "/src/helpers/Constants.ts", + "start_line": 450, + "end_line": 451, + "class": "" + } + }, + { + "id": "Client_HTML5_Store_Sensitive_data_In_Web_Storage:/src/helpers/Helper.ts:349", + "category": "Checkmarx-sast", + "name": "Client_HTML5_Store_Sensitive_data_In_Web_Storage", + "message": "Client_HTML5_Store_Sensitive_data_In_Web_Storage@/src/helpers/Helper.ts:349", + "description": "The application stores data Key on the client, in an insecure manner, at line 349 of /src/helpers/Helper.ts.", + "cve": "Client_HTML5_Store_Sensitive_data_In_Web_Storage:/src/helpers/Helper.ts:349", + "severity": "Medium", + "confidence": "Medium", + "solution": "", + "scanner": { + "id": "Checkmarx-sast", + "name": "Checkmarx-sast", + "vendor": { + "name": "" + }, + "version": "" + }, + "identifiers": [ + { + "type": "cxOneScan", + "name": "CxOne Scan", + "url": 
"https://ast.checkmarx.net/projects/7c649cf6-dde0-11ee-a703-43244b0a9879/scans?id=86fc33ea-dde0-11ee-ba5f-3beb4c589dd3\u0026branch=release%2FRC-6", + "value": "832413795" + } + ], + "links": [], + "tracking": { + "type": "source", + "items": [ + { + "signatures": [ + { + "algorithm": "sast-Algorithm ", + "value": "NA" + } + ], + "file": "/src/helpers/Helper.ts", + "end_line": 350, + "start_line": 339 + } + ] + }, + "flags": [], + "location": { + "file": "/src/helpers/Helper.ts", + "start_line": 349, + "end_line": 350, + "class": "" + } + }, + { + "id": "Use_Of_Hardcoded_Password:/src/pages/UserError_test.tsx:71", + "category": "Checkmarx-sast", + "name": "Use_Of_Hardcoded_Password", + "message": "Use_Of_Hardcoded_Password@/src/pages/UserError_test.tsx:71", + "description": "The application uses the hard-coded password \u0026#34;testPassword\u0026#34; for authentication purposes, either using it to verify users\u0026#39; identities, or to access another remote system. This password at line 71 of /src/pages/UserError_test.tsx appears in the code, implying it is accessible to anyone with source code access, and cannot be changed without rebuilding the application.\n\n", + "cve": "Use_Of_Hardcoded_Password:/src/pages/UserError_test.tsx:71", + "severity": "Low", + "confidence": "Low", + "solution": "", + "scanner": { + "id": "Checkmarx-sast", + "name": "Checkmarx-sast", + "vendor": { + "name": "" + }, + "version": "" + }, + "identifiers": [ + { + "type": "cxOneScan", + "name": "CxOne Scan", + "url": "https://ast.checkmarx.net/projects/53d5b99a-dde1-11ee-ab71-9be9755a4da6/scans?id=5e592014-dde1-11ee-8985-f37d989e23db\u0026branch=release%2FRC-6", + "value": "143486243" + } + ], + "links": [], + "tracking": { + "type": "source", + "items": [ + { + "signatures": [ + { + "algorithm": "sast-Algorithm ", + "value": "NA" + } + ], + "file": "/src/pages/UserError_test.tsx", + "end_line": 72, + "start_line": 71 + } + ] + }, + "flags": [], + "location": { + "file": "/src/pages/UserError_test.tsx", + "start_line": 71, + "end_line": 72, + "class": "" + } + }, + { + "id": "Client_Hardcoded_Domain:/public/index.html:32", + "category": "Checkmarx-sast", + "name": "Client_Hardcoded_Domain", + "message": "Client_Hardcoded_Domain@/public/index.html:32", + "description": "The JavaScript file imported in https://fonts.googleapis.com/icon?family=Material+Icons in /public/index.html at line 32 is from a remote domain, which may allow attackers to replace its contents with malicious code.", + "cve": "Client_Hardcoded_Domain:/public/index.html:32", + "severity": "Info", + "confidence": "Info", + "solution": "", + "scanner": { + "id": "Checkmarx-sast", + "name": "Checkmarx-sast", + "vendor": { + "name": "" + }, + "version": "" + }, + "identifiers": [ + { + "type": "cxOneScan", + "name": "CxOne Scan", + "url": "https://ast.checkmarx.net/projects/34480339-8f8c-4b68-b8fb-4eea09a2045d/scans?id=78adc5f1-0864-411e-b8d6-bfa134458bd8\u0026branch=release%2Fpilot-1", + "value": "2595392" + } + ], + "links": [], + "tracking": { + "type": "source", + "items": [ + { + "signatures": [ + { + "algorithm": "sast-Algorithm ", + "value": "NA" + } + ], + "file": "/public/index.html", + "end_line": 87, + "start_line": 32 + } + ] + }, + "flags": [], + "location": { + "file": "/public/index.html", + "start_line": 32, + "end_line": 87, + "class": "" + } + }, + { + "id": "Client_DOM_XSS:/src/app/App_test.tsx:744", + "category": "Checkmarx-sast", + "name": "Client_DOM_XSS", + "message": "Client_DOM_XSS@/src/app/App_test.tsx:744", + "description": 
"The method TrustMe embeds untrusted data in generated output with location, at line 298 of /src/app/App_test.tsx. This untrusted data is embedded into the output without proper sanitization or encoding, enabling an attacker to inject malicious code into the generated web-page.\n\n", + "cve": "Client_DOM_XSS:/src/app/App_test.tsx:744", + "severity": "Info", + "confidence": "Info", + "solution": "", + "scanner": { + "id": "Checkmarx-sast", + "name": "Checkmarx-sast", + "vendor": { + "name": "" + }, + "version": "" + }, + "identifiers": [ + { + "type": "cxOneScan", + "name": "CxOne Scan", + "url": "https://ast.checkmarx.net/projects/38ebbafc-dde2-11ee-ae0c-b72e7e0d42ae/scans?id=42ff549a-dde2-11ee-8c8c-83e0db45059d\u0026branch=release%2FRC-6", + "value": "836714351" + } + ], + "links": [], + "tracking": { + "type": "source", + "items": [ + { + "signatures": [ + { + "algorithm": "sast-Algorithm ", + "value": "NA" + } + ], + "file": "/src/app/App_test.tsx", + "end_line": 746, + "start_line": 744 + } + ] + }, + "flags": [], + "location": { + "file": "/src/app/App_test.tsx", + "start_line": 744, + "end_line": 746, + "class": "" + } + } + ] +} \ No newline at end of file diff --git a/unittests/scans/checkmarx_one/many_findings.json b/unittests/scans/checkmarx_one/many_findings.json new file mode 100644 index 0000000000..13a030e2e3 --- /dev/null +++ b/unittests/scans/checkmarx_one/many_findings.json @@ -0,0 +1,258 @@ +{ + "results": [ + { + "type": "kics", + "label": "IaC Security", + "id": "98727183", + "similarityId": "fbed62efe2786d647806451d0480f57b4bc08786633fb73c29579faee8f9d252", + "status": "RECURRENT", + "state": "TO_VERIFY", + "severity": "HIGH", + "created": "2023-11-21T10:07:38Z", + "firstFoundAt": "2022-12-26T09:31:48Z", + "foundAt": "2023-11-21T10:07:38Z", + "firstScanId": "79cd6248-ddcc-11ee-80c3-c34e822ea27f", + "description": "A user should be specified in the dockerfile, otherwise the image will run as root", + "descriptionHTML": "\u003cp\u003eA user should be specified in the dockerfile, otherwise the image will run as root\u003c/p\u003e\n", + "data": { + "queryId": "94d39580-ddcc-11ee-b570-27d2d85c4cb8 [Taken from query_id]", + "queryName": "Missing User Instruction", + "group": "Build Process [Taken from category]", + "line": 1, + "platform": "Dockerfile", + "issueType": "MissingAttribute", + "expectedValue": "The 'Dockerfile' should contain the 'USER' instruction", + "value": "The 'Dockerfile' does not contain any 'USER' instruction", + "filename": "/qe/testharness/Dockerfile" + }, + "comments": {}, + "vulnerabilityDetails": { + "cvss": {} + } + }, + { + "type": "kics", + "label": "IaC Security", + "id": "28307228", + "similarityId": "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", + "status": "RECURRENT", + "state": "TO_VERIFY", + "severity": "HIGH", + "created": "2023-11-21T10:07:38Z", + "firstFoundAt": "2022-12-26T09:31:48Z", + "foundAt": "2023-11-21T10:07:38Z", + "firstScanId": "811759c2-ddd7-11ee-9b56-d34cc93fb257", + "description": "A user should be specified in the dockerfile, otherwise the image will run as root", + "descriptionHTML": "\u003cp\u003eA user should be specified in the dockerfile, otherwise the image will run as root\u003c/p\u003e\n", + "data": { + "queryId": "5d2efac8-ddd8-11ee-9117-b34a238abecc [Taken from query_id]", + "queryName": "Missing User Instruction", + "group": "Build Process [Taken from category]", + "line": 1, + "platform": "Dockerfile", + "issueType": "MissingAttribute", + "expectedValue": "The 'Dockerfile' should contain 
the 'USER' instruction", + "value": "The 'Dockerfile' does not contain any 'USER' instruction", + "filename": "/qe/testharness/Dockerfile" + }, + "comments": {}, + "vulnerabilityDetails": { + "cvss": {} + } + }, + { + "type": "sast", + "label": "sast", + "id": "04894977", + "similarityId": "697307927", + "status": "RECURRENT", + "state": "TO_VERIFY", + "severity": "MEDIUM", + "created": "2023-11-21T09:16:10Z", + "firstFoundAt": "2022-03-17T14:45:41Z", + "foundAt": "2023-11-21T09:16:10Z", + "firstScanId": "9d120bda-ddd8-11ee-bd4c-8b5b82bf6c89", + "description": "Method getObject at line 96 of /shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java sends user information outside the application. This may constitute a Privacy Violation.\n\n", + "descriptionHTML": "\u003cp\u003eMethod getObject at line 96 of /shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java sends user information outside the application. This may constitute a Privacy Violation.\u003c/p\u003e\n", + "data": { + "queryId": 12956636075206043460, + "queryName": "Privacy_Violation", + "group": "Java_Medium_Threat", + "resultHash": "2417044825981779912395719508", + "languageName": "Java", + "nodes": [ + { + "id": "9823731082518796021644390089", + "line": 96, + "name": "secret", + "column": 48, + "length": 12, + "method": "getObject", + "nodeID": 55222, + "domType": "ParamDecl", + "fileName": "/shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java", + "fullName": "com.example.api.clients.ObjectsManagerUtil.getObject.secret", + "typeName": "String", + "methodLine": 96, + "definitions": "1" + }, + { + "id": "ahpeiL2gaeboi8aqueiv8liekah=", + "line": 48, + "name": "secret", + "column": 71, + "length": 12, + "method": "getObject", + "nodeID": 55222, + "domType": "UnknownReference", + "fileName": "/shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java", + "fullName": "com.example.api.clients.ObjectsManagerUtil.getObject.secret", + "typeName": "String", + "methodLine": 76, + "definitions": "1" + }, + { + "id": "Aewo6hui2ek5guNgaesie4ioPha=", + "line": 56, + "name": "error", + "column": 27, + "length": 12, + "method": "getObject", + "nodeID": 55222, + "domType": "MethodInvokeExpr", + "fileName": "/shared/src/main/java/com/example/api/clients/ObjectsManagerUtil.java", + "fullName": "com.example.api.clients.ObjectsManagerUtil.log.error", + "typeName": "error", + "methodLine": 96, + "definitions": "0" + } + ] + }, + "comments": {}, + "vulnerabilityDetails": { + "cweId": 359, + "cvss": {}, + "compliances": [ + "FISMA 2014", + "NIST SP 800-53", + "OWASP Top 10 2013", + "OWASP Top 10 2017", + "OWASP Top 10 2021", + "PCI DSS v3.2.1", + "ASD STIG 4.10" + ] + } + }, + { + "type": "kics", + "label": "IaC Security", + "id": "9930754", + "similarityId": "df0b5ce1f88f1af07e63731e0a9628920a008ea0ca4bbd117d75a3cdbdd283ff", + "status": "RECURRENT", + "state": "TO_VERIFY", + "severity": "MEDIUM", + "created": "2023-11-21T10:07:38Z", + "firstFoundAt": "2022-08-01T08:30:25Z", + "foundAt": "2023-11-21T10:07:38Z", + "firstScanId": "eff24b42-ddda-11ee-9e73-83b44de11797", + "description": "Incoming container traffic should be bound to a specific host interface", + "descriptionHTML": "\u003cp\u003eIncoming container traffic should be bound to a specific host interface\u003c/p\u003e\n", + "data": { + "queryId": "fd070ec6-ddda-11ee-a521-73cad7abf17a [Taken from query_id]", + "queryName": "Container Traffic Not Bound To Host Interface", + "group": "Networking and Firewall [Taken from category]", + "line": 
16, + "platform": "DockerCompose", + "issueType": "IncorrectValue", + "expectedValue": "Docker compose file to have 'ports' attribute bound to a specific host interface.", + "value": "Docker compose file doesn't have 'ports' attribute bound to a specific host interface", + "filename": "/qe/integration/docker-compose.yml" + }, + "comments": {}, + "vulnerabilityDetails": { + "cvss": {} + } + }, + { + "type": "sast", + "label": "sast", + "id": "47966330", + "similarityId": "2994069268", + "status": "RECURRENT", + "state": "TO_VERIFY", + "severity": "LOW", + "created": "2023-11-21T09:16:10Z", + "firstFoundAt": "2023-02-09T09:32:55Z", + "foundAt": "2023-11-21T09:16:10Z", + "firstScanId": "4f9f7b28-dddb-11ee-b736-53a846e9935e", + "description": "Method getClient at line 43 of /qe/integration-tests/src/java/com/example/api/integrationtests/utils/IntegratHelper.java defines testPassword, which is designated to contain user passwords. However, while plaintext passwords are later assigned to testPassword, this variable is never cleared from memory.\n\n", + "descriptionHTML": "\u003cp\u003eMethod getClient at line 43 of /qe/integration-tests/src/java/com/example/api/integrationtests/utils/IntegratHelper.java defines testPassword, which is designated to contain user passwords. However, while plaintext passwords are later assigned to testPassword, this variable is never cleared from memory.\u003c/p\u003e\n", + "data": { + "queryId": 7846472296093057013, + "queryName": "Heap_Inspection", + "group": "Java_Low_Visibility", + "resultHash": "oochiuquiede0IeVeijaWooTieh=", + "languageName": "Java", + "nodes": [ + { + "id": "Oec6Nie9ool0too4chieNoh5zoo=", + "line": 84, + "name": "testPassword", + "column": 18, + "length": 12, + "method": "getClient", + "nodeID": 6459, + "domType": "Declarator", + "fileName": "/qe/integration-tests/src/java/com/example/api/integrationtests/utils/IntegratHelper.java", + "fullName": "com.example.api.integrationtests.utils.IntegratHelper.getClient.testPassword", + "typeName": "char", + "methodLine": 35, + "definitions": "1" + } + ] + }, + "comments": {}, + "vulnerabilityDetails": { + "cweId": 244, + "cvss": {}, + "compliances": [ + "OWASP Top 10 2013", + "OWASP Top 10 2021", + "ASD STIG 4.10" + ] + } + }, + { + "type": "kics", + "label": "IaC Security", + "id": "87775678", + "similarityId": "d2b3d5c205f6e52f7588c4ecab08caec2a9d53dc2ded74e1fffd9f2ebf3fa203", + "status": "RECURRENT", + "state": "TO_VERIFY", + "severity": "LOW", + "created": "2023-11-21T10:07:38Z", + "firstFoundAt": "2023-01-05T09:31:43Z", + "foundAt": "2023-11-21T10:07:38Z", + "firstScanId": "82a21764-dddc-11ee-9364-1f3a853093bf", + "description": "Ensure that HEALTHCHECK is being used. The HEALTHCHECK instruction tells Docker how to test a container to check that it is still working", + "descriptionHTML": "\u003cp\u003eEnsure that HEALTHCHECK is being used. 
The HEALTHCHECK instruction tells Docker how to test a container to check that it is still working\u003c/p\u003e\n", + "data": { + "queryId": "90b50eba-dddc-11ee-acec-cf20c0abdb94 [Taken from query_id]", + "queryName": "Healthcheck Instruction Missing", + "group": "Insecure Configurations [Taken from category]", + "line": 1, + "platform": "Dockerfile", + "issueType": "MissingAttribute", + "expectedValue": "Dockerfile should contain instruction 'HEALTHCHECK'", + "value": "Dockerfile doesn't contain instruction 'HEALTHCHECK'", + "filename": "/qe/unitests/Dockerfile" + }, + "comments": {}, + "vulnerabilityDetails": { + "cvss": {} + } + } + ], + "totalCount": 6, + "scanID": "fc1ab89e-ddc8-11ee-96d4-97cff7d4e776" +} \ No newline at end of file diff --git a/unittests/scans/checkmarx_one/no_findings.json b/unittests/scans/checkmarx_one/no_findings.json new file mode 100644 index 0000000000..c526fa4dc0 --- /dev/null +++ b/unittests/scans/checkmarx_one/no_findings.json @@ -0,0 +1,6 @@ +{ + "results": [ + ], + "totalCount": 0, + "scanID": "4fc677bc-dddd-11ee-8004-6fd4f0411f73" +} \ No newline at end of file diff --git a/unittests/tools/test_checkmarx_one_parser.py b/unittests/tools/test_checkmarx_one_parser.py new file mode 100644 index 0000000000..31d6fdbed5 --- /dev/null +++ b/unittests/tools/test_checkmarx_one_parser.py @@ -0,0 +1,47 @@ +from dojo.models import Test +from dojo.tools.checkmarx_one.parser import CheckmarxOneParser +from ..dojo_test_case import DojoTestCase + + +class TestCheckmarxOneParser(DojoTestCase): + + def test_checkmarx_one_many_vulns(self): + with open("unittests/scans/checkmarx_one/checkmarx_one.json") as testfile: + parser = CheckmarxOneParser() + findings = parser.get_findings(testfile, Test()) + testfile.close() + self.assertEqual(5, len(findings)) + with self.subTest(i=0): + for finding in findings: + self.assertIsNotNone(finding.unique_id_from_tool) + self.assertIsNotNone(finding.title) + self.assertIsNotNone(finding.test) + self.assertIsNotNone(finding.date) + self.assertIsNotNone(finding.severity) + self.assertIsNotNone(finding.description) + finding_test = findings[0] + self.assertEqual("Medium", finding_test.severity) + self.assertEqual("/src/helpers/Constants.ts", finding_test.file_path) + + def test_checkmarx_one_many_findings(self): + with open("unittests/scans/checkmarx_one/many_findings.json") as testfile: + parser = CheckmarxOneParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(6, len(findings)) + with self.subTest(i=0): + for finding in findings: + self.assertIsNotNone(finding.unique_id_from_tool) + self.assertIsNotNone(finding.title) + self.assertIsNotNone(finding.test) + self.assertIsNotNone(finding.date) + self.assertIsNotNone(finding.severity) + self.assertIsNotNone(finding.description) + finding_test = findings[0] + self.assertEqual("High", finding_test.severity) + self.assertEqual("/qe/testharness/Dockerfile", finding_test.file_path) + + def test_checkmarx_one_no_findings(self): + with open("unittests/scans/checkmarx_one/no_findings.json") as testfile: + parser = CheckmarxOneParser() + findings = parser.get_findings(testfile, Test()) + self.assertEqual(0, len(findings)) From e9bd0a853a3c118c9a156085c282c188bf8be0a2 Mon Sep 17 00:00:00 2001 From: Cody Maffucci <46459665+Maffooch@users.noreply.github.com> Date: Mon, 11 Mar 2024 15:56:29 -0500 Subject: [PATCH 09/11] Helm: Remove PSQLHA Test (#9716) This test will no longer be supported per https://github.com/DefectDojo/django-DefectDojo/discussions/9690 [sc-4779] --- 
 .github/workflows/k8s-tests.yml | 12 ------------
 1 file changed, 12 deletions(-)

diff --git a/.github/workflows/k8s-tests.yml b/.github/workflows/k8s-tests.yml
index f5ec107d83..dd34b88d76 100644
--- a/.github/workflows/k8s-tests.yml
+++ b/.github/workflows/k8s-tests.yml
@@ -29,14 +29,6 @@ env:
     --set mysql.enabled=false \
     --set createPostgresqlSecret=true \
     "
-  HELM_PGHA_DATABASE_SETTINGS: " \
-    --set database=postgresqlha \
-    --set postgresql.enabled=false \
-    --set mysql.enabled=false \
-    --set postgresqlha.enabled=true \
-    --set createPostgresqlHaSecret=true \
-    --set createPostgresqlHaPgpoolSecret=true \
-    "
 jobs:
   setting_minikube_cluster:
     name: Kubernetes Deployment
@@ -64,10 +56,6 @@ jobs:
             brokers: redis
             k8s: 'v1.23.9'
             os: debian
-          - databases: pgsqlha
-            brokers: rabbit
-            k8s: 'v1.23.9'
-            os: debian
           - databases: pgsql
             brokers: rabbit
             k8s: 'v1.23.9'

From 361bf02156bfcb6a8cd7b820cd506440534fd8ee Mon Sep 17 00:00:00 2001
From: Paul Osinski <42211303+paulOsinski@users.noreply.github.com>
Date: Mon, 11 Mar 2024 16:57:06 -0400
Subject: [PATCH 10/11] fix
<br/> tag on django.po (#9718)

---
 dojo/locale/en/LC_MESSAGES/django.po | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/dojo/locale/en/LC_MESSAGES/django.po b/dojo/locale/en/LC_MESSAGES/django.po
index ab26c8cbdb..92e365e334 100644
--- a/dojo/locale/en/LC_MESSAGES/django.po
+++ b/dojo/locale/en/LC_MESSAGES/django.po
@@ -3748,7 +3748,7 @@ msgid ""
 "tags, references, languages or technologies contain the search query and "
 "products whose\n"
 "                                            name, tags or description contain the "
-"search query. </br> Advanced search operators: (Restrict results to a certain "
+"search query. <br/>
Advanced search operators: (Restrict results to a certain "
 "type) product:,\n"
 "                                            engagement:, finding:, endpoint:, tag:, "
 "language:, technology: or vulnerability_id:.\n"

From 09922394f7cf5736b800b6b7198b63458bece18a Mon Sep 17 00:00:00 2001
From: DefectDojo release bot
Date: Mon, 11 Mar 2024 21:01:10 +0000
Subject: [PATCH 11/11] Update versions in application files

---
 components/package.json    | 2 +-
 dojo/__init__.py           | 2 +-
 helm/defectdojo/Chart.yaml | 4 ++--
 3 files changed, 4 insertions(+), 4 deletions(-)

diff --git a/components/package.json b/components/package.json
index 5528766c57..283ee0dcd1 100644
--- a/components/package.json
+++ b/components/package.json
@@ -1,6 +1,6 @@
 {
   "name": "defectdojo",
-  "version": "2.33.0-dev",
+  "version": "2.32.1",
   "license" : "BSD-3-Clause",
   "private": true,
   "dependencies": {
diff --git a/dojo/__init__.py b/dojo/__init__.py
index 61db2f0d7a..54f0f18473 100644
--- a/dojo/__init__.py
+++ b/dojo/__init__.py
@@ -4,6 +4,6 @@
 # Django starts so that shared_task will use this app.
 from .celery import app as celery_app  # noqa: F401
 
-__version__ = '2.33.0-dev'
+__version__ = '2.32.1'
 __url__ = 'https://github.com/DefectDojo/django-DefectDojo'
 __docs__ = 'https://documentation.defectdojo.com'
diff --git a/helm/defectdojo/Chart.yaml b/helm/defectdojo/Chart.yaml
index e5eb9b0e92..3dd58e1c92 100644
--- a/helm/defectdojo/Chart.yaml
+++ b/helm/defectdojo/Chart.yaml
@@ -1,8 +1,8 @@
 apiVersion: v2
-appVersion: "2.33.0-dev"
+appVersion: "2.32.1"
 description: A Helm chart for Kubernetes to install DefectDojo
 name: defectdojo
-version: 1.6.115-dev
+version: 1.6.115
 icon: https://www.defectdojo.org/img/favicon.ico
 maintainers:
 - name: madchap
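As a companion to the unit tests added in PATCH 08, the new Checkmarx One parser can also be driven directly; a minimal sketch, assuming a configured DefectDojo/Django environment and a sample report on disk:

    from dojo.models import Test
    from dojo.tools.checkmarx_one.parser import CheckmarxOneParser

    # Parse a Checkmarx One JSON report into DefectDojo Finding objects
    with open("unittests/scans/checkmarx_one/many_findings.json") as report:
        findings = CheckmarxOneParser().get_findings(report, Test())

    # Severity and file path are populated by the parser
    for finding in findings:
        print(finding.severity, finding.file_path)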