# Asset Discover: Burp Suite Extension to find related assets (domain, IP, S3 Buckets etc.) from a webpage. #AssetDiscovery
# By: RedHunt Labs (www.redhuntlabs.com)
# Twitter: https://twitter.com/redhuntlabs
# Code Credits:
# OpenSecurityResearch CustomPassiveScanner: https://github.com/OpenSecurityResearch/CustomPassiveScanner
# PortSwigger example-scanner-checks: https://github.com/PortSwigger/example-scanner-checks
from burp import IBurpExtender
from burp import IScannerCheck
from burp import IScanIssue
from array import array
import re
import urllib2
import json
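
# Note: Burp loads Python extensions through Jython (Python 2), which is why
# Python 2 constructs such as urllib2 and print statements are used below.
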
# Implement BurpExtender to inherit from multiple base classes
# IBurpExtender is the base class required for all extensions
# IScannerCheck lets us register our extension with Burp as a custom scanner check
class BurpExtender(IBurpExtender, IScannerCheck):
    # Domains that have already been looked up, so each domain is queried only once
    scopedomains = []

    # The only method of the IBurpExtender interface.
    # This method is invoked when the extension is loaded and registers
    # an instance of the IBurpExtenderCallbacks interface
    def registerExtenderCallbacks(self, callbacks):
        # Put the callbacks parameter into a class variable so we have class-level scope
        self._callbacks = callbacks
        # Set the name of our extension, which will appear in the Extender tool when loaded
        self._callbacks.setExtensionName("Asset History")
        # Register our extension as a custom scanner check, so Burp will use this extension
        # to perform active or passive scanning and report on scan issues returned
        self._callbacks.registerScannerCheck(self)
        return

    # This method is called when multiple issues are reported for the same URL.
    # The issues from our scans include the discovered assets in the detail, so
    # issues with different details are treated as unique issue instances
    # (return -1 to keep only the existing issue, 0 to report both).
    def consolidateDuplicateIssues(self, existingIssue, newIssue):
        if existingIssue.getIssueDetail() == newIssue.getIssueDetail():
            return -1
        else:
            return 0
    # Implement the doPassiveScan method of the IScannerCheck interface.
    # Burp Scanner invokes this method for each base request/response that is passively scanned.
    def doPassiveScan(self, baseRequestResponse):
        # Local variable used to store a list of ScanIssue objects
        scan_issues = []
        # This check queries the Wayback Machine CDX API for historic URLs of the
        # target domain and reports them as an informational issue
        issuename = "Asset History: URL"
        issuelevel = "Information"
        issuedetail = "Historic URLs Discovered: <b>$asset$</b>"
        # Get an instance of IHelpers, which has lots of useful methods, as a class
        # variable, so we have class-level scope to all the helper methods
        self._helpers = self._callbacks.getHelpers()
        self._requestResponse = baseRequestResponse
        if self._callbacks.isInScope(self._helpers.analyzeRequest(self._requestResponse).getUrl()):
            url = self._helpers.analyzeRequest(self._requestResponse).getUrl()
            # Extract the bare hostname from the URL string
            domain = str(url).split("//")[-1].split(":")[0].split("?")[0].split("/")[0]
            print self.scopedomains
            # Query the Wayback Machine only once per domain
            if domain not in self.scopedomains:
                print "Target Domain: " + domain
                url = "http://web.archive.org/cdx/search/cdx?url=/" + domain + "/*&output=json"
                print url
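                # Illustrative shape of the CDX JSON response (column names are an
                # assumption based on the public CDX API defaults; the first row
                # lists the field names and each later row is one capture):
                # [["urlkey","timestamp","original","mimetype","statuscode","digest","length"],
                #  ["com,example)/", "20190101000000", "http://example.com/", "text/html", "200", "...", "1234"]]
                # The historic URL itself is the "original" column at index 2.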
                try:
                    self.scopedomains.append(domain)
                    webarchive = urllib2.urlopen(url)
                    webarchivejson = json.load(webarchive)
                    print webarchivejson
                    urls = []
                    for wa in webarchivejson:
                        # Column index 2 of each row is the original (historic) URL
                        urls.append(wa[2])
                    if urls:
                        # The first row of the CDX JSON output holds the field names, so drop it
                        urls.pop(0)
                    uniqueurls = set(urls)
                    if uniqueurls:
                        uniqueurl = '<li>' + '</li>\r\n<li>'.join(uniqueurls) + '</li>'
                        print "URLs Discovered:"
                        for u in uniqueurls:
                            print u
                        # Create a ScanIssue object and append it to our list of issues
                        scan_issues.append(ScanIssue(self._requestResponse.getHttpService(),
                                                     self._helpers.analyzeRequest(self._requestResponse).getUrl(),
                                                     [self._callbacks.applyMarkers(self._requestResponse, None, None)],
                                                     issuename, issuelevel, issuedetail.replace("$asset$", uniqueurl)))
                except Exception as e:
                    print "Exception Occurred: " + str(e)
                    # Forget the domain so it can be retried on a later scan
                    self.scopedomains.remove(domain)
        # Per the interface contract, doPassiveScan returns a list of scan issues,
        # if any, and None otherwise
        if len(scan_issues) > 0:
            return scan_issues
        else:
            return None


# Implementation of the IScanIssue interface with simple constructor and getter methods
class ScanIssue(IScanIssue):
    def __init__(self, httpservice, url, requestresponsearray, name, severity, detailmsg):
        self._url = url
        self._httpservice = httpservice
        self._requestresponsearray = requestresponsearray
        self._name = name
        self._severity = severity
        self._detailmsg = detailmsg

    def getUrl(self):
        return self._url

    def getHttpMessages(self):
        return self._requestresponsearray

    def getHttpService(self):
        return self._httpservice

    def getRemediationDetail(self):
        return None

    def getIssueDetail(self):
        return self._detailmsg

    def getIssueBackground(self):
        return None

    def getRemediationBackground(self):
        return None

    def getIssueType(self):
        return 0

    def getIssueName(self):
        return self._name

    def getSeverity(self):
        return self._severity

    def getConfidence(self):
        return "Tentative"