diff --git a/.gitignore b/.gitignore index 4fa592139..913ec0a71 100644 --- a/.gitignore +++ b/.gitignore @@ -11,3 +11,5 @@ __pycache__/ db/test.txt default.conf + +.ropeproject/ \ No newline at end of file diff --git a/CONTRIBUTORS.md b/CONTRIBUTORS.md index 83b5da1f0..8c0501c3f 100644 --- a/CONTRIBUTORS.md +++ b/CONTRIBUTORS.md @@ -60,6 +60,7 @@ - [Kyle Nweeia](https://github.com/kyle-nweeia) - [Xib3rR4dAr](https://github.com/Xib3rR4dAr) - [Rohit Soni](https://github.com/StreetOfHackerR007/) +- [Maxime Peim](https://github.com/maxime-peim) - [Christian Clauss](https://github.com/cclauss) Special thanks for all the people who had helped dirsearch so far! diff --git a/README.md b/README.md index 1faa5c2da..09f3b3caa 100644 --- a/README.md +++ b/README.md @@ -36,8 +36,9 @@ Table of Contents * [Proxies](#Proxies) * [Reports](#Reports) * [Some others commands](#Some-others-commands) -* [Tips](#Tips) * [Support Docker](#Support-Docker) +* [References](#References) +* [Tips](#Tips) * [License](#License) * [Contributors](#Contributors) @@ -52,7 +53,7 @@ Kali Linux Installation & Usage ------------ -**Requirement: python 3.x** +**Requirement: python 3.8 or higher** Choose one of these installation options: @@ -79,7 +80,7 @@ Wordlists (IMPORTANT) --------------- **Summary:** - Wordlist is a text file, each line is a path. - - About extensions, unlike other tools, dirsearch will only replace the `%EXT%` keyword with extensions in **-e | --extensions** flag. + - About extensions, unlike other tools, dirsearch will only replace the `%EXT%` keyword with extensions in **-e** flag. - For wordlists without `%EXT%` (like [SecLists](https://github.com/danielmiessler/SecLists)), **-f | --force-extensions** switch is required to append extensions to every word in wordlist, as well as the `/`. And for entries in wordlist that you do not want to append extensions, you can add `%NOFORCE%` at the end of them. - To use multiple wordlists, you can separate your wordlists with commas. 
Example: `wordlist1.txt,wordlist2.txt`. @@ -131,8 +132,8 @@ Options: Target URL list file --stdin Target URL list from STDIN --cidr=CIDR Target CIDR - --raw=FILE File contains the raw request (use `--scheme` flag to - set the scheme) + --raw=FILE Load raw HTTP request from file (use `--scheme` flag + to set the scheme) -e EXTENSIONS, --extensions=EXTENSIONS Extension list separated by commas (Example: php,asp) -X EXTENSIONS, --exclude-extensions=EXTENSIONS @@ -165,6 +166,10 @@ Options: -t THREADS, --threads=THREADS Number of threads -r, --recursive Brute-force recursively + --deep-recursive Perform recursive scan on every directory depth + (Example: api/users -> api/) + --force-recursive Do recursive brute-force for every found path, not + only paths end with slash --recursion-depth=DEPTH Maximum recursion depth --recursion-status=CODES @@ -218,8 +223,11 @@ Options: -F, --follow-redirects Follow HTTP redirects --random-agent Choose a random User-Agent for each request + --auth-type=TYPE Authentication type (basic, digest, bearer, ntlm) + --auth=CREDENTIAL Authentication credential (user:password or bearer + token) --user-agent=USERAGENT - --cookie=COOKIE + --cookie=COOKIE Connection Settings: --timeout=TIMEOUT Connection timeout @@ -232,8 +240,7 @@ Options: Proxy to replay with found paths --scheme=SCHEME Default scheme (for raw request or if there is no scheme in the URL) - --max-rate=REQUESTS - Max requests per second + --max-rate=RATE Max requests per second --retries=RETRIES Number of retries for failed requests -b, --request-by-hostname By default dirsearch requests by IP for speed. 
This @@ -242,7 +249,8 @@ Options: --exit-on-error Exit whenever an error occurs Reports: - -o FILE Output file + -o FILE, --output=FILE + Output file --format=FORMAT Report format (Available: simple, plain, json, xml, md, csv, html) ``` @@ -272,7 +280,6 @@ recursion-depth = 0 exclude-subdirs = %%ff/ random-user-agents = False max-time = 0 -save-logs-home = False full-url = False quiet-mode = False color = True @@ -286,9 +293,11 @@ recursion-status = 200-399,401,403 # skip-on-status = 429,999 [reports] -# report-output = output.txt report-format = plain -## Support: plain, simple, json, xml, md, csv +autosave-report = True +# report-output-folder = /home/user +# logs-location = /tmp +## Supported: plain, simple, json, xml, md, csv, html [dictionary] lowercase = False @@ -584,15 +593,6 @@ python3 dirsearch.py -u https://target --remove-extensions **There are more features and you will need to discover them by your self** -Tips ---------------- -- The server has a request limit? That's bad, but feel free to bypass it, by randomizing proxy with `--proxy-list` -- Want to find out config files or backups? Try `--suffixes ~` and `--prefixes .` -- For some endpoints that you do not want to force extensions, add `%NOFORCE%` at the end of them -- Want to find only folders/directories? Combine `--remove-extensions` and `--suffixes /`! -- The combination of `--cidr`, `-F`, `-q` and a low `--timeout` will reduce most of the noise + false negatives when brute-forcing with a CIDR -- Scan a list of URLs, but don't want to see a 429 flood? 
Use `--skip-on-status` + `429` will help you to skip a target whenever it returns 429 - Support Docker --------------- ### Install Docker Linux @@ -620,6 +620,29 @@ docker run -it --rm "dirsearch:v0.4.1" -u target -e php,html,js,zip ``` +References +--------------- +- [Comprehensive Guide on Dirsearch](https://www.hackingarticles.in/comprehensive-guide-on-dirsearch/) by Shubham Sharma +- [Comprehensive Guide on Dirsearch Part 2](https://www.hackingarticles.in/comprehensive-guide-on-dirsearch-part-2/) by Shubham Sharma +- [GUÍA COMPLETA SOBRE EL USO DE DIRSEARCH](https://esgeeks.com/guia-completa-uso-dirsearch/?feed_id=5703&_unique_id=6076249cc271f) by ESGEEKS +- [How to use Dirsearch to detect web directories](https://www.ehacking.net/2020/01/how-to-find-hidden-web-directories-using-dirsearch.html) by EHacking +- [dirsearch how to](https://vk9-sec.com/dirsearch-how-to/) by VK9 Security +- [Find Hidden Web Directories with Dirsearch](https://null-byte.wonderhowto.com/how-to/find-hidden-web-directories-with-dirsearch-0201615/) by Wonder How To +- [Brute force directories and files in webservers using dirsearch](https://upadhyayraj.medium.com/brute-force-directories-and-files-in-webservers-using-dirsearch-613e4a7fa8d5) by Raj Upadhyay +- [Live Bug Bounty Recon Session on Yahoo (Amass, crts.sh, dirsearch) w/ @TheDawgyg](https://www.youtube.com/watch?v=u4dUnJ1U0T4) by Nahamsec +- [Dirsearch to find Hidden Web Directories](https://medium.com/@irfaanshakeel/dirsearch-to-find-hidden-web-directories-d0357fbe47b0) by Irfan Shakeel +- [Getting access to 25000 employees details](https://medium.com/@ehsahil/getting-access-to-25k-employees-details-c085d18b73f0) by Sahil Ahamad + +Tips +--------------- +- The server has a request limit? That's bad, but feel free to bypass it, by randomizing proxy with `--proxy-list` +- Want to find out config files or backups? 
Try `--suffixes ~` and `--prefixes .` +- For some endpoints that you do not want to force extensions, add `%NOFORCE%` at the end of them +- Want to find only folders/directories? Combine `--remove-extensions` and `--suffixes /`! +- The combination of `--cidr`, `-F`, `-q` and a low `--timeout` will reduce most of the noise + false negatives when brute-forcing with a CIDR +- Scan a list of URLs, but don't want to see a 429 flood? Use `--skip-on-status` + `429` will help you to skip a target whenever it returns 429 + + License --------------- Copyright (C) Mauro Soria (maurosoria@gmail.com) diff --git a/lib/connection/requester.py b/lib/connection/requester.py index 9a19006f9..4b5106368 100755 --- a/lib/connection/requester.py +++ b/lib/connection/requester.py @@ -36,14 +36,14 @@ class Requester(object): def __init__( self, url, - maxPool=1, - maxRetries=5, + max_pool=1, + max_retries=5, timeout=20, ip=None, proxy=None, proxylist=None, redirect=False, - requestByHostname=False, + request_by_hostname=False, httpmethod="get", data=None, scheme=None, @@ -52,10 +52,6 @@ def __init__( self.data = data self.headers = {} - # If no backslash, append one - if not url.endswith("/"): - url += "/" - parsed = urllib.parse.urlparse(url) # If no protocol specified, set http by default @@ -67,19 +63,19 @@ def __init__( raise RequestException({"message": "Unsupported URL scheme: {0}".format(parsed.scheme)}) if parsed.path.startswith("/"): - self.basePath = parsed.path[1:] + self.base_path = parsed.path[1:] else: - self.basePath = parsed.path + self.base_path = parsed.path - # Safe quote all special characters in basePath to prevent from being encoded when performing requests - self.basePath = urllib.parse.quote(self.basePath, safe="!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~") + # Safe quote all special characters in base_path to prevent from being encoded when performing requests + self.base_path = urllib.parse.quote(self.base_path, safe="!\"#$%&'()*+,-./:;<=>?@[\\]^_`{|}~") self.protocol = 
parsed.scheme self.host = parsed.netloc.split(":")[0] # Resolve DNS to decrease overhead if ip: self.ip = ip - # A proxy could have a different DNS that would resolve the name. Therefore, + # A proxy could have a different DNS that would resolve the name. Therefore, # resolving the name when using proxy to raise an error is pointless elif not proxy and not proxylist: try: @@ -106,37 +102,37 @@ def __init__( ): self.headers["Host"] += ":{0}".format(self.port) - self.maxRetries = maxRetries - self.maxPool = maxPool + self.max_retries = max_retries + self.max_pool = max_pool self.timeout = timeout self.pool = None self.proxy = proxy self.proxylist = proxylist self.redirect = redirect - self.randomAgents = None + self.random_agents = None self.auth = None - self.requestByHostname = requestByHostname + self.request_by_hostname = request_by_hostname self.session = requests.Session() self.url = "{0}://{1}:{2}/".format( self.protocol, - self.host if self.requestByHostname else self.ip, + self.host if self.request_by_hostname else self.ip, self.port, ) - self.baseUrl = "{0}://{1}:{2}/".format( + self.base_url = "{0}://{1}:{2}/".format( self.protocol, self.host, self.port, ) - def setHeader(self, key, value): + def set_header(self, key, value): self.headers[key.strip()] = value.strip() if value else value - def setRandomAgents(self, agents): - self.randomAgents = list(agents) + def set_random_agents(self, agents): + self.random_agents = list(agents) - def setAuth(self, type, credential): + def set_auth(self, type, credential): if type == "bearer": - self.setHeader("Authorization", "Bearer {0}".format(credential)) + self.set_header("Authorization", "Bearer {0}".format(credential)) else: user = credential.split(":")[0] try: @@ -155,7 +151,7 @@ def request(self, path, proxy=None): result = None error = None - for i in range(self.maxRetries): + for i in range(self.max_retries): try: if not proxy: if self.proxylist: @@ -176,10 +172,10 @@ else:
proxies = None - url = self.url + self.basePath + path + url = self.url + self.base_path + path - if self.randomAgents: - self.headers["User-Agent"] = random.choice(self.randomAgents) + if self.random_agents: + self.headers["User-Agent"] = random.choice(self.random_agents) request = requests.Request( self.httpmethod, @@ -189,6 +185,7 @@ def request(self, path, proxy=None): data=self.data, ) prepare = request.prepare() + prepare.url = url response = self.session.send( prepare, proxies=proxies, @@ -207,11 +204,11 @@ def request(self, path, proxy=None): break except requests.exceptions.SSLError: - self.url = self.baseUrl + self.url = self.base_url continue except requests.exceptions.TooManyRedirects: - error = "Too many redirects: {0}".format(self.baseUrl) + error = "Too many redirects: {0}".format(self.base_url) except requests.exceptions.ProxyError: error = "Error with the proxy: {0}".format(proxy) @@ -220,7 +217,7 @@ def request(self, path, proxy=None): error = "Cannot connect to: {0}:{1}".format(self.host, self.port) except requests.exceptions.InvalidURL: - error = "Invalid URL: {0}".format(self.baseUrl) + error = "Invalid URL: {0}".format(self.base_url) except requests.exceptions.InvalidProxyURL: error = "Invalid proxy URL: {0}".format(proxy) @@ -232,10 +229,10 @@ def request(self, path, proxy=None): http.client.IncompleteRead, socket.timeout, ): - error = "Request timeout: {0}".format(self.baseUrl) + error = "Request timeout: {0}".format(self.base_url) except Exception: - error = "There was a problem in the request to: {0}".format(self.baseUrl) + error = "There was a problem in the request to: {0}".format(self.base_url) if error: raise RequestException({"message": error}) diff --git a/lib/controller/controller.py b/lib/controller/controller.py index d08402b36..5efe556b9 100755 --- a/lib/controller/controller.py +++ b/lib/controller/controller.py @@ -21,7 +21,8 @@ import sys import time import re -import urllib.parse + +from urllib.parse import urljoin from 
threading import Lock from queue import Queue @@ -48,7 +49,7 @@ class EmptyReportManager(object): def __init__(self): pass - def updateReport(self, *args): + def update_report(self, *args): pass @@ -56,7 +57,7 @@ class EmptyReport(object): def __init__(self): pass - def addResult(self, *args): + def add_result(self, *args): pass @@ -74,11 +75,11 @@ def __init__(self, script_path, arguments, output): self.exit = False self.arguments = arguments self.output = output - self.doneDirs = [] + self.done_dirs = [] if arguments.raw_file: _raw = Raw(arguments.raw_file, arguments.scheme) - self.urlList = [_raw.url()] + self.url_list = [_raw.url()] self.httpmethod = _raw.method() self.data = _raw.data() self.headers = _raw.headers() @@ -91,49 +92,47 @@ def __init__(self, script_path, arguments, output): "Cache-Control": "max-age=0", } - self.urlList = list(filter(None, dict.fromkeys(arguments.urlList))) + self.url_list = list(filter(None, dict.fromkeys(arguments.url_list))) self.httpmethod = arguments.httpmethod.lower() self.data = arguments.data self.headers = {**default_headers, **arguments.headers} if arguments.cookie: self.headers["Cookie"] = arguments.cookie - if arguments.cookie: + if arguments.useragent: self.headers["User-Agent"] = arguments.useragent self.recursion_depth = arguments.recursion_depth - if arguments.logsLocation and self.validatePath(arguments.logsLocation): - self.logsPath = FileUtils.build_path(arguments.logsLocation) + if arguments.logs_location and self.validate_path(arguments.logs_location): + self.logs_path = FileUtils.build_path(arguments.logs_location) else: - self.logsPath = FileUtils.build_path(self.script_path, "logs") - if not FileUtils.exists(self.logsPath): - FileUtils.create_directory(self.logsPath) + self.logs_path = FileUtils.build_path(self.script_path, "logs") + if not FileUtils.exists(self.logs_path): + FileUtils.create_directory(self.logs_path) - if arguments.outputLocation and self.validatePath(arguments.outputLocation): - 
self.save_path = FileUtils.build_path(arguments.outputLocation) + if arguments.output_location and self.validate_path(arguments.output_location): + self.save_path = FileUtils.build_path(arguments.output_location) else: self.save_path = FileUtils.build_path(self.script_path, "reports") if not FileUtils.exists(self.save_path): FileUtils.create_directory(self.save_path) - self.blacklists = self.getBlacklists() - self.includeStatusCodes = arguments.includeStatusCodes - self.excludeStatusCodes = arguments.excludeStatusCodes - self.excludeSizes = arguments.excludeSizes - self.excludeTexts = arguments.excludeTexts - self.excludeRegexps = arguments.excludeRegexps - self.excludeRedirects = arguments.excludeRedirects + self.blacklists = self.get_blacklists() + self.include_status_codes = arguments.include_status_codes + self.exclude_status_codes = arguments.exclude_status_codes + self.exclude_sizes = arguments.exclude_sizes + self.exclude_texts = arguments.exclude_texts + self.exclude_regexps = arguments.exclude_regexps + self.exclude_redirects = arguments.exclude_redirects self.recursive = arguments.recursive self.deep_recursive = arguments.deep_recursive self.force_recursive = arguments.force_recursive - self.recursionStatusCodes = arguments.recursionStatusCodes - self.minimumResponseSize = arguments.minimumResponseSize - self.maximumResponseSize = arguments.maximumResponseSize + self.recursion_status_codes = arguments.recursion_status_codes + self.minimum_response_size = arguments.minimum_response_size + self.maximum_response_size = arguments.maximum_response_size self.maxtime = arguments.maxtime - self.scanSubdirs = arguments.scanSubdirs - self.excludeSubdirs = ( - arguments.excludeSubdirs if arguments.excludeSubdirs else [] - ) + self.scan_subdirs = arguments.scan_subdirs + self.exclude_subdirs = arguments.exclude_subdirs self.dictionary = Dictionary( paths=arguments.wordlist, @@ -143,106 +142,105 @@ def __init__(self, script_path, arguments, output): 
lowercase=arguments.lowercase, uppercase=arguments.uppercase, capitalization=arguments.capitalization, - forcedExtensions=arguments.forceExtensions, - excludeExtensions=arguments.excludeExtensions, - noExtension=arguments.noExtension, - onlySelected=arguments.onlySelected + forced_extensions=arguments.force_extensions, + exclude_extensions=arguments.exclude_extensions, + no_extension=arguments.no_extension, + only_selected=arguments.only_selected ) - self.allJobs = len(self.scanSubdirs) if self.scanSubdirs else 1 - self.currentJob = 0 - self.startTime = time.time() - self.errorLog = None - self.errorLogPath = None - self.threadsLock = Lock() + self.all_jobs = len(self.scan_subdirs) if self.scan_subdirs else 1 + self.current_job = 0 + self.start_time = time.time() + self.error_log = None + self.error_log_path = None + self.threads_lock = Lock() self.batch = False - self.batchSession = None + self.batch_session = None self.output.header(program_banner) - self.printConfig() + self.print_config() - if arguments.useRandomAgents: - self.randomAgents = FileUtils.get_lines( + if arguments.use_random_agents: + self.random_agents = FileUtils.get_lines( FileUtils.build_path(script_path, "db", "user-agents.txt") ) - self.reportManager = EmptyReportManager() + self.report_manager = EmptyReportManager() self.report = EmptyReport() - if arguments.autosaveReport or arguments.outputFile: - if len(self.urlList) > 1: - self.setupBatchReports() - self.setupReports() + if arguments.autosave_report or arguments.output_file: + if len(self.url_list) > 1: + self.setup_batch_reports() + self.setup_reports() - self.setupErrorLogs() - self.output.errorLogFile(self.errorLogPath) + self.setup_error_logs() + self.output.error_log_file(self.error_log_path) try: - for url in self.urlList: + for url in self.url_list: try: gc.collect() - self.currentUrl = url if url.endswith("/") else url + "/" - self.output.setTarget(self.currentUrl, self.arguments.scheme) + url = url if url.endswith("/") else url 
+ "/" + self.output.set_target(url, self.arguments.scheme) try: self.requester = Requester( url, - maxPool=arguments.threadsCount, - maxRetries=arguments.maxRetries, + max_pool=arguments.threads_count, + max_retries=arguments.max_retries, timeout=arguments.timeout, ip=arguments.ip, proxy=arguments.proxy, proxylist=arguments.proxylist, redirect=arguments.redirect, - requestByHostname=arguments.requestByHostname, + request_by_hostname=arguments.request_by_hostname, httpmethod=self.httpmethod, data=self.data, scheme=arguments.scheme, ) - if arguments.autosaveReport or arguments.outputFile: - self.report = Report(self.requester.host, self.requester.port, self.requester.protocol, self.requester.basePath) - for key, value in self.headers.items(): - self.requester.setHeader(key, value) + self.requester.set_header(key, value) if arguments.auth: - self.requester.setAuth(arguments.auth_type, arguments.auth) + self.requester.set_auth(arguments.auth_type, arguments.auth) self.requester.request("") + if arguments.autosave_report or arguments.output_file: + self.report = Report(self.requester.host, self.requester.port, self.requester.protocol, self.requester.base_path) + except RequestException as e: self.output.error(e.args[0]["message"]) raise SkipTargetInterrupt - if arguments.useRandomAgents: - self.requester.setRandomAgents(self.randomAgents) + if arguments.use_random_agents: + self.requester.set_random_agents(self.random_agents) # Initialize directories Queue with start Path - self.basePath = self.requester.basePath + self.base_path = self.requester.base_path self.status_skip = None - if self.scanSubdirs: - for subdir in self.scanSubdirs: - self.directories.put(subdir) + for subdir in self.scan_subdirs: + self.directories.put(subdir) else: self.directories.put("") - matchCallbacks = [self.matchCallback] - notFoundCallbacks = [self.notFoundCallback] - errorCallbacks = [self.errorCallback, self.appendErrorLog] + match_callbacks = [self.match_callback] + not_found_callbacks = 
[self.not_found_callback] + error_callbacks = [self.error_callback, self.append_error_log] self.fuzzer = Fuzzer( self.requester, self.dictionary, suffixes=arguments.suffixes, prefixes=arguments.prefixes, - excludeContent=arguments.excludeContent, - threads=arguments.threadsCount, + exclude_content=arguments.exclude_content, + threads=arguments.threads_count, delay=arguments.delay, maxrate=arguments.maxrate, - matchCallbacks=matchCallbacks, - notFoundCallbacks=notFoundCallbacks, - errorCallbacks=errorCallbacks, + match_callbacks=match_callbacks, + not_found_callbacks=not_found_callbacks, + error_callbacks=error_callbacks, ) try: self.prepare() @@ -259,38 +257,44 @@ def __init__(self, script_path, arguments, output): exit(0) finally: - if not self.errorLog.closed: - self.errorLog.close() + if not self.error_log.closed: + self.error_log.close() self.output.warning("\nTask Completed") - def printConfig(self): + def print_config(self): self.output.config( ', '.join(self.arguments.extensions), ', '.join(self.arguments.prefixes), ', '.join(self.arguments.suffixes), - str(self.arguments.threadsCount), + str(self.arguments.threads_count), str(len(self.dictionary)), str(self.httpmethod), ) - def getBlacklists(self): + def is_timed_out(self): + if self.maxtime and time.time() - self.start_time > self.maxtime: + return True + + return False + + def get_blacklists(self): reext = re.compile(r'\%ext\%', re.IGNORECASE) blacklists = {} for status in [400, 403, 500]: - blacklistFileName = FileUtils.build_path(self.script_path, "db") - blacklistFileName = FileUtils.build_path( - blacklistFileName, "{}_blacklist.txt".format(status) + blacklist_file_name = FileUtils.build_path(self.script_path, "db") + blacklist_file_name = FileUtils.build_path( + blacklist_file_name, "{}_blacklist.txt".format(status) ) - if not FileUtils.can_read(blacklistFileName): + if not FileUtils.can_read(blacklist_file_name): # Skip if cannot read file continue blacklists[status] = [] - for line in 
FileUtils.get_lines(blacklistFileName): + for line in FileUtils.get_lines(blacklist_file_name): # Skip comments if line.lstrip().startswith("#"): continue @@ -313,88 +317,88 @@ def getBlacklists(self): return blacklists - def setupErrorLogs(self): - fileName = "errors-{0}.log".format(time.strftime("%y-%m-%d_%H-%M-%S")) - self.errorLogPath = FileUtils.build_path( - self.logsPath, fileName + def setup_error_logs(self): + file_name = "errors-{0}.log".format(time.strftime("%y-%m-%d_%H-%M-%S")) + self.error_log_path = FileUtils.build_path( + self.logs_path, file_name ) try: - self.errorLog = open(self.errorLogPath, "w") + self.error_log = open(self.error_log_path, "w") except PermissionError: self.output.error( "Couldn't create the error log. Try running again with highest permission" ) sys.exit(1) - def setupBatchReports(self): + def setup_batch_reports(self): self.batch = True - if not self.arguments.outputFile: - self.batchSession = "BATCH-{0}".format(time.strftime("%y-%m-%d_%H-%M-%S")) - self.batchDirectoryPath = FileUtils.build_path( - self.save_path, self.batchSession + if not self.arguments.output_file: + self.batch_session = "BATCH-{0}".format(time.strftime("%y-%m-%d_%H-%M-%S")) + self.batch_directory_path = FileUtils.build_path( + self.save_path, self.batch_session ) - if not FileUtils.exists(self.batchDirectoryPath): - FileUtils.create_directory(self.batchDirectoryPath) + if not FileUtils.exists(self.batch_directory_path): + FileUtils.create_directory(self.batch_directory_path) - if not FileUtils.exists(self.batchDirectoryPath): + if not FileUtils.exists(self.batch_directory_path): self.output.error( - "Couldn't create batch folder at {}".format(self.batchDirectoryPath) + "Couldn't create batch folder at {}".format(self.batch_directory_path) ) sys.exit(1) - def getOutputExtension(self): - if self.arguments.outputFormat and self.arguments.outputFormat not in ["plain", "simple"]: - return ".{0}".format(self.arguments.outputFormat) + def 
get_output_extension(self): + if self.arguments.output_format and self.arguments.output_format not in ["plain", "simple"]: + return ".{0}".format(self.arguments.output_format) else: return ".txt" - def setupReports(self): - if self.arguments.outputFile: - outputFile = FileUtils.get_abs_path(self.arguments.outputFile) - self.output.outputFile(outputFile) + def setup_reports(self): + if self.arguments.output_file: + output_file = FileUtils.get_abs_path(self.arguments.output_file) + self.output.output_file(output_file) else: if self.batch: - fileName = "BATCH" - fileName += self.getOutputExtension() - directoryPath = self.batchDirectoryPath + file_name = "BATCH" + file_name += self.get_output_extension() + directory_path = self.batch_directory_path else: - localRequester = Requester(self.urlList[0]) - fileName = ("{}_".format(localRequester.basePath.replace(os.path.sep, ".")[:-1])) - fileName += time.strftime("%y-%m-%d_%H-%M-%S") - fileName += self.getOutputExtension() - directoryPath = FileUtils.build_path(self.save_path, localRequester.host) + local_requester = Requester(self.url_list[0]) + file_name = ("{}_".format(local_requester.base_path.replace(os.path.sep, ".")[:-1])) + file_name += time.strftime("%y-%m-%d_%H-%M-%S") + file_name += self.get_output_extension() + directory_path = FileUtils.build_path(self.save_path, local_requester.host) - outputFile = FileUtils.build_path(directoryPath, fileName) + output_file = FileUtils.build_path(directory_path, file_name) - if FileUtils.exists(outputFile): + if FileUtils.exists(output_file): i = 2 - while FileUtils.exists(outputFile + "_" + str(i)): + while FileUtils.exists(output_file + "_" + str(i)): i += 1 - outputFile += "_" + str(i) + output_file += "_" + str(i) - if not FileUtils.exists(directoryPath): - FileUtils.create_directory(directoryPath) + if not FileUtils.exists(directory_path): + FileUtils.create_directory(directory_path) - if not FileUtils.exists(directoryPath): + if not FileUtils.exists(directory_path): 
self.output.error( - "Couldn't create the reports folder at {}".format(directoryPath) + "Couldn't create the reports folder at {}".format(directory_path) ) sys.exit(1) - self.output.outputFile(outputFile) + self.output.output_file(output_file) - if self.arguments.outputFile and self.arguments.outputFormat: - self.reportManager = ReportManager(self.arguments.outputFormat, self.arguments.outputFile) - elif self.arguments.outputFormat: - self.reportManager = ReportManager(self.arguments.outputFormat, outputFile) + if self.arguments.output_file and self.arguments.output_format: + self.report_manager = ReportManager(self.arguments.output_format, self.arguments.output_file) + elif self.arguments.output_format: + self.report_manager = ReportManager(self.arguments.output_format, output_file) else: - self.reportManager = ReportManager("plain", outputFile) + self.report_manager = ReportManager("plain", output_file) - def validatePath(self, path): + def validate_path(self, path): if not FileUtils.exists(path): self.output.error( "{} does not exist".format( @@ -417,8 +421,52 @@ def validatePath(self, path): return True - # TODO: Refactor, this function should be a decorator for all the filters - def matchCallback(self, path): + def valid(self, path): + if not path: + return False + + if path.status in self.exclude_status_codes: + return False + + if self.include_status_codes and path.status not in self.include_status_codes: + return False + + if self.blacklists.get(path.status) and path.path in self.blacklists.get(path.status): + return False + + if self.exclude_sizes and FileUtils.size_human(len(path.response.body)).strip() in self.exclude_sizes: + return False + + if self.minimum_response_size and self.minimum_response_size > len(path.response.body): + return False + + if self.maximum_response_size and self.maximum_response_size < len(path.response.body): + return False + + for exclude_text in self.exclude_texts: + if exclude_text in path.response.body.decode('iso8859-1'): + 
return False + + for exclude_regexp in self.exclude_regexps: + if ( + re.search(exclude_regexp, path.response.body.decode('iso8859-1')) + is not None + ): + return False + + for exclude_redirect in self.exclude_redirects: + if path.response.redirect and ( + ( + re.match(exclude_redirect, path.response.redirect) is not None + ) or ( + exclude_redirect in path.response.redirect + ) + ): + return False + + return True + + def match_callback(self, path): self.index += 1 for status in self.arguments.skip_on_status: @@ -426,77 +474,42 @@ def matchCallback(self, path): self.status_skip = status return + if not self.valid(path): + del path + return + + added_to_queue = False + if ( - path.status and path.status not in self.excludeStatusCodes - ) and ( - not self.includeStatusCodes or path.status in self.includeStatusCodes - ) and ( - not self.blacklists.get(path.status) or path.path not in self.blacklists.get(path.status) - ) and ( - not self.excludeSizes or FileUtils.size_human(len(path.response.body)).strip() not in self.excludeSizes - ) and ( - not self.minimumResponseSize or self.minimumResponseSize < len(path.response.body) + any([self.recursive, self.deep_recursive, self.force_recursive]) ) and ( - not self.maximumResponseSize or self.maximumResponseSize > len(path.response.body) + not self.recursion_status_codes or path.status in self.recursion_status_codes ): + if path.response.redirect: + added_to_queue = self.add_redirect_directory(path) + else: + added_to_queue = self.add_directory(path.path) - for excludeText in self.excludeTexts: - if excludeText in path.response.body.decode('iso8859-1'): - del path - return - - for excludeRegexp in self.excludeRegexps: - if ( - re.search(excludeRegexp, path.response.body.decode('iso8859-1')) - is not None - ): - del path - return - - for excludeRedirect in self.excludeRedirects: - if path.response.redirect and ( - ( - re.match(excludeRedirect, path.response.redirect.decode('iso8859-1')) - is not None - ) or ( - 
excludeRedirect in path.response.redirect - ) - ): - del path - return - - addedToQueue = False - - if ( - any([self.recursive, self.deep_recursive, self.force_recursive]) - ) and ( - not self.recursionStatusCodes or path.status in self.recursionStatusCodes - ): - if path.response.redirect: - addedToQueue = self.addRedirectDirectory(path) - else: - addedToQueue = self.addDirectory(path.path) - - self.output.statusReport( - path.path, path.response, self.arguments.full_url, addedToQueue - ) + self.output.status_report( + path.path, path.response, self.arguments.full_url, added_to_queue + ) - if self.arguments.replay_proxy: - self.requester.request(path.path, proxy=self.arguments.replay_proxy) + if self.arguments.replay_proxy: + self.requester.request(path.path, proxy=self.arguments.replay_proxy) - newPath = self.currentDirectory + path.path + new_path = self.current_directory + path.path - self.report.addResult(newPath, path.status, path.response) - self.reportManager.updateReport(self.report) + self.report.add_result(new_path, path.status, path.response) + self.report_manager.update_report(self.report) - del path + del path - def notFoundCallback(self, path): + def not_found_callback(self, path): self.index += 1 - self.output.lastPath(path, self.index, len(self.dictionary), self.currentJob, self.allJobs, self.fuzzer.rate) + self.output.last_path(path, self.index, len(self.dictionary), self.current_job, self.all_jobs, self.fuzzer.rate) del path - def errorCallback(self, path, errorMsg): + def error_callback(self, path, error_msg): if self.arguments.exit_on_error: self.exit = True self.fuzzer.stop() @@ -504,20 +517,20 @@ def errorCallback(self, path, errorMsg): exit(1) else: - self.output.addConnectionError() + self.output.add_connection_error() - def appendErrorLog(self, path, errorMsg): - with self.threadsLock: + def append_error_log(self, path, error_msg): + with self.threads_lock: line = time.strftime("[%y-%m-%d %H:%M:%S] - ") - line += self.currentUrl + " - " + 
path + " - " + errorMsg - self.errorLog.write(os.linesep + line) - self.errorLog.flush() + line += self.requester.base_url + " - " + path + " - " + error_msg + self.error_log.write(os.linesep + line) + self.error_log.flush() - def handlePause(self, message): + def handle_pause(self, message): self.output.warning(message) self.fuzzer.pause() - # If one of the tasks is broken, don't let the user wait forever for i in range(300): if self.fuzzer.stopped == len(self.fuzzer.threads): break @@ -531,10 +544,10 @@ def handlePause(self, message): if not self.directories.empty(): msg += " / [n]ext" - if len(self.urlList) > 1: + if len(self.url_list) > 1: msg += " / [s]kip target" - self.output.inLine(msg + ": ") + self.output.in_line(msg + ": ") option = input() @@ -542,7 +555,7 @@ def handlePause(self, message): self.exit = True self.fuzzer.stop() self.output.error("\nCanceled by the user") - self.reportManager.updateReport(self.report) + self.report_manager.update_report(self.report) exit(0) elif option.lower() == "c": @@ -553,14 +566,11 @@ def handlePause(self, message): self.fuzzer.stop() return - elif option.lower() == "s" and len(self.urlList) > 1: - self.output.newLine() + elif option.lower() == "s" and len(self.url_list) > 1: + self.output.new_line() raise SkipTargetInterrupt - else: - continue - - def processPaths(self): + def process_paths(self): while True: try: while not self.fuzzer.wait(0.25): @@ -576,7 +586,7 @@ def processPaths(self): raise SkipTargetInterrupt - elif self.maxtime and time.time() - self.startTime > self.maxtime: + elif self.is_timed_out(): self.output.error( "\nCanceled because the runtime exceeded the maximal set by user" ) @@ -585,89 +595,87 @@ def processPaths(self): break except KeyboardInterrupt: - self.handlePause("CTRL+C detected: Pausing threads, please wait...") + self.handle_pause("CTRL+C detected: Pausing threads, please wait...") def prepare(self): while not 
self.directories.empty(): gc.collect() - self.currentJob += 1 + self.current_job += 1 self.index = 0 - self.currentDirectory = self.directories.get() + self.current_directory = self.directories.get() self.output.warning( "[{1}] Starting: {0}".format( - self.currentDirectory, time.strftime("%H:%M:%S") + self.current_directory, time.strftime("%H:%M:%S") ) ) - self.fuzzer.requester.basePath = self.output.basePath = self.basePath + self.currentDirectory + self.fuzzer.requester.base_path = self.output.base_path = self.base_path + self.current_directory self.fuzzer.start() - self.processPaths() + self.process_paths() self.report.completed = True - self.reportManager.updateReport(self.report) + self.report_manager.update_report(self.report) self.report = None return - def addPort(self, url): - parsed = urllib.parse.urlparse(url) - if ":" not in parsed.netloc: - port = "443" if parsed.scheme == "https" else "80" - url = url.replace(parsed.netloc, parsed.netloc + ":" + port) - - return url - - def addDirectory(self, path, fullPath=None): + def add_directory(self, path): added = False path = path.split("?")[0].split("#")[0] - if path.rstrip("/") + "/" in [directory for directory in self.excludeSubdirs]: + if any([path.startswith(directory) for directory in self.exclude_subdirs]): return False - fullPath = self.currentDirectory + path if not fullPath else fullPath + full_path = self.current_directory + path dirs = [] if self.deep_recursive: for i in range(1, path.count("/") + 1): - dir = fullPath.replace(path, "") + "/".join(path.split("/")[:i]) + dir = full_path.replace(path, "") + "/".join(path.split("/")[:i]) dirs.append(dir.rstrip("/") + "/") if self.force_recursive: - if not fullPath.endswith("/"): - fullPath += "/" - dirs.append(fullPath) - elif self.recursive and fullPath.endswith("/"): - dirs.append(fullPath) + if not full_path.endswith("/"): + full_path += "/" + dirs.append(full_path) + elif self.recursive and full_path.endswith("/"): + dirs.append(full_path) for dir 
in dirs: - if self.scanSubdirs and dir in self.scanSubdirs: + if dir in self.scan_subdirs: continue - elif dir in self.doneDirs: + elif dir in self.done_dirs: continue elif self.recursion_depth and dir.count("/") > self.recursion_depth: continue self.directories.put(dir) - self.doneDirs.append(dir) + self.done_dirs.append(dir) - self.allJobs += 1 + self.all_jobs += 1 added = True return added - def addRedirectDirectory(self, path): + def add_port(self, url): + chunks = url.split("/") + if ":" not in chunks[2]: + chunks[2] += (":80" if chunks[0] == "http:" else ":443") + url = "/".join(chunks) + + return url + + def add_redirect_directory(self, path): # Resolve the redirect header relative to the current URL and add the # path to self.directories if it is a subdirectory of the current URL - baseUrl = self.currentUrl + self.currentDirectory - baseUrl = self.addPort(baseUrl) + base_url = self.requester.base_url + self.base_path + self.current_directory + path.path - absoluteUrl = urllib.parse.urljoin(baseUrl, path.response.redirect) - absoluteUrl = self.addPort(absoluteUrl) + redirect_url = urljoin(self.requester.base_url, path.response.redirect) + redirect_url = self.add_port(redirect_url) - if absoluteUrl.startswith(baseUrl) and absoluteUrl != baseUrl: - path = absoluteUrl[len(baseUrl):] - fullPath = absoluteUrl[len(self.addPort(self.currentUrl)):] + if redirect_url.startswith(base_url + "/"): + path = redirect_url[len(self.requester.base_url + self.base_path + self.current_directory):] - return self.addDirectory(path, fullPath) + return self.add_directory(path) return False diff --git a/lib/core/argument_parser.py b/lib/core/argument_parser.py index 3bd8c7921..2e1d59f4c 100755 --- a/lib/core/argument_parser.py +++ b/lib/core/argument_parser.py @@ -32,20 +32,20 @@ class ArgumentParser(object): def __init__(self, script_path): self.script_path = script_path - self.parseConfig() + self.parse_config() - options = self.parseArguments() + options = self.parse_arguments() 
self.quiet = options.quiet self.full_url = options.full_url - self.urlList = None + self.url_list = None self.raw_file = None if not options.url: - if options.urlList: + if options.url_list: - with File(options.urlList) as urlList: + with File(options.url_list) as urlList: if not urlList.exists(): print("The file with URLs does not exist") @@ -59,13 +59,13 @@ def __init__(self, script_path): print("The file with URLs cannot be read") exit(1) - self.urlList = list(urlList.get_lines()) + self.url_list = list(urlList.get_lines()) elif options.cidr: - self.urlList = [str(ip) for ip in IPv4Network(options.cidr)] + self.url_list = [str(ip) for ip in IPv4Network(options.cidr)] elif options.stdin_urls: - self.urlList = sys.stdin.read().splitlines() + self.url_list = sys.stdin.read().splitlines() elif options.raw_file: with File(options.raw_file) as raw_content: @@ -88,17 +88,17 @@ def __init__(self, script_path): exit(1) else: - self.urlList = [options.url] + self.url_list = [options.url] - if not options.extensions and not options.noExtension: + if not options.extensions and not options.no_extension: print("WARNING: No extension was specified!") - if options.noExtension: + if options.no_extension: options.extensions = str() # Enable to use multiple dictionaries at once - for dictFile in options.wordlist.split(","): - with File(dictFile) as wordlist: + for dict_file in options.wordlist.split(","): + with File(dict_file) as wordlist: if not wordlist.exists(): print("The wordlist file does not exist") exit(1) @@ -111,8 +111,8 @@ def __init__(self, script_path): print("The wordlist cannot be read") exit(1) - if options.proxyList: - with File(options.proxyList) as plist: + if options.proxy_list: + with File(options.proxy_list) as plist: if not plist.exists(): print("The proxylist file does not exist") exit(1) @@ -125,20 +125,20 @@ def __init__(self, script_path): print("The proxylist cannot be read") exit(1) - self.proxylist = open(options.proxyList).read().splitlines() + 
self.proxylist = open(options.proxy_list).read().splitlines() - options.requestByHostname = True + options.request_by_hostname = True elif options.proxy: self.proxy = options.proxy - options.requestByHostname = True + options.request_by_hostname = True else: self.proxy = None if options.replay_proxy: self.replay_proxy = options.replay_proxy - options.requestByHostname = True + options.request_by_hostname = True else: self.replay_proxy = None @@ -155,9 +155,9 @@ def __init__(self, script_path): else: self.headers = {} - if options.headerList: + if options.header_list: try: - with File(options.headerList) as hlist: + with File(options.header_list) as hlist: if not hlist.exists(): print("The header list file does not exist") exit(1) @@ -193,149 +193,149 @@ def __init__(self, script_path): oset([extension.lstrip(' .') for extension in options.extensions.split(",")]) ) - if options.excludeExtensions: - self.excludeExtensions = list( - oset([excludeExtension.lstrip(' .') for excludeExtension in options.excludeExtensions.split(",")]) + if options.exclude_extensions: + self.exclude_extensions = list( + oset([exclude_extension.lstrip(' .') for exclude_extension in options.exclude_extensions.split(",")]) ) else: - self.excludeExtensions = [] + self.exclude_extensions = [] self.useragent = options.useragent - self.useRandomAgents = options.useRandomAgents + self.use_random_agents = options.use_random_agents self.cookie = options.cookie - if options.threadsCount < 1: + if options.threads_count < 1: print("Threads number must be greater than zero") exit(1) - self.threadsCount = options.threadsCount + self.threads_count = options.threads_count - self.includeStatusCodes = [] + self.include_status_codes = [] - if options.includeStatusCodes: - for statusCode in options.includeStatusCodes.split(","): + if options.include_status_codes: + for status_code in options.include_status_codes.split(","): try: - if "-" in statusCode: - statusCodes = [ + if "-" in status_code: + status_codes = 
[ i for i in range( - int(statusCode.split("-")[0].strip()), - int(statusCode.split("-")[1].strip()) + 1 + int(status_code.split("-")[0].strip()), + int(status_code.split("-")[1].strip()) + 1 ) ] - self.includeStatusCodes.extend(statusCodes) + self.include_status_codes.extend(status_codes) else: - self.includeStatusCodes.append(int(statusCode.strip())) + self.include_status_codes.append(int(status_code.strip())) except ValueError: - print("Invalid status code or status code range: {0}".format(statusCode)) + print("Invalid status code or status code range: {0}".format(status_code)) exit(1) - self.excludeStatusCodes = [] + self.exclude_status_codes = [] - if options.excludeStatusCodes: - for statusCode in options.excludeStatusCodes.split(","): + if options.exclude_status_codes: + for status_code in options.exclude_status_codes.split(","): try: - if "-" in statusCode: - statusCodes = [ + if "-" in status_code: + status_codes = [ i for i in range( - int(statusCode.split("-")[0].strip()), - int(statusCode.split("-")[1].strip()) + 1 + int(status_code.split("-")[0].strip()), + int(status_code.split("-")[1].strip()) + 1 ) ] - self.excludeStatusCodes.extend(statusCodes) + self.exclude_status_codes.extend(status_codes) else: - self.excludeStatusCodes.append(int(statusCode.strip())) + self.exclude_status_codes.append(int(status_code.strip())) except ValueError: - print("Invalid status code or status code range: {0}".format(statusCode)) + print("Invalid status code or status code range: {0}".format(status_code)) exit(1) - self.recursionStatusCodes = [] + self.recursion_status_codes = [] - if options.recursionStatusCodes: - for statusCode in options.recursionStatusCodes.split(","): + if options.recursion_status_codes: + for status_code in options.recursion_status_codes.split(","): try: - if "-" in statusCode: - statusCodes = [ + if "-" in status_code: + status_codes = [ i for i in range( - int(statusCode.split("-")[0].strip()), - int(statusCode.split("-")[1].strip()) + 1 + 
int(status_code.split("-")[0].strip()), + int(status_code.split("-")[1].strip()) + 1 ) ] - self.recursionStatusCodes.extend(statusCodes) + self.recursion_status_codes.extend(status_codes) else: - self.recursionStatusCodes.append(int(statusCode.strip())) + self.recursion_status_codes.append(int(status_code.strip())) except ValueError: - print("Invalid status code or status code range: {0}".format(statusCode)) + print("Invalid status code or status code range: {0}".format(status_code)) exit(1) - if options.excludeSizes: + if options.exclude_sizes: try: - self.excludeSizes = list( + self.exclude_sizes = list( oset( [ - excludeSize.strip().upper() if excludeSize else None - for excludeSize in options.excludeSizes.split(",") + exclude_size.strip().upper() if exclude_size else None + for exclude_size in options.exclude_sizes.split(",") ] ) ) except ValueError: - self.excludeSizes = [] + self.exclude_sizes = [] else: - self.excludeSizes = [] + self.exclude_sizes = [] - if options.excludeTexts: + if options.exclude_texts: try: - self.excludeTexts = list( + self.exclude_texts = list( oset( [ - excludeText.strip() if excludeText else None - for excludeText in options.excludeTexts.split(",") + exclude_text.strip() if exclude_text else None + for exclude_text in options.exclude_texts.split(",") ] ) ) except ValueError: - self.excludeTexts = [] + self.exclude_texts = [] else: - self.excludeTexts = [] + self.exclude_texts = [] - if options.excludeRegexps: + if options.exclude_regexps: try: - self.excludeRegexps = list( + self.exclude_regexps = list( oset( [ - excludeRegexp.strip() if excludeRegexp else None - for excludeRegexp in options.excludeRegexps.split(",") + exclude_regexp.strip() if exclude_regexp else None + for exclude_regexp in options.exclude_regexps.split(",") ] ) ) except ValueError: - self.excludeRegexps = [] + self.exclude_regexps = [] else: - self.excludeRegexps = [] + self.exclude_regexps = [] - if options.excludeRedirects: + if options.exclude_redirects: try: 
- self.excludeRedirects = list( + self.exclude_redirects = list( oset( [ - excludeRedirect.strip() if excludeRedirect else None - for excludeRedirect in options.excludeRedirects.split(",") + exclude_redirect.strip() if exclude_redirect else None + for exclude_redirect in options.exclude_redirects.split(",") ] ) ) except ValueError: - self.excludeRedirects = [] + self.exclude_redirects = [] else: - self.excludeRedirects = [] + self.exclude_redirects = [] self.prefixes = [] if not options.prefixes else list(oset([prefix.strip() for prefix in options.prefixes.split(",")])) self.suffixes = [] if not options.suffixes else list(oset([suffix.strip() for suffix in options.suffixes.split(",")])) @@ -348,43 +348,43 @@ def __init__(self, script_path): self.lowercase = options.lowercase self.uppercase = options.uppercase self.capitalization = options.capitalization - self.forceExtensions = options.forceExtensions + self.force_extensions = options.force_extensions self.data = options.data - self.excludeContent = options.excludeContent + self.exclude_content = options.exclude_content self.color = options.color self.delay = options.delay self.timeout = options.timeout self.ip = options.ip - self.maxRetries = options.maxRetries + self.max_retries = options.max_retries self.recursive = options.recursive self.deep_recursive = options.deep_recursive self.force_recursive = options.force_recursive - self.minimumResponseSize = options.minimumResponseSize - self.maximumResponseSize = options.maximumResponseSize - self.noExtension = options.noExtension - self.onlySelected = options.onlySelected - self.outputFile = options.outputFile - self.outputFormat = options.outputFormat - - if options.scanSubdirs: - self.scanSubdirs = list( + self.minimum_response_size = options.minimum_response_size + self.maximum_response_size = options.maximum_response_size + self.no_extension = options.no_extension + self.only_selected = options.only_selected + self.output_file = options.output_file + 
self.output_format = options.output_format + + if options.scan_subdirs: + self.scan_subdirs = list( oset( - [subdir.strip(" /") + "/" for subdir in options.scanSubdirs.split(",")] + [subdir.strip(" /") + "/" for subdir in options.scan_subdirs.split(",")] ) ) else: - self.scanSubdirs = [] + self.scan_subdirs = [] - if options.excludeSubdirs: - self.excludeSubdirs = list( + if options.exclude_subdirs: + self.exclude_subdirs = list( oset( - [subdir.strip(" /") + "/" for subdir in options.excludeSubdirs.split(",")] + [subdir.strip(" /") + "/" for subdir in options.exclude_subdirs.split(",")] ) ) else: - self.excludeSubdirs = None + self.exclude_subdirs = [] if options.skip_on_status: try: @@ -411,16 +411,16 @@ def __init__(self, script_path): print("No authentication credential found") exit(1) - if len(set(self.extensions).intersection(self.excludeExtensions)): + if len(set(self.extensions).intersection(self.exclude_extensions)): print("Exclude extension list can not contain any extension that has already in the extension list") exit(1) self.auth_type = options.auth_type self.auth = options.auth - self.redirect = options.followRedirects + self.redirect = options.follow_redirects self.httpmethod = options.httpmethod self.scheme = options.scheme - self.requestByHostname = options.requestByHostname + self.request_by_hostname = options.request_by_hostname self.exit_on_error = options.exit_on_error self.maxrate = options.maxrate self.maxtime = options.maxtime @@ -431,37 +431,37 @@ def __init__(self, script_path): print("Invalid URI scheme: {0}".format(self.scheme)) exit(1) - if self.outputFormat and self.outputFormat not in ["simple", "plain", "json", "xml", "md", "csv", "html"]: + if self.output_format and self.output_format not in ["simple", "plain", "json", "xml", "md", "csv", "html"]: print("Select one of the following output formats: simple, plain, json, xml, md, csv, html") exit(1) - def parseConfig(self): + def parse_config(self): config = DefaultConfigParser() - 
configPath = FileUtils.build_path(self.script_path, "default.conf") - config.read(configPath) + config_path = FileUtils.build_path(self.script_path, "default.conf") + config.read(config_path) # Mandatory - self.defaultExtensions = config.safe_get("mandatory", "default-extensions", str()) - self.excludeExtensions = config.safe_get("mandatory", "exclude-extensions", None) - self.forceExtensions = config.safe_getboolean("mandatory", "force-extensions", False) + self.default_extensions = config.safe_get("mandatory", "default-extensions", str()) + self.exclude_extensions = config.safe_get("mandatory", "exclude-extensions", None) + self.force_extensions = config.safe_getboolean("mandatory", "force-extensions", False) # General - self.threadsCount = config.safe_getint( + self.threads_count = config.safe_getint( "general", "threads", 30, list(range(1, 300)) ) - self.includeStatusCodes = config.safe_get("general", "include-status", None) - self.excludeStatusCodes = config.safe_get("general", "exclude-status", None) - self.excludeSizes = config.safe_get("general", "exclude-sizes", None) - self.excludeTexts = config.safe_get("general", "exclude-texts", None) - self.excludeRegexps = config.safe_get("general", "exclude-regexps", None) - self.excludeRedirects = config.safe_get("general", "exclude-redirects", None) - self.excludeContent = config.safe_get("general", "exclude-content", "") + self.include_status_codes = config.safe_get("general", "include-status", None) + self.exclude_status_codes = config.safe_get("general", "exclude-status", None) + self.exclude_sizes = config.safe_get("general", "exclude-sizes", None) + self.exclude_texts = config.safe_get("general", "exclude-texts", None) + self.exclude_regexps = config.safe_get("general", "exclude-regexps", None) + self.exclude_redirects = config.safe_get("general", "exclude-redirects", None) + self.exclude_content = config.safe_get("general", "exclude-content", "") self.recursive = config.safe_getboolean("general", 
"recursive", False) self.deep_recursive = config.safe_getboolean("general", "deep-recursive", False) self.force_recursive = config.safe_getboolean("general", "force-recursive", False) self.recursion_depth = config.safe_getint("general", "recursion-depth", 0) - self.recursionStatusCodes = config.safe_get("general", "recursion-status", None) - self.excludeSubdirs = config.safe_get("general", "exclude-subdirs", None) + self.recursion_status_codes = config.safe_get("general", "recursion-status", None) + self.exclude_subdirs = config.safe_get("general", "exclude-subdirs", None) self.skip_on_status = config.safe_get("general", "skip-on-status", None) self.maxtime = config.safe_getint("general", "max-time", 0) self.full_url = config.safe_getboolean("general", "full-url", False) @@ -469,10 +469,10 @@ def parseConfig(self): self.quiet = config.safe_getboolean("general", "quiet-mode", False) # Reports - self.outputLocation = config.safe_get("reports", "report-output-folder", None) - self.autosaveReport = config.safe_getboolean("reports", "autosave-report", False) - self.logsLocation = config.safe_get("reports", "logs-location", None) - self.outputFormat = config.safe_get( + self.output_location = config.safe_get("reports", "report-output-folder", None) + self.autosave_report = config.safe_getboolean("reports", "autosave-report", False) + self.logs_location = config.safe_get("reports", "logs-location", None) + self.output_format = config.safe_get( "reports", "report-format", "plain", ["plain", "simple", "json", "xml", "md", "csv", "html"] ) @@ -492,27 +492,27 @@ def parseConfig(self): self.httpmethod = config.safe_get( "request", "httpmethod", "get" ) - self.headerList = config.safe_get("request", "headers-file", None) + self.header_list = config.safe_get("request", "headers-file", None) self.redirect = config.safe_getboolean("request", "follow-redirects", False) - self.useRandomAgents = config.safe_get("request", "random-user-agents", False) + self.use_random_agents = 
config.safe_get("request", "random-user-agents", False) self.useragent = config.safe_get("request", "user-agent", "") self.cookie = config.safe_get("request", "cookie", "") # Connection self.delay = config.safe_getfloat("connection", "delay", 0) self.timeout = config.safe_getint("connection", "timeout", 10) - self.maxRetries = config.safe_getint("connection", "retries", 2) + self.max_retries = config.safe_getint("connection", "retries", 2) self.maxrate = config.safe_getint("connection", "max-rate", 0) self.proxy = config.safe_get("connection", "proxy", None) self.proxylist = config.safe_get("connection", "proxy-list", None) self.scheme = config.safe_get("connection", "scheme", "http", ["http", "https"]) self.replay_proxy = config.safe_get("connection", "replay-proxy", None) - self.requestByHostname = config.safe_getboolean( + self.request_by_hostname = config.safe_getboolean( "connection", "request-by-hostname", False ) self.exit_on_error = config.safe_getboolean("connection", "exit-on-error", False) - def parseArguments(self): + def parse_arguments(self): usage = "Usage: %prog [-u|--url] target [-e|--extensions] extensions [options]" parser = OptionParser(usage, version="dirsearch v0.4.1", epilog=""" @@ -523,17 +523,17 @@ def parseArguments(self): # Mandatory arguments mandatory = OptionGroup(parser, "Mandatory") mandatory.add_option("-u", "--url", help="Target URL", action="store", type="string", dest="url", default=None) - mandatory.add_option("-l", "--url-list", help="Target URL list file", action="store", type="string", dest="urlList", + mandatory.add_option("-l", "--url-list", help="Target URL list file", action="store", type="string", dest="url_list", default=None, metavar="FILE") mandatory.add_option("--stdin", help="Target URL list from STDIN", action="store_true", dest="stdin_urls") mandatory.add_option("--cidr", help="Target CIDR", action="store", type="string", dest="cidr", default=None) mandatory.add_option("--raw", help="Load raw HTTP request from 
file (use `--scheme` flag to set the scheme)", action="store", dest="raw_file", metavar="FILE") mandatory.add_option("-e", "--extensions", help="Extension list separated by commas (Example: php,asp)", - action="store", dest="extensions", default=self.defaultExtensions) - mandatory.add_option("-X", "--exclude-extensions", action="store", dest="excludeExtensions", default=self.excludeExtensions, + action="store", dest="extensions", default=self.default_extensions) + mandatory.add_option("-X", "--exclude-extensions", action="store", dest="exclude_extensions", default=self.exclude_extensions, help="Exclude extension list separated by commas (Example: asp,jsp)", metavar="EXTENSIONS") - mandatory.add_option("-f", "--force-extensions", action="store_true", dest="forceExtensions", default=self.forceExtensions, + mandatory.add_option("-f", "--force-extensions", action="store_true", dest="force_extensions", default=self.force_extensions, help="Add extensions to every wordlist entry. By default dirsearch only replaces the %EXT% keyword with extensions") # Dictionary Settings @@ -545,9 +545,9 @@ def parseArguments(self): help="Add custom prefixes to all wordlist entries (separated by commas)") dictionary.add_option("--suffixes", action="store", dest="suffixes", default=self.suffixes, help="Add custom suffixes to all wordlist entries, ignore directories (separated by commas)") - dictionary.add_option("--only-selected", dest="onlySelected", action="store_true", + dictionary.add_option("--only-selected", dest="only_selected", action="store_true", help="Remove paths have different extensions from selected ones via `-e` (keep entries don't have extensions)") - dictionary.add_option("--remove-extensions", dest="noExtension", action="store_true", + dictionary.add_option("--remove-extensions", dest="no_extension", action="store_true", help="Remove extensions in all paths (Example: admin.php -> admin)") dictionary.add_option("-U", "--uppercase", action="store_true", dest="uppercase", 
default=self.uppercase, help="Uppercase wordlist") @@ -558,8 +558,8 @@ def parseArguments(self): # Optional Settings general = OptionGroup(parser, "General Settings") - general.add_option("-t", "--threads", help="Number of threads", action="store", type="int", dest="threadsCount", - default=self.threadsCount, metavar="THREADS") + general.add_option("-t", "--threads", help="Number of threads", action="store", type="int", dest="threads_count", + default=self.threads_count, metavar="THREADS") general.add_option("-r", "--recursive", help="Brute-force recursively", action="store_true", dest="recursive", default=self.recursive) general.add_option("--deep-recursive", help="Perform recursive scan on every directory depth (Example: api/users -> api/)", action="store_true", dest="deep_recursive", @@ -569,30 +569,30 @@ def parseArguments(self): general.add_option("--recursion-depth", help="Maximum recursion depth", action="store", type="int", dest="recursion_depth", default=self.recursion_depth, metavar="DEPTH") general.add_option("--recursion-status", help="Valid status codes to perform recursive scan, support ranges (separated by commas)", - action="store", dest="recursionStatusCodes", default=self.recursionStatusCodes, metavar="CODES") + action="store", dest="recursion_status_codes", default=self.recursion_status_codes, metavar="CODES") general.add_option("--subdirs", help="Scan sub-directories of the given URL[s] (separated by commas)", action="store", - dest="scanSubdirs", default=None, metavar="SUBDIRS") + dest="scan_subdirs", default=None, metavar="SUBDIRS") general.add_option("--exclude-subdirs", help="Exclude the following subdirectories during recursive scan (separated by commas)", - action="store", dest="excludeSubdirs", default=self.excludeSubdirs, metavar="SUBDIRS") + action="store", dest="exclude_subdirs", default=self.exclude_subdirs, metavar="SUBDIRS") general.add_option("-i", "--include-status", help="Include status codes, separated by commas, support ranges 
(Example: 200,300-399)", - action="store", dest="includeStatusCodes", default=self.includeStatusCodes, metavar="CODES") + action="store", dest="include_status_codes", default=self.include_status_codes, metavar="CODES") general.add_option("-x", "--exclude-status", help="Exclude status codes, separated by commas, support ranges (Example: 301,500-599)", - action="store", dest="excludeStatusCodes", default=self.excludeStatusCodes, metavar="CODES") + action="store", dest="exclude_status_codes", default=self.exclude_status_codes, metavar="CODES") general.add_option("--exclude-sizes", help="Exclude responses by sizes, separated by commas (Example: 123B,4KB)", - action="store", dest="excludeSizes", default=self.excludeSizes, metavar="SIZES") + action="store", dest="exclude_sizes", default=self.exclude_sizes, metavar="SIZES") general.add_option("--exclude-texts", help="Exclude responses by texts, separated by commas (Example: 'Not found', 'Error')", - action="store", dest="excludeTexts", default=self.excludeTexts, metavar="TEXTS") + action="store", dest="exclude_texts", default=self.exclude_texts, metavar="TEXTS") general.add_option("--exclude-regexps", help="Exclude responses by regexps, separated by commas (Example: 'Not foun[a-z]{1}', '^Error$')", - action="store", dest="excludeRegexps", default=self.excludeRegexps, metavar="REGEXPS") + action="store", dest="exclude_regexps", default=self.exclude_regexps, metavar="REGEXPS") general.add_option("--exclude-redirects", help="Exclude responses by redirect regexps or texts, separated by commas (Example: 'https://okta.com/*')", - action="store", dest="excludeRedirects", default=self.excludeRedirects, metavar="REGEXPS") + action="store", dest="exclude_redirects", default=self.exclude_redirects, metavar="REGEXPS") general.add_option("--exclude-content", help="Exclude responses by response content of this path", action="store", - dest="excludeContent", default=self.excludeContent, metavar="PATH") + dest="exclude_content", 
default=self.exclude_content, metavar="PATH") general.add_option("--skip-on-status", action="store", dest="skip_on_status", default=self.skip_on_status, help="Skip target whenever hit one of these status codes, separated by commas", metavar="CODES") - general.add_option("--minimal", action="store", dest="minimumResponseSize", type="int", default=None, + general.add_option("--minimal", action="store", dest="minimum_response_size", type="int", default=None, help="Minimal response length", metavar="LENGTH") - general.add_option("--maximal", action="store", dest="maximumResponseSize", type="int", default=None, + general.add_option("--maximal", action="store", dest="maximum_response_size", type="int", default=None, help="Maximal response length", metavar="LENGTH") general.add_option("--max-time", action="store", dest="maxtime", type="int", default=self.maxtime, help="Maximal runtime for the scan", metavar="SECONDS") @@ -612,11 +612,11 @@ def parseArguments(self): request.add_option("-H", "--header", help="HTTP request header, support multiple flags (Example: -H 'Referer: example.com')", action="append", type="string", dest="headers", default=None) request.add_option("--header-list", help="File contains HTTP request headers", type="string", - dest="headerList", default=self.headerList, metavar="FILE") + dest="header_list", default=self.header_list, metavar="FILE") request.add_option("-F", "--follow-redirects", help="Follow HTTP redirects", - action="store_true", dest="followRedirects", default=self.redirect) + action="store_true", dest="follow_redirects", default=self.redirect) request.add_option("--random-agent", help="Choose a random User-Agent for each request", - default=self.useRandomAgents, action="store_true", dest="useRandomAgents") + default=self.use_random_agents, action="store_true", dest="use_random_agents") request.add_option("--auth-type", help="Authentication type (basic, digest, bearer, ntlm)", action="store", dest="auth_type", metavar="TYPE") 
request.add_option("--auth", help="Authentication credential (user:password or bearer token)", @@ -633,7 +633,7 @@ def parseArguments(self): type="float", default=self.delay) connection.add_option("--proxy", action="store", dest="proxy", type="string", default=self.proxy, help="Proxy URL, support HTTP and SOCKS proxies (Example: localhost:8080, socks5://localhost:8088)", metavar="PROXY") - connection.add_option("--proxy-list", action="store", dest="proxyList", type="string", + connection.add_option("--proxy-list", action="store", dest="proxy_list", type="string", default=self.proxylist, help="File contains proxy servers", metavar="FILE") connection.add_option("--replay-proxy", action="store", dest="replay_proxy", type="string", default=self.replay_proxy, help="Proxy to replay with found paths", metavar="PROXY") @@ -642,10 +642,10 @@ def parseArguments(self): connection.add_option("--max-rate", help="Max requests per second", action="store", dest="maxrate", type="int", default=self.maxrate, metavar="RATE") connection.add_option("--retries", help="Number of retries for failed requests", action="store", - dest="maxRetries", type="int", default=self.maxRetries, metavar="RETRIES") + dest="max_retries", type="int", default=self.max_retries, metavar="RETRIES") connection.add_option("-b", "--request-by-hostname", help="By default dirsearch requests by IP for speed. 
This will force dirsearch to request by hostname", - action="store_true", dest="requestByHostname", default=self.requestByHostname) + action="store_true", dest="request_by_hostname", default=self.request_by_hostname) connection.add_option("--ip", action="store", dest="ip", default=None, help="Server IP address") connection.add_option("--exit-on-error", action="store_true", dest="exit_on_error", default=self.exit_on_error, @@ -653,8 +653,8 @@ def parseArguments(self): # Report Settings reports = OptionGroup(parser, "Reports") - reports.add_option("-o", "--output", action="store", dest="outputFile", default=None, metavar="FILE", help="Output file") - reports.add_option("--format", action="store", dest="outputFormat", default=self.outputFormat, metavar="FORMAT", + reports.add_option("-o", "--output", action="store", dest="output_file", default=None, metavar="FILE", help="Output file") + reports.add_option("--format", action="store", dest="output_format", default=self.output_format, metavar="FORMAT", help="Report format (Available: simple, plain, json, xml, md, csv, html)") parser.add_option_group(mandatory) diff --git a/lib/core/dictionary.py b/lib/core/dictionary.py index 57f93bc1f..56f830d3d 100755 --- a/lib/core/dictionary.py +++ b/lib/core/dictionary.py @@ -34,27 +34,27 @@ def __init__( lowercase=False, uppercase=False, capitalization=False, - forcedExtensions=False, - excludeExtensions=[], - noExtension=False, - onlySelected=False, + forced_extensions=False, + exclude_extensions=[], + no_extension=False, + only_selected=False, ): self.entries = [] - self.currentIndex = 0 + self.current_index = 0 self.condition = threading.Lock() self._extensions = extensions - self._excludeExtensions = excludeExtensions + self._exclude_extensions = exclude_extensions self._prefixes = prefixes self._suffixes = suffixes self._paths = paths - self._forcedExtensions = forcedExtensions - self._noExtension = noExtension - self._onlySelected = onlySelected + self._forced_extensions = 
forced_extensions + self._no_extension = no_extension + self._only_selected = only_selected self.lowercase = lowercase self.uppercase = uppercase self.capitalization = capitalization - self.dictionaryFiles = [File(path) for path in self.paths] + self.dictionary_files = [File(path) for path in self.paths] self.generate() @property @@ -102,8 +102,8 @@ def generate(self): result = [] # Enable to use multiple dictionaries at once - for dictFile in self.dictionaryFiles: - for line in list(filter(None, dict.fromkeys(dictFile.get_lines()))): + for dict_file in self.dictionary_files: + for line in list(filter(None, dict.fromkeys(dict_file.get_lines()))): # Skip comments if line.lstrip().startswith("#"): continue @@ -111,7 +111,7 @@ def generate(self): if line.startswith("/"): line = line[1:] - if self._noExtension: + if self._no_extension: line = line[0] + line[1:].split(".")[0] if line == ".": continue @@ -124,9 +124,9 @@ def generate(self): noforce = False # Skip if the path contains excluded extensions - if self._excludeExtensions: + if self._exclude_extensions: if any( - [find("." + extension, line) for extension in self._excludeExtensions] + [find("." + extension, line) for extension in self._exclude_extensions] ): continue @@ -140,7 +140,7 @@ def generate(self): # If forced extensions is used and the path is not a directory ... (terminated by /) # process line like a forced extension. - elif self._forcedExtensions and not line.rstrip().endswith("/") and not noforce: + elif self._forced_extensions and not line.rstrip().endswith("/") and not noforce: quoted = self.quote(line) for extension in self._extensions: @@ -157,7 +157,7 @@ def generate(self): else: quoted = self.quote(line) - if self._onlySelected and not line.rstrip().endswith("/") and "." in line: + if self._only_selected and not line.rstrip().endswith("/") and "." in line: for extension in self._extensions: if line.endswith("." 
+ extension): result.append(quoted) @@ -202,28 +202,28 @@ def regenerate(self): self.generate() self.reset() - def nextWithIndex(self, basePath=None): + def next_with_index(self, base_path=None): self.condition.acquire() try: - result = self.entries[self.currentIndex] + result = self.entries[self.current_index] except IndexError: self.condition.release() raise StopIteration - self.currentIndex = self.currentIndex + 1 - currentIndex = self.currentIndex + self.current_index = self.current_index + 1 + current_index = self.current_index self.condition.release() - return currentIndex, result + return current_index, result - def __next__(self, basePath=None): - _, path = self.nextWithIndex(basePath) + def __next__(self, base_path=None): + _, path = self.next_with_index(base_path) return path def reset(self): self.condition.acquire() - self.currentIndex = 0 + self.current_index = 0 self.condition.release() def __len__(self): diff --git a/lib/core/fuzzer.py b/lib/core/fuzzer.py index 62f5ef75e..190f17b67 100755 --- a/lib/core/fuzzer.py +++ b/lib/core/fuzzer.py @@ -31,23 +31,23 @@ def __init__( dictionary, suffixes=None, prefixes=None, - excludeContent=None, + exclude_content=None, threads=1, delay=0, maxrate=0, - matchCallbacks=[], - notFoundCallbacks=[], - errorCallbacks=[], + match_callbacks=[], + not_found_callbacks=[], + error_callbacks=[], ): self.requester = requester self.dictionary = dictionary self.suffixes = suffixes if suffixes else [] self.prefixes = prefixes if prefixes else [] - self.excludeContent = excludeContent - self.basePath = self.requester.basePath + self.exclude_content = exclude_content + self.base_path = self.requester.base_path self.threads = [] - self.threadsCount = ( + self.threads_count = ( threads if len(self.dictionary) >= threads else len(self.dictionary) ) self.delay = delay @@ -55,10 +55,10 @@ def __init__( self.running = False self.stopped = 0 self.calibration = None - self.defaultScanner = None - self.matchCallbacks = matchCallbacks - 
self.notFoundCallbacks = notFoundCallbacks - self.errorCallbacks = errorCallbacks + self.default_scanner = None + self.match_callbacks = match_callbacks + self.not_found_callbacks = not_found_callbacks + self.error_callbacks = error_callbacks self.matches = [] self.scanners = { "prefixes": {}, @@ -74,7 +74,7 @@ def wait(self, timeout=None): return True - def setupScanners(self): + def setup_scanners(self): if len(self.scanners): self.scanners = { "prefixes": {}, @@ -82,7 +82,7 @@ def setupScanners(self): } # Default scanners (wildcard testers) - self.defaultScanner = Scanner(self.requester) + self.default_scanner = Scanner(self.requester) self.prefixes.append(".") self.suffixes.append("/") @@ -102,25 +102,25 @@ def setupScanners(self): self.requester, suffix="." + extension ) - if self.excludeContent: - if self.excludeContent.startswith("/"): - self.excludeContent = self.excludeContent[1:] - self.calibration = Scanner(self.requester, calibration=self.excludeContent) + if self.exclude_content: + if self.exclude_content.startswith("/"): + self.exclude_content = self.exclude_content[1:] + self.calibration = Scanner(self.requester, calibration=self.exclude_content) - def setupThreads(self): + def setup_threads(self): if len(self.threads): self.threads = [] - for thread in range(self.threadsCount): - newThread = threading.Thread(target=self.thread_proc) - newThread.daemon = True - self.threads.append(newThread) + for thread in range(self.threads_count): + new_thread = threading.Thread(target=self.thread_proc) + new_thread.daemon = True + self.threads.append(new_thread) - def getScannerFor(self, path): + def get_scanner_for(self, path): # Clean the path, so can check for extensions/suffixes path = path.split("?")[0].split("#")[0] - if self.excludeContent: + if self.exclude_content: yield self.calibration for prefix in self.prefixes: @@ -135,22 +135,22 @@ def getScannerFor(self, path): if path.endswith("." + extension): yield self.scanners["suffixes"]["." 
+ extension] - yield self.defaultScanner + yield self.default_scanner def start(self): # Setting up testers - self.setupScanners() + self.setup_scanners() # Setting up threads - self.setupThreads() + self.setup_threads() self.index = 0 self.rate = 0 self.dictionary.reset() - self.runningThreadsCount = len(self.threads) + self.running_threads_count = len(self.threads) self.running = True self.paused = False - self.playEvent = threading.Event() - self.pausedSemaphore = threading.Semaphore(0) - self.playEvent.clear() + self.play_event = threading.Event() + self.paused_semaphore = threading.Semaphore(0) + self.play_event.clear() self.exit = False for thread in self.threads: @@ -159,18 +159,18 @@ def start(self): self.play() def play(self): - self.playEvent.set() + self.play_event.set() def pause(self): self.paused = True - self.playEvent.clear() + self.play_event.clear() for thread in self.threads: if thread.is_alive(): - self.pausedSemaphore.acquire() + self.paused_semaphore.acquire() def resume(self): self.paused = False - self.pausedSemaphore.release() + self.paused_semaphore.release() self.play() def stop(self): @@ -181,34 +181,34 @@ def scan(self, path): response = self.requester.request(path) result = response.status - for tester in list(set(self.getScannerFor(path))): + for tester in list(set(self.get_scanner_for(path))): if not tester.scan(path, response): result = None break return result, response - def isPaused(self): + def is_paused(self): return self.paused - def isRunning(self): + def is_running(self): return self.running - def finishThreads(self): + def finish_threads(self): self.running = False - self.finishedEvent.set() + self.finished_event.set() - def isFinished(self): - return self.runningThreadsCount == 0 + def is_finished(self): + return self.running_threads_count == 0 - def stopThread(self): - self.runningThreadsCount -= 1 + def stop_thread(self): + self.running_threads_count -= 1 - def reduceRate(self): + def reduce_rate(self): self.rate -= 1 
def thread_proc(self): - self.playEvent.wait() + self.play_event.wait() try: path = next(self.dictionary) @@ -216,33 +216,33 @@ def thread_proc(self): while path: try: # Pause if the request rate exceeded the maximum - while self.maxrate and self.rate > self.maxrate: + while self.maxrate and self.rate >= self.maxrate: pass self.rate += 1 - threading.Timer(1, self.reduceRate).start() + threading.Timer(1, self.reduce_rate).start() status, response = self.scan(path) result = Path(path=path, status=status, response=response) if status: self.matches.append(result) - for callback in self.matchCallbacks: + for callback in self.match_callbacks: callback(result) else: - for callback in self.notFoundCallbacks: + for callback in self.not_found_callbacks: callback(result) except RequestException as e: - for callback in self.errorCallbacks: + for callback in self.error_callbacks: callback(path, e.args[0]["message"]) continue finally: - if not self.playEvent.isSet(): + if not self.play_event.is_set(): self.stopped += 1 - self.pausedSemaphore.release() - self.playEvent.wait() + self.paused_semaphore.release() + self.play_event.wait() path = next(self.dictionary) # Raises StopIteration when finishes @@ -255,4 +255,4 @@ def thread_proc(self): pass finally: - self.stopThread() + self.stop_thread() diff --git a/lib/core/raw.py b/lib/core/raw.py index 69c5a5c43..804074054 100644 --- a/lib/core/raw.py +++ b/lib/core/raw.py @@ -63,10 +63,10 @@ def parse(self): print("Can't find the Host header in the raw request") exit(1) - self.basePath = self.startline.split(" ")[1] + self.base_path = self.startline.split(" ")[1] def url(self): - return "{0}://{1}{2}".format(self.scheme, self.host, self.basePath) + return "{0}://{1}{2}".format(self.scheme, self.host, self.base_path) def method(self): return self.startline.split(" ")[0] diff --git a/lib/core/report_manager.py b/lib/core/report_manager.py index 93c22b746..1a3523465 100755 --- a/lib/core/report_manager.py +++ b/lib/core/report_manager.py 
@@ -26,49 +26,49 @@ def __init__(self, path, status, response): self.status = status self.response = response - def getContentLength(self): + def get_content_length(self): try: - contentLength = int(self.response.headers["content-length"]) + content_length = int(self.response.headers["content-length"]) except (KeyError, ValueError): - contentLength = len(self.response.body) - return contentLength + content_length = len(self.response.body) + return content_length class Report(object): - def __init__(self, host, port, protocol, basePath): + def __init__(self, host, port, protocol, base_path): self.host = host self.port = port self.protocol = protocol - self.basePath = basePath + self.base_path = base_path self.results = [] self.completed = False - if self.basePath.endswith("/"): - self.basePath = self.basePath[:-1] + if self.base_path.endswith("/"): + self.base_path = self.base_path[:-1] - if self.basePath.startswith("/"): - self.basePath = self.basePath[1:] + if self.base_path.startswith("/"): + self.base_path = self.base_path[1:] - def addResult(self, path, status, response): + def add_result(self, path, status, response): result = Result(path, status, response) self.results.append(result) class ReportManager(object): - def __init__(self, saveFormat, outputFile): - self.format = saveFormat + def __init__(self, save_format, output_file): + self.format = save_format self.reports = [] - self.reportObj = None - self.output = outputFile + self.report_obj = None + self.output = output_file self.lock = threading.Lock() - def updateReport(self, report): + def update_report(self, report): if report not in self.reports: self.reports.append(report) - self.writeReport() + self.write_report() - def writeReport(self): - if self.reportObj is None: + def write_report(self): + if self.report_obj is None: if self.format == "simple": report = SimpleReport(self.output, self.reports) elif self.format == "json": @@ -84,10 +84,10 @@ def writeReport(self): else: report = 
PlainTextReport(self.output, self.reports) - self.reportObj = report + self.report_obj = report with self.lock: - self.reportObj.save() + self.report_obj.save() def save(self): with self.lock: diff --git a/lib/core/scanner.py b/lib/core/scanner.py index 69243d118..16302b0c5 100755 --- a/lib/core/scanner.py +++ b/lib/core/scanner.py @@ -34,109 +34,109 @@ def __init__(self, requester, calibration=None, suffix=None, prefix=None): self.prefix = prefix if prefix else "" self.requester = requester self.tester = None - self.redirectRegExp = None - self.invalidStatus = None - self.dynamicParser = None + self.redirect_reg_exp = None + self.invalid_status = None + self.dynamic_parser = None self.sign = None self.ratio = 0.98 self.setup() def setup(self): - firstPath = self.prefix + ( - self.calibration if self.calibration else RandomUtils.randString() + first_path = self.prefix + ( + self.calibration if self.calibration else RandomUtils.rand_string() ) + self.suffix - firstResponse = self.requester.request(firstPath) - self.invalidStatus = firstResponse.status + first_response = self.requester.request(first_path) + self.invalid_status = first_response.status - if self.invalidStatus == 404: + if self.invalid_status == 404: # Using the response status code is enough :-} return - secondPath = self.prefix + ( - self.calibration if self.calibration else RandomUtils.randString(omit=firstPath) + second_path = self.prefix + ( + self.calibration if self.calibration else RandomUtils.rand_string(omit=first_path) ) + self.suffix - secondResponse = self.requester.request(secondPath) + second_response = self.requester.request(second_path) # Look for redirects - if firstResponse.redirect and secondResponse.redirect: - self.redirectRegExp = self.generateRedirectRegExp( - firstResponse.redirect, firstPath, - secondResponse.redirect, secondPath, + if first_response.redirect and second_response.redirect: + self.redirect_reg_exp = self.generate_redirect_reg_exp( + first_response.redirect, 
first_path, + second_response.redirect, second_path, ) # Analyze response bodies - if firstResponse.body is not None and secondResponse.body is not None: - self.dynamicParser = DynamicContentParser( - self.requester, firstPath, firstResponse.body, secondResponse.body + if first_response.body is not None and second_response.body is not None: + self.dynamic_parser = DynamicContentParser( + self.requester, first_path, first_response.body, second_response.body ) else: - self.dynamicParser = None + self.dynamic_parser = None - baseRatio = float( - "{0:.2f}".format(self.dynamicParser.comparisonRatio) + base_ratio = float( + "{0:.2f}".format(self.dynamic_parser.comparisonRatio) ) # Rounding to 2 decimals # If response length is small, adjust ratio - if len(firstResponse) < 2000: - baseRatio -= 0.1 + if len(first_response) < 2000: + base_ratio -= 0.1 - if baseRatio < self.ratio: - self.ratio = baseRatio + if base_ratio < self.ratio: + self.ratio = base_ratio - def generateRedirectRegExp(self, firstLoc, firstPath, secondLoc, secondPath): + def generate_redirect_reg_exp(self, first_loc, first_path, second_loc, second_path): # Use a unique sign to locate where the path gets reflected in the redirect - self.sign = RandomUtils.randString(n=20) - firstLoc = firstLoc.replace(firstPath, self.sign) - secondLoc = secondLoc.replace(secondPath, self.sign) - regExpStart = "^" - regExpEnd = "$" + self.sign = RandomUtils.rand_string(n=20) + first_loc = first_loc.replace(first_path, self.sign) + second_loc = second_loc.replace(second_path, self.sign) + reg_exp_start = "^" + reg_exp_end = "$" - for f, s in zip(firstLoc, secondLoc): + for f, s in zip(first_loc, second_loc): if f == s: - regExpStart += re.escape(f) + reg_exp_start += re.escape(f) else: - regExpStart += ".*" + reg_exp_start += ".*" break - if regExpStart.endswith(".*"): - for f, s in zip(firstLoc[::-1], secondLoc[::-1]): + if reg_exp_start.endswith(".*"): + for f, s in zip(first_loc[::-1], second_loc[::-1]): if f == s: - 
regExpEnd = re.escape(f) + regExpEnd + reg_exp_end = re.escape(f) + reg_exp_end else: break - return unquote(regExpStart + regExpEnd) + return unquote(reg_exp_start + reg_exp_end) def scan(self, path, response): - if self.invalidStatus == response.status == 404: + if self.invalid_status == response.status == 404: return False - if self.invalidStatus != response.status: + if self.invalid_status != response.status: return True - if self.redirectRegExp and response.redirect: + if self.redirect_reg_exp and response.redirect: path = re.escape(unquote(path)) # A lot of times, '#' or '?' will be removed in the redirect, cause false positives for char in ["\\#", "\\?"]: if char in path: path = path.replace(char, "(|" + char) + ")" - redirectRegExp = self.redirectRegExp.replace(self.sign, path) + redirect_reg_exp = self.redirect_reg_exp.replace(self.sign, path) # Redirect sometimes encodes/decodes characters in URL, which may confuse the # rule check and make noise in the output, so we need to unquote() everything - redirectToInvalid = re.match(redirectRegExp, unquote(response.redirect)) + redirect_to_invalid = re.match(redirect_reg_exp, unquote(response.redirect)) # If redirection doesn't match the rule, mark as found - if redirectToInvalid is None: + if redirect_to_invalid is None: return True - ratio = self.dynamicParser.compareTo(response.body) + ratio = self.dynamic_parser.compareTo(response.body) if ratio >= self.ratio: return False - elif "redirectToInvalid" in locals() and ratio >= (self.ratio - 0.15): + elif "redirect_to_invalid" in locals() and ratio >= (self.ratio - 0.15): return False return True diff --git a/lib/output/cli_output.py b/lib/output/cli_output.py index 2461ebe29..87d99a5e6 100755 --- a/lib/output/cli_output.py +++ b/lib/output/cli_output.py @@ -37,25 +37,25 @@ class NoColor: class CLIOutput(object): def __init__(self, color): init() - self.lastLength = 0 - self.lastOutput = "" - self.lastInLine = False + self.last_length = 0 + self.last_output = "" 
+ self.last_in_line = False self.mutex = threading.Lock() self.blacklists = {} - self.basePath = None + self.base_path = None self.errors = 0 if not color: - self.disableColors() + self.disable_colors() @staticmethod def percentage(x, y): return float(x) / float(y) * 100 - def inLine(self, string): + def in_line(self, string): self.erase() sys.stdout.write(string) sys.stdout.flush() - self.lastInLine = True + self.last_in_line = True def erase(self): if sys.platform in ["win32", "msys"]: @@ -72,8 +72,8 @@ def erase(self): sys.stdout.write("\033[1K") sys.stdout.write("\033[0G") - def newLine(self, string=''): - if self.lastInLine: + def new_line(self, string=''): + if self.last_in_line: self.erase() if sys.platform in ["win32", "msys"]: @@ -86,11 +86,11 @@ def newLine(self, string=''): sys.stdout.write(string + "\n") sys.stdout.flush() - self.lastInLine = False + self.last_in_line = False sys.stdout.flush() - def statusReport(self, path, response, full_url, addedToQueue): - contentLength = None + def status_report(self, path, response, full_url, added_to_queue): + content_length = None status = response.status # Format message @@ -101,19 +101,19 @@ def statusReport(self, path, response, full_url, addedToQueue): size = len(response.body) finally: - contentLength = FileUtils.size_human(size) + content_length = FileUtils.size_human(size) - showPath = "/" + self.basePath + path + show_path = "/" + self.base_path + path if full_url: parsed = urllib.parse.urlparse(self.target) - showPath = "{0}://{1}{2}".format(parsed.scheme, parsed.netloc, showPath) + show_path = "{0}://{1}{2}".format(parsed.scheme, parsed.netloc, show_path) message = "[{0}] {1} - {2} - {3}".format( time.strftime("%H:%M:%S"), status, - contentLength.rjust(6, " "), - showPath, + content_length.rjust(6, " "), + show_path, ) if status in [200, 201, 204]: @@ -136,19 +136,19 @@ def statusReport(self, path, response, full_url, addedToQueue): else: message = Fore.MAGENTA + message + Style.RESET_ALL - if 
addedToQueue: + if added_to_queue: message += " (Added to queue)" with self.mutex: - self.newLine(message) + self.new_line(message) - def lastPath(self, path, index, length, currentJob, allJobs, rate): + def last_path(self, path, index, length, current_job, all_jobs, rate): l, h = get_terminal_size() message = "{0:.2f}% | {1} req/s - ".format(self.percentage(index, length), rate) - if allJobs > 1: - message += "Job: {0}/{1} - ".format(currentJob, allJobs) + if all_jobs > 1: + message += "Job: {0}/{1} - ".format(current_job, all_jobs) if self.errors: message += "Errors: {0} - ".format(self.errors) @@ -159,9 +159,9 @@ def lastPath(self, path, index, length, currentJob, allJobs, rate): message = message[:l - 1] with self.mutex: - self.inLine(message) + self.in_line(message) - def addConnectionError(self): + def add_connection_error(self): self.errors += 1 def error(self, reason): @@ -170,18 +170,18 @@ def error(self, reason): message = "\n" if reason.startswith("\n") else "" message += Style.BRIGHT + Fore.WHITE + Back.RED + stripped + Style.RESET_ALL - self.newLine(message) + self.new_line(message) def warning(self, message): with self.mutex: message = Style.BRIGHT + Fore.YELLOW + message + Style.RESET_ALL - self.newLine(message) + self.new_line(message) def header(self, message): message = Style.BRIGHT + Fore.MAGENTA + message + Style.RESET_ALL - self.newLine(message) + self.new_line(message) - def addConfig(self, key, value, msg): + def add_config(self, key, value, msg): l, _ = get_terminal_size() # Escape colours in text to get the real length escape = re.compile(r"\x1B(?:[@-Z\\-_]|\[[0-?]*[ -/]*[@-~])|\n") @@ -207,46 +207,46 @@ def config( ): config = Style.BRIGHT - config += self.addConfig("Extensions", extensions, config) + config += self.add_config("Extensions", extensions, config) if prefixes: - config += self.addConfig("Prefixes", prefixes, config) + config += self.add_config("Prefixes", prefixes, config) if suffixes: - config += self.addConfig("Suffixes", 
suffixes, config) + config += self.add_config("Suffixes", suffixes, config) - config += self.addConfig("HTTP method", method.upper(), config) - config += self.addConfig("Threads", threads, config) - config += self.addConfig("Wordlist size", wordlist_size, config) + config += self.add_config("HTTP method", method.upper(), config) + config += self.add_config("Threads", threads, config) + config += self.add_config("Wordlist size", wordlist_size, config) config += Style.RESET_ALL config += "\n" - self.newLine(config) + self.new_line(config) - def setTarget(self, target, scheme): + def set_target(self, target, scheme): if not target.startswith(("http://", "https://")) and "://" not in target: target = "{0}://{1}".format(scheme, target) self.target = target config = Style.BRIGHT - config += self.addConfig("Target", target, config) + "\n" + config += self.add_config("Target", target, config) + "\n" config += Style.RESET_ALL - self.newLine(config) + self.new_line(config) - def outputFile(self, target): - self.newLine("Output File: {0}\n".format(target)) + def output_file(self, target): + self.new_line("Output File: {0}\n".format(target)) - def errorLogFile(self, target): - self.newLine("Error Log: {0}\n".format(target)) + def error_log_file(self, target): + self.new_line("Error Log: {0}\n".format(target)) def debug(self, info): with self.mutex: line = "[{0}] - {1}".format(time.strftime("%H:%M:%S"), info) - self.newLine(line) + self.new_line(line) - def disableColors(self): + def disable_colors(self): global Fore global Style global Back diff --git a/lib/output/print_output.py b/lib/output/print_output.py index 37da51d73..c6df92e77 100644 --- a/lib/output/print_output.py +++ b/lib/output/print_output.py @@ -36,16 +36,16 @@ def __init__(self, color): init() self.mutex = threading.Lock() self.blacklists = {} - self.mutexCheckedPaths = threading.Lock() - self.basePath = None + self.mutex_checked_paths = threading.Lock() + self.base_path = None self.errors = 0 if not color: - 
self.disableColors() + self.disable_colors() def header(self, text): pass - def inLine(self, string): + def in_line(self, string): self.erase() sys.stdout.write(string) sys.stdout.flush() @@ -65,12 +65,12 @@ def erase(self): sys.stdout.write("\033[1K") sys.stdout.write("\033[0G") - def newLine(self, string=''): + def new_line(self, string=''): sys.stdout.write(string + "\n") sys.stdout.flush() - def statusReport(self, path, response, full_url, addedToQueue): - contentLength = None + def status_report(self, path, response, full_url, added_to_queue): + content_length = None status = response.status # Format message @@ -81,15 +81,15 @@ def statusReport(self, path, response, full_url, addedToQueue): size = len(response.body) finally: - contentLength = FileUtils.size_human(size) + content_length = FileUtils.size_human(size) - showPath = "/" + self.basePath + path + show_path = "/" + self.base_path + path parsed = urllib.parse.urlparse(self.target) - showPath = "{0}://{1}{2}".format(parsed.scheme, parsed.netloc, showPath) + show_path = "{0}://{1}{2}".format(parsed.scheme, parsed.netloc, show_path) message = "{0} - {1} - {2}".format( - status, contentLength.rjust(6, " "), showPath + status, content_length.rjust(6, " "), show_path ) if status in [200, 201, 204]: @@ -112,16 +112,16 @@ def statusReport(self, path, response, full_url, addedToQueue): else: message = Fore.MAGENTA + message + Style.RESET_ALL - if addedToQueue: + if added_to_queue: message += " (Added to queue)" with self.mutex: - self.newLine(message) + self.new_line(message) - def lastPath(self, path, index, length, currentJob, allJobs, rate): + def last_path(self, path, index, length, current_job, all_jobs, rate): pass - def addConnectionError(self): + def add_connection_error(self): self.errors += 1 def error(self, reason): @@ -141,23 +141,23 @@ def config( ): pass - def setTarget(self, target, scheme): + def set_target(self, target, scheme): if not target.startswith("http://") and not 
target.startswith("https://") and "://" not in target: target = "{0}://{1}".format(scheme, target) self.target = target - def outputFile(self, target): + def output_file(self, target): pass - def errorLogFile(self, target): + def error_log_file(self, target): pass def debug(self, info): with self.mutex: - self.newLine(info) + self.new_line(info) - def disableColors(self): + def disable_colors(self): global Fore global Style global Back diff --git a/lib/reports/base_report.py b/lib/reports/base_report.py index aad1da109..38d62b874 100755 --- a/lib/reports/base_report.py +++ b/lib/reports/base_report.py @@ -25,11 +25,11 @@ def close(self): class FileBaseReport(BaseReport): - def __init__(self, outputFileName, entries=[]): - self.output = outputFileName + def __init__(self, output_file_name, entries=[]): + self.output = output_file_name self.entries = entries - self.headerWritten = False - self.writtenEntries = [] + self.header_written = False + self.written_entries = [] self.open() diff --git a/lib/reports/csv_report.py b/lib/reports/csv_report.py index c32797339..706498d44 100644 --- a/lib/reports/csv_report.py +++ b/lib/reports/csv_report.py @@ -20,37 +20,37 @@ class CSVReport(FileBaseReport): - def generateHeader(self): - if self.headerWritten is False: - self.headerWritten = True + def generate_header(self): + if self.header_written is False: + self.header_written = True return "URL,Status,Size,Redirection\n" else: return "" def generate(self): - result = self.generateHeader() - insecureChars = ("+", "-", "=", "@") + result = self.generate_header() + insecure_chars = ("+", "-", "=", "@") for entry in self.entries: for e in entry.results: - if (entry.protocol, entry.host, entry.port, entry.basePath, e.path) not in self.writtenEntries: + if (entry.protocol, entry.host, entry.port, entry.base_path, e.path) not in self.written_entries: path = e.path status = e.status - contentLength = e.getContentLength() + content_length = e.get_content_length() redirect = 
e.response.redirect - result += "{0}://{1}:{2}/{3}{4},".format(entry.protocol, entry.host, entry.port, entry.basePath, path) + result += "{0}://{1}:{2}/{3}{4},".format(entry.protocol, entry.host, entry.port, entry.base_path, path) result += "{0},".format(status) - result += "{0},".format(contentLength) + result += "{0},".format(content_length) if redirect: # Preventing CSV injection. More info: https://www.exploit-db.com/exploits/49370 - if redirect.startswith(insecureChars): + if redirect.startswith(insecure_chars): redirect = "'" + redirect redirect = redirect.replace("\"", "\"\"") result += "\"{0}\"".format(redirect) result += "\n" - self.writtenEntries.append((entry.protocol, entry.host, entry.port, entry.basePath, e.path)) + self.written_entries.append((entry.protocol, entry.host, entry.port, entry.base_path, e.path)) return result diff --git a/lib/reports/html_report.py b/lib/reports/html_report.py index 5e3019046..1bfb85601 100644 --- a/lib/reports/html_report.py +++ b/lib/reports/html_report.py @@ -36,24 +36,24 @@ def generate(self): results = [] for entry in self.entries: for e in entry.results: - headerName = "{0}://{1}:{2}/{3}".format( - entry.protocol, entry.host, entry.port, entry.basePath + header_name = "{0}://{1}:{2}/{3}".format( + entry.protocol, entry.host, entry.port, entry.base_path ) - statusColorClass = '' + status_color_class = '' if e.status >= 200 and e.status <= 299: - statusColorClass = 'text-success' + status_color_class = 'text-success' elif e.status >= 300 and e.status <= 399: - statusColorClass = 'text-warning' + status_color_class = 'text-warning' elif e.status >= 400 and e.status <= 599: - statusColorClass = 'text-danger' + status_color_class = 'text-danger' results.append({ - "url": headerName + e.path, + "url": header_name + e.path, "path": e.path, "status": e.status, - "statusColorClass": statusColorClass, - "contentLength": FileUtils.size_human(e.getContentLength()), + "status_color_class": status_color_class, + "contentLength": 
FileUtils.size_human(e.get_content_length()), "contentType": e.response.headers.get("content-type"), "redirect": e.response.redirect }) diff --git a/lib/reports/json_report.py b/lib/reports/json_report.py index 719b14050..45a8ff703 100755 --- a/lib/reports/json_report.py +++ b/lib/reports/json_report.py @@ -29,19 +29,19 @@ def generate(self): for entry in self.entries: result = {} - headerName = "{0}://{1}:{2}/{3}".format( - entry.protocol, entry.host, entry.port, entry.basePath + header_name = "{0}://{1}:{2}/{3}".format( + entry.protocol, entry.host, entry.port, entry.base_path ) - result[headerName] = [] + result[header_name] = [] for e in entry.results: - pathEntry = { + path_entry = { "status": e.status, "path": "/" + e.path, - "content-length": e.getContentLength(), + "content-length": e.get_content_length(), "redirect": e.response.redirect, } - result[headerName].append(pathEntry) + result[header_name].append(path_entry) report["results"].append(result) diff --git a/lib/reports/markdown_report.py b/lib/reports/markdown_report.py index 8e0e2a85e..294746414 100644 --- a/lib/reports/markdown_report.py +++ b/lib/reports/markdown_report.py @@ -22,19 +22,19 @@ class MarkdownReport(FileBaseReport): - def __init__(self, outputFileName, entries=[]): - self.output = outputFileName + def __init__(self, output_file_name, entries=[]): + self.output = output_file_name self.entries = entries - self.headerWritten = False - self.writtenEntries = [] - self.printedTargetHeaderList = [] - self.completedHosts = [] + self.header_written = False + self.written_entries = [] + self.printed_target_header_list = [] + self.completed_hosts = [] self.open() - def generateHeader(self): - if self.headerWritten is False: - self.headerWritten = True + def generate_header(self): + if self.header_written is False: + self.header_written = True result = "### Info\n" result += "Args: {0}\n".format(' '.join(sys.argv)) result += "Time: {0}\n".format(time.ctime()) @@ -44,29 +44,29 @@ def 
generateHeader(self): return "" def generate(self): - result = self.generateHeader() + result = self.generate_header() for entry in self.entries: - headerName = "{0}://{1}:{2}/{3}".format( - entry.protocol, entry.host, entry.port, entry.basePath + header_name = "{0}://{1}:{2}/{3}".format( + entry.protocol, entry.host, entry.port, entry.base_path ) - if (entry.protocol, entry.host, entry.port, entry.basePath) not in self.printedTargetHeaderList: - result += "### Target: {0}\n\n".format(headerName) + if (entry.protocol, entry.host, entry.port, entry.base_path) not in self.printed_target_header_list: + result += "### Target: {0}\n\n".format(header_name) result += "Path | Status | Size | Redirection\n" result += "-----|--------|------|------------\n" - self.printedTargetHeaderList.append((entry.protocol, entry.host, entry.port, entry.basePath)) + self.printed_target_header_list.append((entry.protocol, entry.host, entry.port, entry.base_path)) for e in entry.results: - if (entry.protocol, entry.host, entry.port, entry.basePath, e.path) not in self.writtenEntries: - result += "[/{0}]({1}) | ".format(e.path, headerName + e.path) + if (entry.protocol, entry.host, entry.port, entry.base_path, e.path) not in self.written_entries: + result += "[/{0}]({1}) | ".format(e.path, header_name + e.path) result += "{0} | ".format(e.status) - result += "{0} | ".format(e.getContentLength()) + result += "{0} | ".format(e.get_content_length()) result += "{0}\n".format(e.response.redirect) - self.writtenEntries.append((entry.protocol, entry.host, entry.port, entry.basePath, e.path)) + self.written_entries.append((entry.protocol, entry.host, entry.port, entry.base_path, e.path)) - if entry.completed and entry not in self.completedHosts: + if entry.completed and entry not in self.completed_hosts: result += "\n" - self.completedHosts.append(entry) + self.completed_hosts.append(entry) return result diff --git a/lib/reports/plain_text_report.py b/lib/reports/plain_text_report.py index 
e202ea3fd..49f0cc34d 100755 --- a/lib/reports/plain_text_report.py +++ b/lib/reports/plain_text_report.py @@ -24,32 +24,32 @@ class PlainTextReport(FileBaseReport): - def generateHeader(self): - if self.headerWritten is False: - self.headerWritten = True + def generate_header(self): + if self.header_written is False: + self.header_written = True return "# Dirsearch started {0} as: {1}\n\n".format(time.ctime(), ' '.join(sys.argv)) else: return "" def generate(self): - result = self.generateHeader() + result = self.generate_header() for entry in self.entries: for e in entry.results: - if (entry.protocol, entry.host, entry.port, entry.basePath, e.path) not in self.writtenEntries: + if (entry.protocol, entry.host, entry.port, entry.base_path, e.path) not in self.written_entries: result += "{0} ".format(e.status) - result += "{0} ".format(FileUtils.size_human(e.getContentLength()).rjust(6, " ")) + result += "{0} ".format(FileUtils.size_human(e.get_content_length()).rjust(6, " ")) result += "{0}://{1}:{2}/".format(entry.protocol, entry.host, entry.port) result += ( "{0}".format(e.path) - if entry.basePath == "" - else "{0}/{1}".format(entry.basePath, e.path) + if entry.base_path == "" + else "{0}/{1}".format(entry.base_path, e.path) ) location = e.response.redirect if location: result += " -> REDIRECTS TO: {0}".format(location) result += "\n" - self.writtenEntries.append((entry.protocol, entry.host, entry.port, entry.basePath, e.path)) + self.written_entries.append((entry.protocol, entry.host, entry.port, entry.base_path, e.path)) return result diff --git a/lib/reports/simple_report.py b/lib/reports/simple_report.py index bdfbfc35d..4bef49ed9 100755 --- a/lib/reports/simple_report.py +++ b/lib/reports/simple_report.py @@ -25,13 +25,13 @@ def generate(self): for entry in self.entries: for e in entry.results: - if (entry.protocol, entry.host, entry.port, entry.basePath, e.path) not in self.writtenEntries: + if (entry.protocol, entry.host, entry.port, entry.base_path, 
e.path) not in self.written_entries: result += "{0}://{1}:{2}/".format(entry.protocol, entry.host, entry.port) result += ( "{0}\n".format(e.path) - if entry.basePath == "" - else "{0}/{1}\n".format(entry.basePath, e.path) + if entry.base_path == "" + else "{0}/{1}\n".format(entry.base_path, e.path) ) - self.writtenEntries.append((entry.protocol, entry.host, entry.port, entry.basePath, e.path)) + self.written_entries.append((entry.protocol, entry.host, entry.port, entry.base_path, e.path)) return result diff --git a/lib/reports/xml_report.py b/lib/reports/xml_report.py index 7dae8ea19..c39f87c1f 100644 --- a/lib/reports/xml_report.py +++ b/lib/reports/xml_report.py @@ -17,32 +17,31 @@ # Author: Mauro Soria from lib.reports import * +from xml.dom import minidom + +import xml.etree.cElementTree as ET import time import sys class XMLReport(FileBaseReport): def generate(self): - result = "\n" - result += "\n".format(" ".join(sys.argv), time.ctime()) + result = ET.Element("dirsearchscan", args=" ".join(sys.argv), time=time.ctime()) for entry in self.entries: - headerName = "{0}://{1}:{2}/{3}".format( - entry.protocol, entry.host, entry.port, entry.basePath + header_name = "{0}://{1}:{2}/{3}".format( + entry.protocol, entry.host, entry.port, entry.base_path ) - result += " \n".format(headerName) + target = ET.SubElement(result, "target", url=header_name) for e in entry.results: - result += " \n".format(e.path) - result += " {0}\n".format(e.status) - result += " {0}\n".format(e.getContentLength()) - result += " {0}\n".format("" if e.response.redirect is None else e.response.redirect) - result += " \n" - - result += " \n" - result += "\n" + path = ET.SubElement(target, "info", path="/" + e.path) + ET.SubElement(path, "status").text = str(e.status) + ET.SubElement(path, "contentlength").text = str(e.get_content_length()) + ET.SubElement(path, "redirect").text = e.response.redirect if e.response.redirect else "" - return result + result = ET.tostring(result, encoding="utf-8", 
method="xml") + return minidom.parseString(result).toprettyxml() def save(self): self.file.seek(0) diff --git a/lib/utils/random_utils.py b/lib/utils/random_utils.py index c21f0512d..d390d0c10 100644 --- a/lib/utils/random_utils.py +++ b/lib/utils/random_utils.py @@ -22,7 +22,7 @@ class RandomUtils(object): @classmethod - def randString(cls, n=12, omit=None): + def rand_string(cls, n=12, omit=None): seq = string.ascii_lowercase + string.ascii_uppercase + string.digits if omit: seq = list(set(seq) - set(omit)) diff --git a/lib/utils/terminal_size.py b/lib/utils/terminal_size.py index 294e377c1..5430b928d 100644 --- a/lib/utils/terminal_size.py +++ b/lib/utils/terminal_size.py @@ -87,7 +87,7 @@ def _get_terminal_size_tput(): def _get_terminal_size_linux(): - def ioctl_GWINSZ(fd): + def ioctl_gwinsz(fd): try: import fcntl import termios @@ -97,11 +97,11 @@ def ioctl_GWINSZ(fd): except Exception: pass - cr = ioctl_GWINSZ(0) or ioctl_GWINSZ(1) or ioctl_GWINSZ(2) + cr = ioctl_gwinsz(0) or ioctl_gwinsz(1) or ioctl_gwinsz(2) if not cr: try: fd = os.open(os.ctermid(), os.O_RDONLY) - cr = ioctl_GWINSZ(fd) + cr = ioctl_gwinsz(fd) os.close(fd) except Exception: pass