From 930cd439ca796ebbd12729fbda3549d8bfdfcfeb Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Wed, 1 Apr 2020 19:19:39 +0530
Subject: [PATCH 01/30] Update

---
 README.md           | 143 ++++++++++++++++++++++++++++++++++----------
 core/repl_prompt.py |  85 +++++++++++++++++++++-----
 reconspider.py      |  34 ++++++-----
 setup.py            |   2 +-
 4 files changed, 200 insertions(+), 64 deletions(-)

diff --git a/README.md b/README.md
index 5856e97..625641f 100644
--- a/README.md
+++ b/README.md
@@ -61,7 +61,7 @@ A Web crawler, sometimes called a spider or spiderbot and often shortened to cra
 # Overview of the tool:
 * Performs OSINT scan on an IP Address, Emails, Websites, and Organizations and finds out information from different sources.
-* Correlates and collaborate the results, show them in a consolidated manner. 
+* Correlates and collates the results, and shows them in a consolidated manner.
 * Use specific script / launch automated OSINT for consolidated data.
 * Currently available only in a Command Line Interface (CLI).
 
@@ -87,7 +87,7 @@ ReconSpider and its documents are covered under GPL-3.0 (General Public License
 ```
 __________ _________ __ ___
-\______ \ ____ ____ ____ ____ / _____/_____ |__| __| _/___________ 
+\______ \ ____ ____ ____ ____ / _____/_____ |__| __| _/___________
 | _// __ \_/ ___\/ _ \ / \ \_____ \\____ \| |/ __ |/ __ \_ __ \
 | | \ ___/\ \__( <_> ) | \ / \ |_> > / /_/ \ ___/| | \/
 |____|_ /\___ >\___ >____/|___| / /_______ / __/|__\____ |\___ >__|
 \/ \/ \/ \/ \/|__| \/ \/
 
 developer: https://bhavkaran.com
 
-ENTER 0 - 7 TO SELECT OPTIONS
+ENTER 0 - 13 OR 99 TO SELECT OPTIONS
 
-1. IP           Enumerate information from IP Address
-2. URL          Gather information about given Website
-3. WHOIS        Gather domain registration information
-4. DNS MAP      Map DNS records associated with target
-5. PORT SCAN    Discover hosts and services on a network
-6. NS LOOKUP    Obtain domain name or IP address mapping
-7. HONEYPOT     Check if it's honeypot or a real system
-8. UPDATE       Update ReconSpider to its latest version
+1. IP                    Enumerate information from IP Address
+2. DOMAIN                Gather information about given DOMAIN
+3. PHONENUMBER           Gather information about the given phone number
+4. DNS MAP               Map DNS records associated with target
+5. METADATA              Extract all metadata of the given file
+6. REVERSE IMAGE SEARCH  Find similar images available on the internet
+7. HONEYPOT              Check if it's a honeypot or a real system
+8. MAC ADDRESS LOOKUP    Obtain information about the given MAC address
+9. IPHEATMAP             Draw out a heatmap of IP locations
+10. TORRENT              Gather torrent download history of IP
+11. USERNAME             Extract account info. from social media
+12. IP2PROXY             Check whether IP uses any VPN / PROXY
+13. MAIL BREACH          Check whether the given mail was breached
+99. UPDATE               Update ReconSpider to its latest version
 
-0. EXIT         Exit from ReconSpider to your terminal
+0. EXIT                  Exit from ReconSpider to your terminal
 ```
 
 
@@ -164,12 +170,24 @@ After unzipping, go to that directory using Command Prompt and type the followin
 python setup.py install
 ```
 
+Step 3 - Database
+
+**GeoLite2 City Database**
+```
+https://github.com/texnikru/GeoLite2-Database/blob/master/GeoLite2-City.mmdb.gz
+```
+
+**IP2Proxy Database**
+```
+https://lite.ip2location.com/database/px8-ip-proxytype-country-region-city-isp-domain-usagetype-asn-lastseen
+```
+
+Download both databases and move them to reconspider/plugins/.
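+
+For example (a rough sketch; the GitHub file can be fetched via its raw URL, and the exact IP2Proxy archive name depends on the package you download after registering at lite.ip2location.com):
+
+```
+wget https://github.com/texnikru/GeoLite2-Database/raw/master/GeoLite2-City.mmdb.gz
+gunzip GeoLite2-City.mmdb.gz && mv GeoLite2-City.mmdb plugins/
+unzip IP2PROXY-LITE-PX8.BIN.ZIP
+mv IP2PROXY-IP-PROXYTYPE-COUNTRY-REGION-CITY-ISP-DOMAIN-USAGETYPE-ASN-LASTSEEN.BIN plugins/
+```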
 
-# Usage
+# Usage
 
+ReconSpider is a very handy tool and easy to use. All you have to do is pass values to its parameters.
 In order to start ReconSpider just type:
 ```
 python reconspider.py
 ```
 
 **1. IP**
 
 ```
 ReconSpider >> 1
 IP >> 8.8.8.8
 ```
 
-**2. URL**
+**2. DOMAIN**
 
-This option gathers all the information of given URL Address from public sources and give you in depth-information of IP address, country, city, organization, ISP, open ports and so more.
+This option gathers all the information of the given URL address and checks it for vulnerabilities.
 ```
-ReconSpider >> 2
-URL >> vulnweb.com
+Reconspider >> 2
+HOST (URL / IP) >> vulnweb.com
+PORT >> 443
 ```
 
-**3. WHOIS**
+**3. PHONENUMBER**
 
-This option allows you to search for domain name availability and WHOIS information including name, organisation, address, city, country, zipcode, registrar, name servers etc.
+This option allows you to gather information about the given phone number.
 ```
-ReconSpider >> 3
-WHOIS (URL) >> google.com
+Reconspider >> 3
+PHONE NUMBER (919485247632) >>
 ```
 
 **4. DNS MAP**
 
 ```
 ReconSpider >> 4
 DNS MAP (URL) >> vulnweb.com
 ```
 
-**5. PORT SCAN**
+**5. METADATA**
 
-This option allows you to determine what hosts are available on the network, what services (application name and version) those hosts are offering, what operating systems (and OS versions) they are running, what type of packet filters/firewalls are in use, and dozens of other characteristics.
+This option allows you to extract all metadata of the given file.
 ```
-ReconSpider >> 5
-PORT SCAN (URL / IP) >> vulnweb.com
+Reconspider >> 5
+Metadata (PATH) >> /root/Downloads/images.jpeg
 ```
 
-**6. NS LOOKUP**
+**6. REVERSE IMAGE SEARCH**
 
-This option allows you to obtain information about internet servers. It finds name server information for domains by querying the Domain Name System.
+This option allows you to obtain information about, and similar images to, the given image from across the internet.
 ```
-ReconSpider >> 6
-NS LOOKUP (URL) >> google.com
+Reconspider >> 6
+REVERSE IMAGE SEARCH (PATH) >> /root/Downloads/images.jpeg
+Open Search Result in web broser? (Y/N) : y
 ```
 
 **7. HONEYPOT**
 
 ```
 ReconSpider >> 7
 HONEYPOT (IP) >> 1.1.1.1
 ```
 
-**8. UPDATE**
+**8. MAC ADDRESS LOOKUP**
+
+This option allows you to identify MAC address details such as the manufacturer, address, and country.
+
+```
+Reconspider >> 8
+MAC ADDRESS LOOKUP (Eg:08:00:69:02:01:FC) >>
+```
+
+**9. IPHEATMAP**
+
+This option draws a heatmap of the location of the provided IP; given a list of IPs, it connects all the locations with accurate coordinates.
+```
+Reconspider >> 9
+
+    1) Trace single IP
+    2) Trace multiple IPs
+OPTIONS >>
+```
+
+**10. TORRENT**
+
+This option allows you to gather the torrent download history of the given IP.
+```
+Reconspider >> 10
+IPADDRESS (Eg:192.168.1.1) >>
+```
+
+**11. USERNAME**
+
+This option allows you to gather account information for the provided username from social media such as Instagram, Twitter, and Facebook.
+```
+Reconspider >> 11
+
+1.Facebook
+2.Twitter
+3.Instagram
+
+Username >>
+```
+
+**12. IP2PROXY**
+
+This option allows you to identify whether an IP address uses any kind of VPN / proxy to hide its identity.
+```
+Reconspider >> 12
+IPADDRESS (Eg:192.168.1.1) >>
+```
+
+**13. MAIL BREACH**
+
+This option allows you to identify whether the provided mail address has appeared in a breach on some website.
+```
+Reconspider >> 13
+MAIL ADDRESS (Eg:temp@gmail.com) >>
+```
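+
+Under the hood this option wraps the h8mail tool (see plugins/maildb.py in this patch), so the check is roughly equivalent to running h8mail by hand:
+
+```
+h8mail -t temp@gmail.com -o plugins/output.csv > plugins/output.log
+```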
+
+**99. UPDATE**
 
 This option allows you to check for updates. If a newer version is available, ReconSpider will download and merge the updates into the current directory without overwriting other files.
 ```
-ReconSpider >> 8
+ReconSpider >> 99
 Checking for updates..
 ```
 
 
@@ -269,7 +346,7 @@ Please go through the [ReconSpider Wiki Guide](https://github.com/bhavsec/recons
 
 # Frequent & Seamless Updates
 
-ReconSpider is under heavy development and updates for fixing bugs. optimizing performance & new features are being rolled regularly. Custom error handling is also not implemented, and all the focus is to create required functionality. 
+ReconSpider is under heavy development; updates that fix bugs, optimize performance, and add new features are rolled out regularly. Custom error handling is not yet implemented, and the current focus is on building the required functionality.
 
 If you would like to see features and issues that are being worked on, you can do that on [Development Progress](https://github.com/bhavsec/reconspider/projects/1) project board.
 
diff --git a/core/repl_prompt.py b/core/repl_prompt.py
index 2da4d29..1250d90 100644
--- a/core/repl_prompt.py
+++ b/core/repl_prompt.py
@@ -5,11 +5,17 @@
 from plugins.censys import censys_ip
 from plugins.dnsdump import dnsmap
 from plugins.honeypot import honeypot
-from plugins.nslookup import nslookup
-from plugins.portscan import portscan
 from plugins.shodan_io import shodan_host
-from plugins.shodan_io import shodan_ip
-from plugins.whois import whois
+from plugins.domain import domain
+from plugins.Phonenumber import Phonenumber
+from plugins.reverseimagesearch import reverseimagesearch
+from plugins.metadata import gps_analyzer
+from plugins.macaddress import MacAddressLookup
+from plugins.ipaddress import IPHeatmap
+from plugins.torrent import torrent
+from plugins.proxy import ip2Proxy
+from plugins.maildb import maildb
+from plugins.Username import user
 from core.updater import update
 from prompt_toolkit import prompt
 
@@ -17,7 +23,7 @@ def repl():  # Read–eval–print loop
     while 1:
         user_input = prompt("\nReconspider >> ")
-        if len(user_input) != 1:
-            print("ENTER 1 - 7 TO SELECT OPTIONS")
+        if len(user_input) == 0:
+            print("ENTER 0 - 13 OR 99 TO SELECT OPTIONS")
             continue
         try:
@@ -36,17 +42,17 @@ def repl():  # Read–eval–print loop
 
         elif choice == 2:
             while 1:
-                url_inp = prompt("URL >> ")
+                host = input("HOST (URL / IP) >> ")
+                port = int(input("PORT >> "))
                 break
-            url = socket.gethostbyname(url_inp)  # URL to IP address conversion
-            shodan_ip(url)
+            domain(host, port)
             continue
 
         elif choice == 3:
             while 1:
-                whois_inp = prompt("WHOIS (URL) >> ")
+                ph = prompt("PHONE NUMBER (919485247632) >> ")
                 break
-            whois(whois_inp)
+            Phonenumber(ph)
             continue
 
         elif choice == 4:
@@ -58,16 +64,16 @@ def repl():  # Read–eval–print loop
 
         elif choice == 5:
             while 1:
-                port_inp = prompt("PORT SCAN (URL / IP) >> ")
+                img_path = prompt("Metadata (PATH) >> ")
                 break
-            portscan(port_inp)
+            gps_analyzer(img_path)
             continue
 
         elif choice == 6:
             while 1:
-                ns_inp = prompt("NS LOOKUP (URL) >> ")
+                img = prompt("REVERSE IMAGE SEARCH (PATH) >> ")
                 break
-            nslookup(ns_inp)
+            reverseimagesearch(img)
             continue
 
         elif choice == 7:
@@ -79,15 +85,62 @@ def repl():  # Read–eval–print loop
 
         elif choice == 8:
             while 1:
+                mac = prompt("MAC ADDRESS LOOKUP (Eg:08:00:69:02:01:FC) >> ")
                 break
-            update()
+            MacAddressLookup(mac)
+            continue
+
+        elif choice == 9:
+            while 1:
+                IPHeatmap()
+                break
+            continue
+
+        elif choice == 10:
+            while 1:
+                IP = prompt("IPADDRESS (Eg:192.168.1.1) >> ")
+                break
+            torrent(IP)
+            continue
+
+        elif choice == 11:
+            while 1:
+                print("\n1.Facebook \n2.Twitter \n3.Instagram\n")
input("Username >> ") + choice = input("choice >> ") + break + user(choice,username) + continue + + elif choice == 12: + while 1: + IP = prompt("IPADDRESS (Eg:192.168.1.1) >> ") + break + ip2Proxy(IP) continue + elif choice == 13: + while 1: + emailaddress = prompt("MAIL ADDRESS (Eg:temp@gmail.com) >> ") + break + maildb(emailaddress) + continue + + elif choice == 99: + while 1: + break + update() + continue + elif choice == 0: exit('\nBye, See ya again..') + else: + pass + + # Handling ctrl+c try: repl() except KeyboardInterrupt: - quit("") + quit('\nBye, See ya again..') diff --git a/reconspider.py b/reconspider.py index c2ec9c5..d09bf9f 100644 --- a/reconspider.py +++ b/reconspider.py @@ -2,28 +2,34 @@ def banner(): return(""" -__________ _________ __ ___ -\______ \ ____ ____ ____ ____ / _____/_____ |__| __| _/___________ +__________ _________ __ ___ +\______ \ ____ ____ ____ ____ / _____/_____ |__| __| _/___________ | _// __ \_/ ___\/ _ \ / \ \_____ \\\____ \| |/ __ |/ __ \_ __ \\ | | \ ___/\ \__( <_> ) | \ / \ |_> > / /_/ \ ___/| | \/ - |____|_ /\___ >\___ >____/|___| / /_______ / __/|__\____ |\___ >__| - \/ \/ \/ \/ \/|__| \/ \/ + |____|_ /\___ >\___ >____/|___| / /_______ / __/|__\____ |\___ >__| + \/ \/ \/ \/ \/|__| \/ \/ developer: https://bhavkaran.com -ENTER 0 - 7 TO SELECT OPTIONS +ENTER 0 - 11 TO SELECT OPTIONS -1. IP Enumerate information from IP Address -2. URL Gather information about given Website -3. WHOIS Gather domain registration information -4. DNS MAP Map DNS records associated with target -5. PORT SCAN Discover hosts and services on a network -6. NS LOOKUP Obtain domain name or IP address mapping -7. HONEYPOT Check if it's honeypot or a real system -8. UPDATE Update ReconSpider to its latest version +1. IP Enumerate information from IP Address +2. DOMAIN Gather information about given DOMAIN +3. PHONENUMBER Gather information about Phonenumber +4. DNS MAP Map DNS records associated with target +5. METADATA Extract all metadata of the given file +6. REVERSE IMAGE SEARCH Obtain domain name or IP address mapping +7. HONEYPOT Check if it's honeypot or a real system +8. MAC ADDRESS LOOKUP Obtain information about give Macaddress +9. IPHEATMAP Draw out heatmap of locations of IP +10. TORRENT Gather torrent download history of IP +11. USERNAME Extract Account info. from social media +12. IP2PROXY Check whether IP uses any VPN / PROXY +13. MAIL BREACH Check whethers given mail is breached +99. UPDATE Update ReconSpider to its latest version -0. EXIT Exit from ReconSpider to your terminal +0. 
EXIT Exit from ReconSpider to your terminal """) if sys.version_info[0] > 2: diff --git a/setup.py b/setup.py index 8e73fdc..0ecd9d2 100644 --- a/setup.py +++ b/setup.py @@ -12,6 +12,6 @@ author="BhavKaran (@bhavsec)", author_email="contact@bhavkaran.com", license="GPL-3.0", - install_requires=["shodan", "requests", "prompt_toolkit"], + install_requires=["shodan", "requests", "prompt_toolkit","beautifulsoup4","urllib3","IP2proxy"], console=["reconspider.py"], ) From 89e1acfea53401b801203b175a75404f89ebe8c4 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Wed, 1 Apr 2020 19:23:24 +0530 Subject: [PATCH 02/30] files --- .gitignore | 9 + plugins/IP2Proxy.py | 496 ++++++++++++++++++++++++++++++++ plugins/Phonenumber.py | 26 ++ plugins/Username.py | 225 +++++++++++++++ plugins/api.py | 9 + plugins/domain.py | 128 +++++++++ plugins/ipaddress.py | 96 +++++++ plugins/macaddress.py | 14 + plugins/maildb.py | 14 + plugins/metadata.py | 62 ++++ plugins/proxy.py | 48 ++++ plugins/reverseimagesearch.py | 17 ++ plugins/torrent.py | 24 ++ plugins/webosint/cmsdetect.py | 16 ++ plugins/webosint/crawler.py | 200 +++++++++++++ plugins/webosint/header.py | 15 + plugins/webosint/nslookup.py | 11 + plugins/webosint/portscan.py | 38 +++ plugins/webosint/reverseip.py | 15 + plugins/webosint/subdomain.py | 20 ++ plugins/webosint/who/whoami.py | 32 +++ plugins/webosint/who/whois.py | 8 + plugins/webvuln/bruteforce.py | 184 ++++++++++++ plugins/webvuln/clickjacking.py | 22 ++ plugins/webvuln/cors.py | 121 ++++++++ plugins/webvuln/hostheader.py | 16 ++ plugins/webvuln/src/ftp.ini | 66 +++++ plugins/webvuln/src/ssh.ini | 123 ++++++++ 28 files changed, 2055 insertions(+) create mode 100644 plugins/IP2Proxy.py create mode 100644 plugins/Phonenumber.py create mode 100644 plugins/Username.py create mode 100644 plugins/api.py create mode 100644 plugins/domain.py create mode 100644 plugins/ipaddress.py create mode 100644 plugins/macaddress.py create mode 100644 plugins/maildb.py create mode 100644 plugins/metadata.py create mode 100644 plugins/proxy.py create mode 100644 plugins/reverseimagesearch.py create mode 100644 plugins/torrent.py create mode 100644 plugins/webosint/cmsdetect.py create mode 100644 plugins/webosint/crawler.py create mode 100644 plugins/webosint/header.py create mode 100644 plugins/webosint/nslookup.py create mode 100644 plugins/webosint/portscan.py create mode 100644 plugins/webosint/reverseip.py create mode 100644 plugins/webosint/subdomain.py create mode 100644 plugins/webosint/who/whoami.py create mode 100644 plugins/webosint/who/whois.py create mode 100644 plugins/webvuln/bruteforce.py create mode 100644 plugins/webvuln/clickjacking.py create mode 100644 plugins/webvuln/cors.py create mode 100644 plugins/webvuln/hostheader.py create mode 100644 plugins/webvuln/src/ftp.ini create mode 100644 plugins/webvuln/src/ssh.ini diff --git a/.gitignore b/.gitignore index 1f4fd4d..2bd4df5 100644 --- a/.gitignore +++ b/.gitignore @@ -34,7 +34,11 @@ MANIFEST # Installer logs pip-log.txt +output.txt pip-delete-this-directory.txt +plugins/log +plugins/output.csv +plugins/output.log # Unit test / coverage reports htmlcov/ @@ -109,3 +113,8 @@ core/config.py # vscode settings .vscode/ + +# database +plugins/GeoLite2-City.mmdb +plugins/IP2PROXY-IP-PROXYTYPE-COUNTRY-REGION-CITY-ISP-DOMAIN-USAGETYPE-ASN-LASTSEEN.BIN + diff --git a/plugins/IP2Proxy.py b/plugins/IP2Proxy.py new file mode 100644 index 0000000..93838e2 --- /dev/null +++ b/plugins/IP2Proxy.py @@ -0,0 +1,496 @@ +import sys +import struct +import socket + 
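+# Py2/Py3 compatibility shims: u() normalises values read from the database to
+# text and b() normalises them to bytes, so the record parser below runs
+# unchanged on either interpreter.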
+if sys.version < '3': + def u(x): + return x.decode('utf-8') + def b(x): + return str(x) +else: + def u(x): + if isinstance(x, bytes): + return x.decode() + return x + def b(x): + if isinstance(x, bytes): + return x + return x.encode('ascii') + +# Windows version of Python does not provide it +# for compatibility with older versions of Windows. +if not hasattr(socket, 'inet_pton'): + def inet_pton(t, addr): + import ctypes + a = ctypes.WinDLL('ws2_32.dll') + in_addr_p = ctypes.create_string_buffer(b(addr)) + if t == socket.AF_INET: + out_addr_p = ctypes.create_string_buffer(4) + elif t == socket.AF_INET6: + out_addr_p = ctypes.create_string_buffer(16) + n = a.inet_pton(t, in_addr_p, out_addr_p) + if n == 0: + raise ValueError('Invalid address') + return out_addr_p.raw + socket.inet_pton = inet_pton + +_VERSION = '2.2.0' +_NO_IP = 'MISSING IP ADDRESS' +_FIELD_NOT_SUPPORTED = 'NOT SUPPORTED' +_INVALID_IP_ADDRESS = 'INVALID IP ADDRESS' +MAX_IPV4_RANGE = 4294967295 +MAX_IPV6_RANGE = 340282366920938463463374607431768211455 + +class IP2ProxyRecord: + ''' IP2Proxy record with all fields from the database ''' + ip = None + country_short = _FIELD_NOT_SUPPORTED + country_long = _FIELD_NOT_SUPPORTED + region = _FIELD_NOT_SUPPORTED + city = _FIELD_NOT_SUPPORTED + isp = _FIELD_NOT_SUPPORTED + proxy_type = _FIELD_NOT_SUPPORTED + usage_type = _FIELD_NOT_SUPPORTED + as_name = _FIELD_NOT_SUPPORTED + asn = _FIELD_NOT_SUPPORTED + last_seen = _FIELD_NOT_SUPPORTED + domain = _FIELD_NOT_SUPPORTED + + def __str__(self): + return str(self.__dict__) + + def __repr__(self): + return repr(self.__dict__) + +_COUNTRY_POSITION = (0, 2, 3, 3, 3, 3, 3, 3, 3) +_REGION_POSITION = (0, 0, 0, 4, 4, 4, 4, 4, 4) +_CITY_POSITION = (0, 0, 0, 5, 5, 5, 5, 5, 5) +_ISP_POSITION = (0, 0, 0, 0, 6, 6, 6, 6, 6) +_PROXYTYPE_POSITION = (0, 0, 2, 2, 2, 2, 2, 2, 2) +_DOMAIN_POSITION = (0, 0, 0, 0, 0, 7, 7, 7, 7) +_USAGETYPE_POSITION = (0, 0, 0, 0, 0, 0, 8, 8, 8) +_ASN_POSITION = (0, 0, 0, 0, 0, 0, 0, 9, 9) +_AS_POSITION = (0, 0, 0, 0, 0, 0, 0, 10, 10) +_LASTSEEN_POSITION = (0, 0, 0, 0, 0, 0, 0, 0, 11) + +class IP2Proxy(object): + ''' IP2Proxy database ''' + + def __init__(self, filename=None): + ''' Creates a database object and opens a file if filename is given ''' + if filename: + self.open(filename) + + def __enter__(self): + if not hasattr(self, '_f') or self._f.closed: + raise ValueError("Cannot enter context with closed file") + return self + + def __exit__(self, exc_type, exc_value, traceback): + self.close() + + def open(self, filename): + ''' Opens a database file ''' + # Ensure old file is closed before opening a new one + self.close() + + self._f = open(filename, 'rb') + self._dbtype = struct.unpack('B', self._f.read(1))[0] + self._dbcolumn = struct.unpack('B', self._f.read(1))[0] + self._dbyear = 2000 + struct.unpack('B', self._f.read(1))[0] + self._dbmonth = struct.unpack('B', self._f.read(1))[0] + self._dbday = struct.unpack('B', self._f.read(1))[0] + self._ipv4dbcount = struct.unpack('= 281470681743360) and (ipnum <= 281474976710655)): + ipv = 4 + ipnum = ipnum - 281470681743360 + else: + ipv = 6 + else: + # ipv = 6 + if ((ipnum >= 42545680458834377588178886921629466624) and (ipnum <= 42550872755692912415807417417958686719)): + ipv = 4 + ipnum = ipnum >> 80 + ipnum = ipnum % 4294967296 + elif ((ipnum >= 42540488161975842760550356425300246528) and (ipnum <= 42540488241204005274814694018844196863)): + ipv = 4 + # ipnum = ipnum % 100000000000000000000000000000000 + ipnum = ~ ipnum + ipnum = ipnum % 4294967296 + else: + ipv = 6 + 
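+        # the inet_pton(AF_INET6) call above raised, so the address is not a
+        # valid IPv6 literal; fall back to parsing it as dotted-quad IPv4 below.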
except: + ipnum = struct.unpack('!L', socket.inet_pton(socket.AF_INET, addr))[0] + # socket.inet_pton(socket.AF_INET, addr) + ipv = 4 + return ipv, ipnum + + def _get_record(self, ip): + low = 0 + ipv = self._parse_addr(ip)[0] + ipnum = self._parse_addr(ip)[1] + if ipv == 4: + # ipnum = struct.unpack('!L', socket.inet_pton(socket.AF_INET, ip))[0] + if (ipnum == MAX_IPV4_RANGE): + ipno = ipnum - 1 + else: + ipno = ipnum + off = 0 + baseaddr = self._ipv4dbaddr + high = self._ipv4dbcount + if self._ipv4indexbaseaddr > 0: + indexpos = ((ipno >> 16) << 3) + self._ipv4indexbaseaddr + low = self._readi(indexpos) + high = self._readi(indexpos + 4) + + elif ipv == 6: + # a, b = struct.unpack('!QQ', socket.inet_pton(socket.AF_INET6, ip)) + # ipnum = (a << 64) | b + if (ipnum == MAX_IPV6_RANGE): + ipno = ipnum - 1 + else: + ipno = ipnum + off = 12 + baseaddr = self._ipv6dbaddr + high = self._ipv6dbcount + if self._ipv6indexbaseaddr > 0: + indexpos = ((ipno >> 112) << 3) + self._ipv6indexbaseaddr + low = self._readi(indexpos) + high = self._readi(indexpos + 4) + + elif ipnum == '': + rec = IP2ProxyRecord() + rec.country_short = _NO_IP + rec.country_long = _NO_IP + rec.region = _NO_IP + rec.city = _NO_IP + rec.isp = _NO_IP + rec.proxy_type = _NO_IP + rec.domain = _NO_IP + rec.usage_type = _NO_IP + rec.asn = _NO_IP + rec.as_name = _NO_IP + rec.last_seen = _NO_IP + return rec + + while low <= high: + # mid = int((low + high) / 2) + mid = int((low + high) >> 1) + ipfrom = self._readip(baseaddr + (mid) * (self._dbcolumn * 4 + off), ipv) + ipto = self._readip(baseaddr + (mid + 1) * (self._dbcolumn * 4 + off), ipv) + + if ipfrom <= ipno < ipto: + return self._read_record(mid, ipv) + else: + if ipno < ipfrom: + high = mid - 1 + else: + low = mid + 1 diff --git a/plugins/Phonenumber.py b/plugins/Phonenumber.py new file mode 100644 index 0000000..d63e991 --- /dev/null +++ b/plugins/Phonenumber.py @@ -0,0 +1,26 @@ +from plugins.api import phoneapis +import requests +import json + +def Phonenumber(ph): + print ('[+]' + ' Fetching Phonenumber Details...' 
+ '\n') + api_key=phoneapis() + if api_key == "": + print("Add you phoneapis api key to src/api.py") + exit() + url = ("http://apilayer.net/api/validate?access_key="+api_key+"&number="+str(ph)) + response=requests.get(url) + if "91" not in str(ph): + print("Error: CountryCode is missing") + else: + if response.status_code ==200: + get=response.json() + print("Number: "+get['number']) + print("Type: "+get['line_type']) + print("CountryCode: "+get['country_code']) + print("Country: "+get['country_name']) + print("Location: "+get['location']) + print("Carrier: "+get['carrier']) + print("") + else: + print("Error: Invalid Mobile Number") diff --git a/plugins/Username.py b/plugins/Username.py new file mode 100644 index 0000000..0db5697 --- /dev/null +++ b/plugins/Username.py @@ -0,0 +1,225 @@ +import requests +from bs4 import BeautifulSoup +from urllib.request import urlopen as uReq + +out=[] + +def user(choice,username): + if choice == '1': + pass + elif choice == '2': + ScrapTweets(username) + return() + elif choice == '3': + Instagram(username) + return() + else: + exit() + + search_string = "https://en-gb.facebook.com/" + username + + #response is stored after request is made + response = requests.get(search_string) + + #Response is stored and parsed to implement beautifulsoup + soup = BeautifulSoup(response.text, 'html.parser') + + #List that will store the data that is to be fetched + data = {'Name': "null", + 'Photo_link': "null", + 'Work':{'Company': "null", 'Position': "null", 'time_period': "null", 'Location': "null"}, + 'Education': {'Institute': "null", 'time_period': "null", 'Location': "null"}, + 'Address': {'Current_city': "null", 'Home_town': "null"}, + 'Favouriate': {}, + 'Contact_info': {} + } + + ###Finding Name of the user + #Min div element is found which contains all the information + main_div = soup.div.find(id="globalContainer") + + #finding name of the user + def find_name(): + name = main_div.find(id="fb-timeline-cover-name").get_text() + print("\n"+"Name:"+name) + + ###Finding About the user details + #finding work details of the user + def find_eduwork_details(): + education = soup.find(id="pagelet_eduwork") + apple=education.find(attrs={"class":"_4qm1"}) + if (apple.get_text() != " "): + for category in education.find_all(attrs={"class":"_4qm1"}): + print(category.find('span').get_text() + " : ") + for company in category.find_all(attrs={"class":"_2tdc"}): + if (company.get_text() != " "): + print(company.get_text()) + else: + continue + else: + print("No work details found") + + #finding home details of the user + def find_home_details(): + if(soup.find(id="pagelet_hometown") !=" "): + home = soup.find(id="pagelet_hometown") + for category in home.find_all(attrs={"class":"_4qm1"}): + print(category.find('span').get_text() + " : ") + for company in category.find_all(attrs={"class":"_42ef"}): + if (company.get_text() != " "): + print(company.get_text()) + else: + continue + else: + print("No Home details found") + + #finding contact details of the user + def find_contact_details(): + contact = soup.find(id="pagelet_contact") + orange = contact.find(attrs={"class":"_4qm1"}) + if (orange.get_text() !=" "): + for category in contact.find_all(attrs={"class":"_4qm1"}): + print(category.find('span').get_text() + " : ") + for company in category.find_all(attrs={"class":"_2iem"}): + if (company.get_text() != " "): + print(company.get_text()) + else: + continue + else: + print("No Contact details found") + + ###Logic for finding the status of the response + if ("200" in 
str(response)): + find_name() + find_eduwork_details() + find_home_details() + + elif ("404" in str(response)): + print("Error: Profile not found") + else: + print("Error: some other response") + return() + +def Instagram(username): + + r = requests.get("https://www.instagram.com/"+ username +"/?__a=1") + if r.status_code == 200: + res = r.json()['graphql']['user'] + print("\nUsername: " + res['username']) + print("Full Name: "+res['full_name']) + try: + print("Business Category: "+res['edge_follow']['business_category_name']) + except: + print("Account :"+" Private") + finally: + print("Biograph: " + res['biography']) + print("URL: "+ str(res['external_url'])) + print("Followers: "+str(res['edge_followed_by']['count'])) + print("Following: "+str(res['edge_follow']['count'])) + print("Profile Picture HD: " + res['profile_pic_url_hd']) + elif r.status_code == 404: + print("Error: Profile Not Found") + else: + print("Error: Something Went Wrong") + +def ScrapTweets(username): + + link = "https://twitter.com/" + username + the_client = uReq(link) + page_html = the_client.read() + the_client.close() + + soup = BeautifulSoup(page_html, 'html.parser') + + try: + full_name = soup.find('a', attrs={"class": "ProfileHeaderCard-nameLink u-textInheritColor js-nav"}) + print("User Name --> " + full_name.text) + except: + print("User Name -->"+" Not Found") + print() + + try: + user_id = soup.find('b', attrs={"class": "u-linkComplex-target"}) + print("User Id --> " + user_id.text) + except: + print("User Id --> "+"Not Found") + print() + + try: + decription = soup.find('p', attrs={"class": "ProfileHeaderCard-bio u-dir"}) + print("Description --> " + decription.text) + except: + print("Decription not provided by the user") + print() + + try: + user_location = soup.find('span', attrs={"class": "ProfileHeaderCard-locationText u-dir"}) + print("Location --> " + user_location.text.strip()) + except: + print("Location not provided by the user") + print() + + try: + connectivity = soup.find('span', attrs={"class": "ProfileHeaderCard-urlText u-dir"}) + tittle = connectivity.a["title"] + print("Link provided by the user --> " + tittle) + except: + print("No contact link is provided by the user") + print() + + try: + join_date = soup.find('span', attrs={"class": "ProfileHeaderCard-joinDateText js-tooltip u-dir"}) + print("The user joined twitter on --> " + join_date.text) + except: + print("The joined date is not provided by the user") + print() + + try: + birth = soup.find('span', attrs={"class": "ProfileHeaderCard-birthdateText u-dir"}) + birth_date = birth.span.text + print("Date of Birth:"+birth_date.strip()) + except: + print("Birth Date not provided by the user") + print() + + try: + span_box = soup.findAll('span', attrs={"class": "ProfileNav-value"}) + print("Total tweets --> " + span_box[0].text) + except: + print("Total Tweets --> Zero") + print() + + try: + print("Following --> " +span_box[1].text) + except: + print("Following --> Zero") + print() + + try: + print("Followers --> " + span_box[2].text) + except: + print("Followers --> Zero") + print() + + try: + print("Likes send by him --> " + span_box[3].text) + except: + print("Likes send by him --> Zero") + print() + + try: + if span_box[4].text != "More ": + print("No. of parties he is Subscribed to --> " + span_box[4].text) + else: + print("No. of parties he is Subscribed to --> Zero") + except: + print("No. 
of parties he is Subscribed to --> Zero") + print() + + spana = soup.findAll('span', attrs={"class": "ProfileNav-value"}) + + print("Tweets by "+ username + " are --> ") + # TweetTextSize TweetTextSize--normal js-tweet-text tweet-text + for tweets in soup.findAll('p', attrs={"class": "TweetTextSize TweetTextSize--normal js-tweet-text tweet-text"}): + print(tweets.text) + print() diff --git a/plugins/api.py b/plugins/api.py new file mode 100644 index 0000000..e39815d --- /dev/null +++ b/plugins/api.py @@ -0,0 +1,9 @@ +def phoneapis(): + api= "2f8c8e865a0b25bbf4da08c4db039b8d" + return str(api) +def ipstack(): + api="276cfee2c31729505691e515e8321a02" + return str(api) +def gmap(): + api="AIzaSyAKGik6Fok3_mbIsgquaAnDGNy-h_AjhVw" + return str(api) diff --git a/plugins/domain.py b/plugins/domain.py new file mode 100644 index 0000000..40bb352 --- /dev/null +++ b/plugins/domain.py @@ -0,0 +1,128 @@ +import socket +from .webosint.cmsdetect import CMSdetect +from .webosint.nslookup import nsLookup +from .webosint.portscan import DefaultPort,Customrange +from .webosint.reverseip import ReverseIP +from .webosint.subdomain import SubDomain +from .webvuln.bruteforce import ssh,ftp +from .webvuln.clickjacking import ClickJacking +from .webvuln.cors import Cors +from .webvuln.hostheader import HostHeader +from .webosint.header import header +from .webosint.crawler import crawler +from .webosint.who.whoami import whoami + +global host +global port + +# Checking whether the target host is alive or dead +def CheckTarget(): + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + result = s.connect_ex((host, port)) + + if result == 0: + return True + else: + return False + +# Main Method +def domain(h,p): + global host + global port + host=h + port=p + + if CheckTarget()==True: + print("\nTarget Alive \n") + Menu() + else: + print("The Host is Unreachable \n") + exit() + + +NmapFunctions = { + 1: DefaultPort, + 2: Customrange, +} + + +def nmaprec(host,port): + + Choice = 1 + while True: + print("1. Scan Default Ports (22-443)") + print("2. Enter Custom Range") + print("3. Back to Main Menu") + print('') + Choice = int(input(">> ")) + if (Choice >= 0) and (Choice < 3): + NmapFunctions[Choice](host, port) + elif Choice == 3: + Menu() + else: + print("Please choose an Appropriate option") + +BruteFunctions = { + 1: ssh, + 2: ftp + } + +def BruteForce(host, port): + Selection = 1 + while True: + print('') + print("1. SSH") + print("2. FTP") + print("3. 
Main Menu") + print('') + Selection = int(input("BruteForce >> ")) + print('') + if (Selection >= 0) and (Selection < 3): + BruteFunctions[Selection](host, port) + elif Selection == 3: + Menu() + else: + print("Please choose an Appropriate option") + + +MainFunctions = { + 1: ReverseIP, + 2: SubDomain, + 3: nsLookup, + 4: CMSdetect, + 5: nmaprec, + 6: BruteForce, + 7: ClickJacking, + 8: Cors, + 9: HostHeader, + 10:header, + 11:crawler, + 12:whoami +} + +def Menu(): + Selection = 1 + while True: + print('') + print("1."+" ReverseIP") + print("2."+" SubDomain") + print("3."+" nsLookup") + print("4."+" CMSDetect") + print("5."+" PortScan") + print("6."+" Bruteforce") + print("7."+" ClickJacking") + print("8."+" CORS") + print("9."+" Host Header Injection") + print("10."+" Header") + print("11."+" Crawler") + print("12."+" Whoami") + print("99."+" Exit") + print('') + Selection = int(input("Domain >> ")) + if (Selection >= 0) and (Selection <=12): + MainFunctions[Selection](host, port) + elif Selection == 99: + exit() + else: + print("Error: Please choose an Appropriate option") + print('') diff --git a/plugins/ipaddress.py b/plugins/ipaddress.py new file mode 100644 index 0000000..b2cf1ac --- /dev/null +++ b/plugins/ipaddress.py @@ -0,0 +1,96 @@ +import requests +import gmplot +from plugins.api import ipstack +import webbrowser +import re +from plugins.api import gmap +from ipaddress import * +from plugins.webosint.who.whois import * + +api_key = ipstack() +if api_key == "" : + print("Add you ipstack api key to src/api.py") + exit() +if gmap() == "" : + print("Add you Google Heatmap api key to src/api.py") + exit() + +def IPHeatmap(): + print(''' + 1) Trace single IP + 2) Trace multiple IPs''') + choice = input("OPTIONS >> ") + + if choice == '1': + IP = input("Enter the IP : ") + read_single_IP(IP) + elif choice == '2': + IP_file = input("Enter the IP File Location : ") + read_multiple_IP(IP_file) + else: + print("\nError: Please choose an appropriate option") + +def read_single_IP(IP): + print ('[+]' + "Processing IP: %s ..." %IP + '\n') + if not re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$",IP): + print("Invalid IP Address") + IPHeatmap() + lats = [] + lons = [] + r = requests.get("http://api.IPstack.com/" + IP + "?access_key=" + api_key) + response = r.json() + print('') + print("IP :"+response['ip']) + print("Location : " + response['region_name']) + print("Country : " +response['country_name']) + print("Latitude :"+" {latitude}".format(**response)) + print("Longitude :"+" {longitude}".format(**response)) + if input("Want More Whois Details (Y/N): ") in ("Y","y"): + whois_more(IP) + if response['latitude'] and response['longitude']: + lats = response['latitude'] + lons = response['longitude'] + maps_url = "https://maps.google.com/maps?q=%s,+%s" % (lats, lons) + print("") + openWeb = input("Open GPS location in web broser? (Y/N) ") + if openWeb.upper() == 'Y': + webbrowser.open(maps_url, new=2) + else: + pass + +def read_multiple_IP(IP_file): + lats = [] + lons = [] + try: + f = open(IP_file, "r") + f1 = f.readlines() + print ('[+]' + " Processing....." 
+ '\n') + for line in f1: + IP=re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$",line) + IP=IP.group() + r = requests.get("http://api.IPstack.com/" + IP + "?access_key=" + api_key) + response = r.json() + if response['latitude'] and response['longitude']: + lats.append(response['latitude']) + lons.append(response['longitude']) + heat_map(lats,lons) + except IOError: + print("ERROR : File Does not Exist\n") + IPHeatmap() + + +def heat_map(lats,lons): + gmap3 = gmplot.GoogleMapPlotter(20.5937, 78.9629, 5) + gmap3.heatmap(lats,lons) + gmap3.scatter(lats,lons, '#FF0000', size=50, marker=False) + gmap3.plot(lats,lons, 'cornflowerblue', edge_width = 3.0) + save_location = input("Enter the location to save file : ") + gmap3.apikey = gmap() + location = save_location + "/heatmap.html" + gmap3.draw(location) + print("Heatmap saved at " + location) + openWeb = input("Open Heatmap in web broser? (Y/N) : ") + if openWeb.upper() == 'Y': + webbrowser.open(url=("file:///"+location)) + else: + pass diff --git a/plugins/macaddress.py b/plugins/macaddress.py new file mode 100644 index 0000000..a29d15d --- /dev/null +++ b/plugins/macaddress.py @@ -0,0 +1,14 @@ +import requests + +def MacAddressLookup(mac): + url = ("https://macvendors.co/api/" + mac) + response=requests.get(url) + result=response.json() + if result["result"]: + final=result['result'] + print("Company:" + final["company"]) + print("Address:" + final["address"]) + print("Country:" + final["country"]) + print("") + else: + print("Error: Something Went Wrong") diff --git a/plugins/maildb.py b/plugins/maildb.py new file mode 100644 index 0000000..9882ca1 --- /dev/null +++ b/plugins/maildb.py @@ -0,0 +1,14 @@ +import os + +def maildb(emailaddress): + if ("@" and ".com") or ("@" and ".in") in emailaddress: + os.system("h8mail -t "+emailaddress+" -o "+os.getcwd()+"/plugins/output.csv > " +os.getcwd()+"/plugins/output.log") + f=open(os.getcwd()+"/plugins/output.csv","r") + line=f.readlines() + if len(line) > 1: + for i in line: + print(i) + else: + print("Data breached is Not Compromised") + else: + print("Error: Invalid Email Address") diff --git a/plugins/metadata.py b/plugins/metadata.py new file mode 100644 index 0000000..3dcc0ca --- /dev/null +++ b/plugins/metadata.py @@ -0,0 +1,62 @@ +import webbrowser +from PIL import Image +from PIL.ExifTags import * + +def get_exif(fn): + try: + ret = {} + print ('[+]' + 'Checking the Metadata...' 
+ '\n') + i = Image.open(fn) + info = i._getexif() + if str(info) == "None": + print("Metadata is not Much Informative:") + return -1 + for tag, value in info.items(): + decoded = TAGS.get(tag, tag) + ret[decoded] = value + return ret + except IOError: + print('') + print("ERROR : File not found") + exit() + +def gps_analyzer(img_path): + + a = get_exif(img_path) + + if a==-1: + return + for x,y in a.items(): + print("%s : %s" %(x, y)) + + if "GPSInfo" in a: + lat = [float(x) / float(y) for x, y in a['GPSInfo'][2]] + latref = a['GPSInfo'][1] + lon = [float(x) / float(y) for x, y in a['GPSInfo'][4]] + lonref = a['GPSInfo'][3] + + lat = lat[0] + lat[1] / 60 + lat[2] / 3600 + lon = lon[0] + lon[1] / 60 + lon[2] / 3600 + if latref == 'S': + lat = -lat + if lonref == 'W': + lon = -lon + map_it(lat, lon) + + else: + print('') + print("GPS location not found") + + +def map_it(lat, lon): + # Prints latitude and longitude values + print('') + print("Accurate Latitude : %s" % lat) + print("Accurate Longitude : %s" % lon) + print('') + # Creates the URL for the map using the latitude and longitude values + maps_url = "https://maps.google.com/maps?q=%s,+%s" % (lat, lon) + # Prompts the user to launch a web browser with the map + openWeb = input("Open GPS location in web broser? (Y/N) ") + if openWeb.upper() == 'Y': + webbrowser.open(maps_url, new=2) diff --git a/plugins/proxy.py b/plugins/proxy.py new file mode 100644 index 0000000..8cee420 --- /dev/null +++ b/plugins/proxy.py @@ -0,0 +1,48 @@ +import IP2Proxy +import re +import requests +from plugins.api import * +from plugins.webosint.who.whois import * + + +def ip2Proxy(IP): + + if re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$",IP): + db = IP2Proxy.IP2Proxy() + db.open("/root/Downloads/reconspider/plugins/IP2PROXY-IP-PROXYTYPE-COUNTRY-REGION-CITY-ISP-DOMAIN-USAGETYPE-ASN-LASTSEEN.BIN") + record = db.get_all(IP) + db.close() + if record['is_proxy']!=0: + #print(record) + print("Proxy: " + "Enabled") + print("Proxy Type:" + record['proxy_type']) + print("Country Code:" + record['country_short']) + print("Country:" + record['country_long']) + print("Region Name:" + record['region']) + print("City:" + record['city']) + print("Isp:" + record['isp']) + print("Domain:" + record['domain']) + print("Usage:" + record['usage_type']) + print("ASN:" + record['asn']) + print("Name:" + record['as_name']) + api_key = ipstack() + if api_key == "": + print("Add you ipstack api key to src/api.py") + exit() + r = requests.get("http://api.IPstack.com/" + IP + "?access_key=" + api_key) + response = r.json() + print("Latitude :"+" {latitude}".format(**response)) + print("Longitude :"+" {longitude}".format(**response)) + if input("Want More Whois Details (Y/N): "): + whois_more(IP) + if response['latitude'] and response['longitude']: + lats = response['latitude'] + lons = response['longitude'] + url = "https://maps.google.com/maps?q=%s,+%s" % (lats, lons) + print("Google Map Link :" + url) + else: + print("IP does not use any Proxy or VPN") + else: + print("\nEnter a Valid IP Address") + ip2Proxy() + print("") diff --git a/plugins/reverseimagesearch.py b/plugins/reverseimagesearch.py new file mode 100644 index 0000000..b73af19 --- /dev/null +++ b/plugins/reverseimagesearch.py @@ -0,0 +1,17 @@ +import requests +import webbrowser + +def reverseimagesearch(img): + try: + surl='https://www.google.co.in/searchbyimage/upload' + murl={'encoded_image': (img, open(img, 'rb')), 'image_content': ''} + response = requests.post(surl, files=murl, allow_redirects=False) + fetchUrl = 
response.headers['Location'] + openWeb = input("Open Search Result in web broser? (Y/N) : ") + if openWeb.upper() == 'Y': + webbrowser.open(fetchUrl) + else: + pass + except IOError: + print() + print("ERROR : File Does not Exist\n") diff --git a/plugins/torrent.py b/plugins/torrent.py new file mode 100644 index 0000000..e2900f5 --- /dev/null +++ b/plugins/torrent.py @@ -0,0 +1,24 @@ +import requests +import json + + +def torrent(IP): + + r = requests.get("https://api.antitor.com/history/peer/?ip="+ IP +"&key=3cd6463b477d46b79e9eeec21342e4c7") + res = r.json() + print ( '[+]' + " Processing Torrent....." + '\n') + if len(res)>4: + print("IP Address: "+res['ip']) + print("ISP: "+res['isp']) + print("Country: "+res['geoData']['country']) + print("Latitude: "+str(res['geoData']['latitude'])) + print("Longitude: "+str(res['geoData']['longitude'])+"\n") + for i in res['contents']: + print("Category:"+i['category']) + print("Name:"+i['name']) + print("Start:" + i['startDate']) + print("End:" + i['endDate']) + print("Size:"+str(i['torrent']['size'])) + print("") + else: + print("Error: Something Went Wrong") diff --git a/plugins/webosint/cmsdetect.py b/plugins/webosint/cmsdetect.py new file mode 100644 index 0000000..6993c94 --- /dev/null +++ b/plugins/webosint/cmsdetect.py @@ -0,0 +1,16 @@ +import requests + +def CMSdetect(domain, port): + payload = {'key': '1641c3b9f2b1c8676ceaba95d00f7cf2e3531830c5fa9a6cc5e2d922b2ed7165dcce66', 'url': domain} + cms_url = "https://whatcms.org/APIEndpoint/Detect" + response = requests.get(cms_url, params=payload) + cms_data = response.json() + cms_info = cms_data['result'] + if cms_info['code'] == 200: + print('Detected CMS : %s' % cms_info['name']) + print('Detected Version : %s' % cms_info['version']) + print('Confidence : %s' % cms_info['confidence']) + else: + print(cms_info['msg']) + print('Detected CMS : %s' % cms_info['name']) + print('Detected Version : %s' % cms_info['version']) diff --git a/plugins/webosint/crawler.py b/plugins/webosint/crawler.py new file mode 100644 index 0000000..59fdcc6 --- /dev/null +++ b/plugins/webosint/crawler.py @@ -0,0 +1,200 @@ +import os +import bs4 +import lxml +import requests + +user_agent = {'User-Agent' : 'Mozilla/5.0 (X11; Linux x86_64; rv:60.0) Gecko/20100101 Firefox/60.0'} + +def crawler(target,port): + if port == 80: + port="http://" + elif port == 443: + port="https://" + else: + print("Could'nt fetch data for the given PORT") + + total = [] + r_total = [] + sm_total = [] + js_total = [] + css_total = [] + int_total = [] + ext_total = [] + img_total = [] + print ('\n' + '[+]' + ' Crawling Target...'+ '\n') + try: + target=port+target + rqst = requests.get(target, headers=user_agent, verify=False, timeout=10) + sc = rqst.status_code + if sc == 200: + domain = target.split('//') + domain = domain[1] + page = rqst.content + soup = bs4.BeautifulSoup(page, 'lxml') + file = '{}.dump'.format(domain) + path = os.getcwd() + r_url = 'http://{}/robots.txt'.format(domain) + sm_url = 'http://{}/sitemap.xml'.format(domain) + + print( '[+]' + ' Looking for robots.txt' , end = '') + r_rqst = requests.get(r_url, headers=user_agent, verify=False, timeout=10) + r_sc = r_rqst.status_code + + if r_sc == 200: + print('['.rjust(9, '.') + ' Found ]' ) + print('[+]' + ' Extracting robots Links', end = '') + r_page = r_rqst.text + r_scrape = r_page.split('\n') + for entry in r_scrape: + if 'Disallow' in entry: + url = entry.split(':') + try: + url = url[1] + url = url.strip() + total.append(url) + r_total.append(target + url) + except: + 
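+                        # malformed robots.txt entry with no path after the directive; skip it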
pass + elif 'Allow' in entry: + url = entry.split(':') + try: + url = url[1] + url = url.strip() + total.append(url) + r_total.append(target + url) + except: + pass + r_total = set(r_total) + print('['.rjust(8, '.') + ' {} ]'.format(str(len(r_total)))) + + elif r_sc == 404: + print( '['.rjust(9, '.') + ' Not Found ]' ) + else: + print( '['.rjust(9, '.') + ' {} ]'.format(r_sc) ) + + print('[+]' + ' Looking for sitemap.xml' , end = '') + sm_rqst = requests.get(sm_url, headers=user_agent, verify=False, timeout=10) + sm_sc = sm_rqst.status_code + if sm_sc == 200: + print('['.rjust(8, '.') + ' Found ]' ) + print('[+]' + ' Extracting sitemap Links', end = '') + sm_page = sm_rqst.content + sm_soup = bs4.BeautifulSoup(sm_page, 'xml') + links = sm_soup.find_all('loc') + for url in links: + url = url.get_text() + if url is not None: + total.append(url) + sm_total.append(url) + sm_total = set(sm_total) + print('['.rjust(7, '.') + ' {} ]'.format(str(len(sm_total)))) + + elif sm_sc == 404: + print( '['.rjust(8, '.') + ' Not Found ]' ) + else: + print( '['.rjust(8, '.') + ' {} ]'.format(sm_sc) ) + + print('[+]' + ' Extracting CSS Links' , end = '') + css = soup.find_all('link') + for link in css: + url = link.get('href') + if url is not None and '.css' in url: + total.append(url) + css_total.append(url) + css_total = set(css_total) + print('['.rjust(11, '.') + ' {} ]'.format(str(len(css_total)))) + + print('[+]' + ' Extracting Javascript Links' , end = '') + js = soup.find_all('script') + for link in js: + url = link.get('src') + if url is not None and '.js' in url: + total.append(url) + js_total.append(url) + js_total = set(js_total) + print('['.rjust(4, '.') + ' {} ]'.format(str(len(js_total)))) + + print('[+]' + ' Extracting Internal Links' , end = '') + links = soup.find_all('a') + for link in links: + url = link.get('href') + if url is not None: + if domain in url: + total.append(url) + int_total.append(url) + int_total = set(int_total) + print('['.rjust(6, '.') + ' {} ]'.format(str(len(int_total)))) + + print('[+]' + ' Extracting External Links' , end = '') + for link in links: + url = link.get('href') + if url is not None: + if domain not in url and 'http' in url: + total.append(url) + ext_total.append(url) + ext_total = set(ext_total) + print('['.rjust(6, '.') + ' {} ]'.format(str(len(ext_total)))) + + print('[+]' + ' Extracting Images' , end = '') + images = soup.find_all('img') + for link in images: + src = link.get('src') + if src is not None and len(src) > 1: + total.append(src) + img_total.append(src) + img_total = set(img_total) + print('['.rjust(14, '.') + ' {} ]'.format(str(len(img_total)))) + + total = set(total) + print('\n' + '[+]' + ' Total Links Extracted : ' + str(len(total)) + '\n') + + if len(total) is not 0: + print('[+]' + ' Dumping Links in ' + '{}/dumps/{}'.format(path, file)) + with open(path+'/dumps/{}'.format('{}.dump'.format(domain)), 'w') as dumpfile: + dumpfile.write('URL : {}'.format(target) + '\n\n') + try: + dumpfile.write('Title : {}'.format(soup.title.string) + '\n') + except AttributeError: + dumpfile.write('Title : None' + '\n') + dumpfile.write('\nrobots Links : ' + str(len(r_total))) + dumpfile.write('\nsitemap Links : ' + str(len(sm_total))) + dumpfile.write('\nCSS Links : ' + str(len(css_total))) + dumpfile.write('\nJS Links : ' + str(len(js_total))) + dumpfile.write('\nInternal Links : ' + str(len(int_total))) + dumpfile.write('\nExternal Links : ' + str(len(ext_total))) + dumpfile.write('\nImages Links : ' + str(len(img_total))) + dumpfile.write('\nTotal 
Links Found : ' + str(len(total)) + '\n') + + if len(r_total) is not 0: + dumpfile.write('\nrobots :\n\n') + for item in r_total: + dumpfile.write(str(item) + '\n') + if len(sm_total) is not 0: + dumpfile.write('\nsitemap :\n\n') + for item in sm_total: + dumpfile.write(str(item) + '\n') + if len(css_total) is not 0: + dumpfile.write('\nCSS :\n\n') + for item in css_total: + dumpfile.write(str(item) + '\n') + if len(js_total) is not 0: + dumpfile.write('\nJavascript :\n\n') + for item in js_total: + dumpfile.write(str(item) + '\n') + if len(int_total) is not 0: + dumpfile.write('\nInternal Links :\n\n') + for item in int_total: + dumpfile.write(str(item) + '\n') + if len(ext_total) is not 0: + dumpfile.write('\nExternal Links :\n\n') + for item in ext_total: + dumpfile.write(str(item) + '\n') + if len(img_total) is not 0: + dumpfile.write('\nImages :\n\n') + for item in img_total: + dumpfile.write(str(item) + '\n') + + else: + print ( '[-]' + ' Error : ' + str(sc)) + except Exception as e: + print( '[-] Error : ' + str(e)) diff --git a/plugins/webosint/header.py b/plugins/webosint/header.py new file mode 100644 index 0000000..2f8641e --- /dev/null +++ b/plugins/webosint/header.py @@ -0,0 +1,15 @@ +import requests +requests.packages.urllib3.disable_warnings() + +def header(target,port): + if port == 80: + port="http://" + elif port == 443: + port="https://" + else: + print("Could'nt fetch data for the given PORT") + exit() + print ('\n' + '[+]' + ' Headers :' + '\n') + rqst = requests.get(port+target, verify=True, timeout=10) + for k, v in rqst.headers.items(): + print ('[+]' + ' {} : '.format(k) + v) diff --git a/plugins/webosint/nslookup.py b/plugins/webosint/nslookup.py new file mode 100644 index 0000000..1a5fe3a --- /dev/null +++ b/plugins/webosint/nslookup.py @@ -0,0 +1,11 @@ +from requests import get + +R = '\033[31m' # red +G = '\033[32m' # green +C = '\033[36m' # cyan +W = '\033[0m' # white + +def nsLookup(host, port): + print ( '[+]' + 'Fetching Details...' 
+ '\n') + result = get('http://api.hackertarget.com/dnslookup/?q=' + host).text + print(result) diff --git a/plugins/webosint/portscan.py b/plugins/webosint/portscan.py new file mode 100644 index 0000000..d328272 --- /dev/null +++ b/plugins/webosint/portscan.py @@ -0,0 +1,38 @@ +import nmap +import json + +def DefaultPort(Xhost, Yport): + print('') + print("Starting port scan with range 22-443") + nm = nmap.PortScanner() + result = nm.scan(Xhost, '22-443') + display(result) + +def Customrange(Xhost, Yport): + print('') + port_range = input("Enter the range : ") + print('') + print("Starting port scan with range %s"%port_range) + nm = nmap.PortScanner() + result = nm.scan(Xhost, port_range) + display(result) + +def display(result): + new = next(iter(result['scan'].values())) + ip_add = new['addresses'] + print('') + print("IP Address : %s" % ip_add['ipv4']) + hosting = new['hostnames'] + hostname0 = hosting[0] + hostname1 = hosting[1] + print('') + print("Hostname 1 : %s" % hostname0['name']) + print("Hostname 2 : %s" % hostname1['name']) + print('') + print("Open Ports : ") + print('') + ports = new['tcp'] + json_scan = json.dumps(ports) + parsed = json.loads(json_scan) + print(json.dumps(parsed, indent=4, sort_keys=True)) +print('') diff --git a/plugins/webosint/reverseip.py b/plugins/webosint/reverseip.py new file mode 100644 index 0000000..e11c103 --- /dev/null +++ b/plugins/webosint/reverseip.py @@ -0,0 +1,15 @@ +from requests import get + +R = '\033[31m' # red +G = '\033[32m' # green +C = '\033[36m' # cyan +W = '\033[0m' # white + +def ReverseIP(host, port): + print ( '[+]' + 'Checking whether the Target is reachable ...' + '\n') + lookup = 'https://api.hackertarget.com/reverseiplookup/?q=%s' % host + try: + result = get(lookup).text + print(result) + except: + print(R+'Error: Invalid IP address') diff --git a/plugins/webosint/subdomain.py b/plugins/webosint/subdomain.py new file mode 100644 index 0000000..da85e21 --- /dev/null +++ b/plugins/webosint/subdomain.py @@ -0,0 +1,20 @@ +import json +import requests + +R = '\033[31m' # red +G = '\033[32m' # green +C = '\033[36m' # cyan +W = '\033[0m' # white + +def SubDomain(host, port): + print ('[+]' + 'Fetching Subdomains of Target...' 
+ '\n') + url = 'https://www.virustotal.com/vtapi/v2/domain/report' + + params = {'apikey':'1af37bfeb7b1628ba10695fb187987a6651793e37df006a5cdf8786b0e4f6453','domain':host} + + response = requests.get(url, params=params) + + subdomains = response.json() + + for x in subdomains['domain_siblings']: + print(x) diff --git a/plugins/webosint/who/whoami.py b/plugins/webosint/who/whoami.py new file mode 100644 index 0000000..4c4a719 --- /dev/null +++ b/plugins/webosint/who/whoami.py @@ -0,0 +1,32 @@ +import whois +from pythonping import ping +import re + +def whoami(target,post): + #target=input("Enter the IP Address/Domain:") + getweb=str(ping(target)) + ip = re.compile('(([2][5][0-5]\.)|([2][0-4][0-9]\.)|([0-1]?[0-9]?[0-9]\.)){3}' + +'(([2][5][0-5])|([2][0-4][0-9])|([0-1]?[0-9]?[0-9]))') + match = ip.search(getweb) + #target=match.group() + w = whois.whois(target) + print("Domain Name:"+ str(w['domain_name'])) + print("Register:"+str(w['registrar'])) + try: + print("Whois Server:"+str(w['whois_server'])) + except: + pass + print("Server:"+str(w['name_servers'])) + print("Emails:"+str(w['emails'])) + try: + print("Organisation:"+str(w['org'])) + except: + print("Organisation:"+str(w['organization'])) + try: + print("Address:"+str(w['address'])) + print("City:"+str(w['city'])) + print("State:"+str(w['state'])) + print("Zipcode:"+str(w['zipcode'])) + except: + pass + print("Country:"+str(w['country'])) diff --git a/plugins/webosint/who/whois.py b/plugins/webosint/who/whois.py new file mode 100644 index 0000000..536f928 --- /dev/null +++ b/plugins/webosint/who/whois.py @@ -0,0 +1,8 @@ +import os +def whois_more(IP): + os.system("whois "+IP+" > output.txt") + f=open("output.txt","r") + f1=f.readlines() + for line in f1: + if "%" not in line and line.strip(): + print(line) diff --git a/plugins/webvuln/bruteforce.py b/plugins/webvuln/bruteforce.py new file mode 100644 index 0000000..15e7245 --- /dev/null +++ b/plugins/webvuln/bruteforce.py @@ -0,0 +1,184 @@ +import paramiko,requests,socket +from ftplib import FTP + +def ssh(host, port): + print("1. Default Port (22)") + print("2. Custom Port") + choice = int(input("BruteForce >>")) + if choice == 2: + port = int(input("Enter the Custom Telnet Port : ")) + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.settimeout(10) + try: + connect = s.connect_ex((host, port)) + if connect != 0: + print("[+] Port %s: Closed" %port) + s.close() + + elif connect == 0: + print("[+] Port %s: Open" %port) + s.close() + wordlist = input("Enter Wordlist location (Press Enter for Default Wordlist) : ") + if wordlist == '': + f = open("src/telnet.ini", "r") + f1 = f.readlines() + else: + f = open(wordlist, "r") + f1 = f.readlines() + for x in f1: + y = x.split(':') + username = y[0].strip(":") + password = y[1].strip("\n") + ssh = paramiko.SSHClient() + ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + print("Checking with Username : %s , Password : %s" % (username, password)) + try: + ssh.connect(host, port=port, username=username, password=password, timeout=10) + flag = 0 + pass + + except paramiko.AuthenticationException: + flag = 1 + + except socket.error as e: + flag = 2 + print(e) + + except KeyboardInterrupt: + print("\n User Interrupt! 
Exitting...") + exit() + + ssh.close() + + if flag == 0: + print('') + print("Credentials Found") + print("Username : %s" % username) + print(("Password : %s") % password) + print('') + elif flag == 1: + print("Invalid Credentials") + + else: + pass + except socket.error as e: + print("Error : %s" %e) + + elif choice == 1 | choice!= 2: + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + s.settimeout(10) + try: + connect = s.connect_ex((host, 22)) + if connect != 0: + print("[+] Port 22: Closed") + s.close() + + elif connect == 0: + print("[+] Port 22: Open") + s.close() + wordlist = input("Enter Wordlist location (Press Enter for Default Wordlist) : ") + if wordlist == '': + f = open("src/ssh.ini", "r") + f1 = f.readlines() + else: + f = open(wordlist, "r") + f1 = f.readlines() + for x in f1: + y = x.split(':') + username = y[0].strip(":") + password = y[1].strip("\n") + ssh = paramiko.SSHClient() + ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy()) + print("Checking with Username : %s , Password : %s" % (username, password)) + try: + ssh.connect(host, port=22, username=username, password=password, timeout=10) + flag = 0 + pass + + except paramiko.AuthenticationException: + flag = 1 + + except socket.error as e: + flag = 2 + print(e) + + except KeyboardInterrupt: + print("\n User Interrupt! Exitting...") + exit() + + ssh.close() + + if flag == 0: + print('') + print("Credentials Found") + print("Username : %s" % username) + print(("Password : %s") % password) + print('') + elif flag == 1: + print("Invalid Credentials") + + else: + pass + except socket.error as e: + print("Error : %s" % e) + + + + + +def ftp(host, port): + s = socket.socket(socket.AF_INET, socket.SOCK_STREAM) + port = 21 + s.settimeout(10) + try: + connect = s.connect_ex((host, port)) + if connect != 0: + print("[+] Port %s: Closed" % port) + s.close() + + elif connect == 0: + print("[+] Port %s: Open" % port) + s.close() + wordlist = input("Enter Wordlist location (Press Enter for Default Wordlist) : ") + if wordlist == '': + f = open("src/ftp.ini", "r") + f1 = f.readlines() + else: + f = open(wordlist, "r") + f1 = f.readlines() + for x in f1: + y = x.split(':') + username = y[0].strip(":") + password = y[1].strip("\n") + ftp = FTP(host) + print("Checking with Username : %s , Password : %s" % (username, password)) + try: + ftp.login(user='username', passwd='password') + flag = 0 + pass + + except Exception as e: + flag = 1 + + except socket.error as e: + flag = 2 + print(e) + + except KeyboardInterrupt: + print("\n User Interrupt! 
Exitting...") + exit() + + if flag == 0: + print('') + print("Credentials Found") + print("Username : %s" % username) + print("Password : %s" % password) + print('') + elif flag == 1: + print("Invalid Credentials") + + else: + pass + + except socket.error as e: + print("Error : %s" %e) diff --git a/plugins/webvuln/clickjacking.py b/plugins/webvuln/clickjacking.py new file mode 100644 index 0000000..47cee35 --- /dev/null +++ b/plugins/webvuln/clickjacking.py @@ -0,0 +1,22 @@ +from urllib.request import urlopen + +def ClickJacking(host, port): + + if port == 80: + port = 'http://' + elif port == 443: + port = 'https://' + else: + print("Could'nt fetch data for the given PORT") + + + url = (port+host) + + data = urlopen(url) + headers = data.info() + + if not "X-Frame-Options" in headers: + print("Website is vulnerable to ClickJacking") + + else: + print("Website is not Vulnerable to ClickJacking") diff --git a/plugins/webvuln/cors.py b/plugins/webvuln/cors.py new file mode 100644 index 0000000..3442335 --- /dev/null +++ b/plugins/webvuln/cors.py @@ -0,0 +1,121 @@ +import requests + + +header1 = None +domain2 = None +header2 = None +domain3 = None +header3 = None + + +def Cors(host, port): + if port == 80: + port = 'http://' + elif port == 443: + port = 'https://' + else: + print("Could'nt fetch data for the given PORT") + exit() + print("1. CORS check in Default Host") + print("2. CORS check in Host's Custom Endpoint") + print('') + choice = int(input('root@osint:~/Domain/CORS#')) + print('') + cookies = input("Paste the Cookies (If None,then hit enter) : ") + global header1 + global domain2 + global header2 + global domain3 + global header3 + if cookies == '': + + header1 = {'Origin': 'http://evil.com'} + + domain2 = host + '.evil.com' + + header2 = {'Origin': port + domain2} + + domain3 = host + '%60cdl.evil.com' + + header3 = {'Origin': port + domain3} + + Choices(host, port, choice) + else: + + header1 = {'Origin': 'http://evil.com', 'Cookie': cookies} + + domain2 = host + '.evil.com' + + header2 = {'Origin': port + domain2,'Cookie': cookies} + + domain3 = host + '%60cdl.evil.com' + + header3 = {'Origin': port + domain3,'Cookie': cookies} + + Choices(host, port, choice) + + +def Choices(host, port, choice): + if choice == 2: + endpoint = input("Enter the Custom Endpoint : ") + host = endpoint + WrongChoice(host, port) + + elif choice == 1: + print("Checking Default Host ") + url = (port + host) + print("Testing with Payload %s" % header1) + response = requests.get(url, headers=header1) + if 'evil.com' in response.headers: + print("Vulnerable to Cross Origin Resource Sharing") + else: + print("Not Vulnerable to Cross Origin Resource Sharing") + print('') + + print("Testing with Payload %s" % header2) + response = requests.get(url, headers=header2) + + if domain2 in response.headers: + print("Vulnerable to Cross Origin Resource Sharing") + else: + print("Not Vulnerable to Cross Origin Resource Sharing") + print('') + + print("Testing with Payload %s" % header3) + response = requests.get(url, headers=header3) + if domain2 in response.headers: + print("Vulnerable to Cross Origin Resource Sharing") + else: + print("Not Vulnerable to Cross Origin Resource Sharing") + print('') + else: + print("Wrong Choice") + print("Checking Default Host") + WrongChoice(host, port) + +def WrongChoice(host, port): + url = (port + host) + print("Testing with Payload %s" % header1) + response = requests.get(url, headers=header1) + if 'evil.com' in response.headers: + print("Vulnerable to Cross Origin Resource 
Sharing") + else: + print("Not Vulnerable to Cross Origin Resource Sharing") + print('') + + print("Testing with Payload %s" % header2) + response = requests.get(url, headers=header2) + + if domain2 in response.headers: + print("Vulnerable to Cross Origin Resource Sharing") + else: + print("Not Vulnerable to Cross Origin Resource Sharing") + print('') + + print("Testing with Payload %s" % header3) + response = requests.get(url, headers=header3) + if domain2 in response.headers: + print("Vulnerable to Cross Origin Resource Sharing") + else: + print("Not Vulnerable to Cross Origin Resource Sharing") + print('') diff --git a/plugins/webvuln/hostheader.py b/plugins/webvuln/hostheader.py new file mode 100644 index 0000000..0bc000f --- /dev/null +++ b/plugins/webvuln/hostheader.py @@ -0,0 +1,16 @@ +import requests + +def HostHeader(host, port): + if port == 80: + port = 'http://' + elif port == 443: + port = 'https://' + else: + print("Could'nt fetch data for the given PORT") + url = (port + host) + headers = {'Host': 'http://evil.com'} + response = requests.get(url, headers=headers) + if 'evil.com' in response.headers: + print("Vulnerable to Host Header Injection") + else: + print("Not Vulnerable to Host header injection") diff --git a/plugins/webvuln/src/ftp.ini b/plugins/webvuln/src/ftp.ini new file mode 100644 index 0000000..5156d0f --- /dev/null +++ b/plugins/webvuln/src/ftp.ini @@ -0,0 +1,66 @@ +anonymous:anonymous +root:rootpasswd +root:12hrs37 +ftp:b1uRR3 +admin:admin +localadmin:localadmin +admin:1234 +apc:apc +admin:nas +Root:wago +Admin:wago +User:user +Guest:guest +ftp:ftp +admin:password +a:avery +admin:123456 +adtec:none +admin:admin12345 +none:dpstelecom +instrument:instrument +user:password +root:password +default:default +admin:default +nmt:1234 +admin:Janitza +supervisor:supervisor +user1:pass1 +avery:avery +IEIeMerge:eMerge +ADMIN:12345 +beijer:beijer +Admin:admin +admin:1234 +admin:1111 +root:admin +se:1234 +admin:stingray +device:apc +apc:apc +dm:ftp +dmftp:ftp +httpadmin:fhttpadmin +user:system +MELSEC:MELSEC +QNUDECPU:QNUDECPU +ftp_boot:ftp_boot +uploader:ZYPCOM +ftpuser:password +USER:USER +qbf77101:hexakisoctahedron +ntpupdate:ntpupdate +sysdiag:factorycast@schneider +wsupgrade:wsupgrade +pcfactory:pcfactory +loader:fwdownload +test:testingpw +webserver:webpages +fdrusers:sresurdf +nic2212:poiuypoiuy +user:user00 +su:ko2003wa +MayGion:maygion.com +admin:9999 +PlcmSpIp:PlcmSpIp \ No newline at end of file diff --git a/plugins/webvuln/src/ssh.ini b/plugins/webvuln/src/ssh.ini new file mode 100644 index 0000000..4f0bc1a --- /dev/null +++ b/plugins/webvuln/src/ssh.ini @@ -0,0 +1,123 @@ +root:calvin +root:root +adithya:toor +root:toor +administrator:password +NetLinx:password +administrator:Amx1234! +adithya:toor +amx:password +amx:Amx1234! 
+admin:1988 +admin:admin +Administrator:Vision2 +cisco:cisco +c-comatic:xrtwk318 +root:qwasyx21 +admin:insecure +pi:raspberry +user:user +root:default +root:leostream +leo:leo +localadmin:localadmin +fwupgrade:fwupgrade +root:rootpasswd +admin:password +root:timeserver +admin:password +admin:motorola +cloudera:cloudera +root:p@ck3tf3nc3 +apc:apc +device:apc +eurek:eurek +netscreen:netscreen +admin:avocent +root:linux +sconsole:12345 +root:5up +cirros:cubswin:) +root:uClinux +root:alpine +root:dottie +root:arcsight +root:unitrends1 +vagrant:vagrant +root:vagrant +m202:m202 +demo:fai +root:fai +root:ceadmin +maint:password +root:palosanto +root:ubuntu1404 +root:cubox-i +debian:debian +root:debian +root:xoa +root:sipwise +debian:temppwd +root:sixaola +debian:sixaola +myshake:shakeme +stackato:stackato +root:screencast +root:stxadmin +root:nosoup4u +root:indigo +root:video +default:video +default: +ftp:video +nexthink:123456 +ubnt:ubnt +root:ubnt +sansforensics:forensics +elk_user:forensics +osboxes:osboxes.org +root:osboxes.org +sans:training +user:password +misp:Password1234 +hxeadm:HXEHana1 +acitoolkit:acitoolkit +osbash:osbash +enisa:enisa +geosolutions:Geos +pyimagesearch:deeplearning +root:NM1$88 +remnux:malware +hunter:hunter +plexuser:rasplex +root:openelec +root:rasplex +root:plex +root:openmediavault +root:ys123456 +root:libreelec +openhabian:openhabian +admin:ManagementConsole2015 +public:publicpass +admin:hipchat +nao:nao +support:symantec +root:max2play +admin:pfsense +root:root01 +root:nas4free +USERID:PASSW0RD +Administrator:p@ssw0rd +root:freenas +root:cxlinux +admin:symbol +admin:Symbol +admin:superuser +admin:admin123 +root:D13HH[ +root:blackarch +root:dasdec1 +root:7ujMko0admin +root:7ujMko0vizxv +root:Zte521 +root:zlxx \ No newline at end of file From 602907825fea7b7c10922c3c1c622bcb26a910b3 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Wed, 1 Apr 2020 19:26:09 +0530 Subject: [PATCH 03/30] UPDATE README.md --- README.md | 1 + 1 file changed, 1 insertion(+) diff --git a/README.md b/README.md index 625641f..880b5a4 100644 --- a/README.md +++ b/README.md @@ -356,3 +356,4 @@ If you would like to see features and issues that are being worked on, you can d * [S0md3v](https://github.com/s0md3v/) * [Parshant](mailto:parshant.dhall@gmail.com) +* [Adithyan AK](https://github.com/adithyan-ak) From 6b42fed23481008a49d5b83f26ac94b6988c3d05 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Wed, 1 Apr 2020 19:34:37 +0530 Subject: [PATCH 04/30] Error Fixed --- plugins/proxy.py | 1 - plugins/webvuln/bruteforce.py | 6 +++--- 2 files changed, 3 insertions(+), 4 deletions(-) diff --git a/plugins/proxy.py b/plugins/proxy.py index 8cee420..8173dfe 100644 --- a/plugins/proxy.py +++ b/plugins/proxy.py @@ -44,5 +44,4 @@ def ip2Proxy(IP): print("IP does not use any Proxy or VPN") else: print("\nEnter a Valid IP Address") - ip2Proxy() print("") diff --git a/plugins/webvuln/bruteforce.py b/plugins/webvuln/bruteforce.py index 15e7245..dae472d 100644 --- a/plugins/webvuln/bruteforce.py +++ b/plugins/webvuln/bruteforce.py @@ -157,9 +157,6 @@ def ftp(host, port): flag = 0 pass - except Exception as e: - flag = 1 - except socket.error as e: flag = 2 print(e) @@ -168,6 +165,9 @@ def ftp(host, port): print("\n User Interrupt! 
Exitting...") exit() + except Exception as e: + flag = 1 + if flag == 0: print('') print("Credentials Found") From a179b3a016aaba39e3675a39d521d1565dfbaa03 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Wed, 1 Apr 2020 21:33:51 +0530 Subject: [PATCH 05/30] Standard Improved --- plugins/IP2Proxy.py | 30 +++++++++++----------- plugins/Phonenumber.py | 3 +-- plugins/Username.py | 48 ++++++++++------------------------- plugins/domain.py | 4 +-- plugins/torrent.py | 2 +- plugins/webosint/reverseip.py | 8 ++---- plugins/webosint/subdomain.py | 5 ---- 7 files changed, 35 insertions(+), 65 deletions(-) diff --git a/plugins/IP2Proxy.py b/plugins/IP2Proxy.py index 93838e2..51e7314 100644 --- a/plugins/IP2Proxy.py +++ b/plugins/IP2Proxy.py @@ -127,7 +127,7 @@ def get_country_short(self, ip): try: rec = self._get_record(ip) country_short = rec.country_short - except: + except Exception as e: country_short = _INVALID_IP_ADDRESS return country_short @@ -136,7 +136,7 @@ def get_country_long(self, ip): try: rec = self._get_record(ip) country_long = rec.country_long - except: + except Exception as e: country_long = _INVALID_IP_ADDRESS return country_long @@ -145,7 +145,7 @@ def get_region(self, ip): try: rec = self._get_record(ip) region = rec.region - except: + except Exception as e: region = _INVALID_IP_ADDRESS return region @@ -154,7 +154,7 @@ def get_city(self, ip): try: rec = self._get_record(ip) city = rec.city - except: + except Exception as e: city = _INVALID_IP_ADDRESS return city @@ -163,7 +163,7 @@ def get_isp(self, ip): try: rec = self._get_record(ip) isp = rec.isp - except: + except Exception as e: isp = _INVALID_IP_ADDRESS return isp @@ -172,7 +172,7 @@ def get_proxy_type(self, ip): try: rec = self._get_record(ip) proxy_type = rec.proxy_type - except: + except Exception as e: proxy_type = _INVALID_IP_ADDRESS return proxy_type @@ -184,7 +184,7 @@ def is_proxy(self, ip): is_proxy = 0 if (rec.country_short == '-') else ( 2 if ((rec.proxy_type == 'DCH') | (rec.proxy_type == 'SES')) else 1) else: is_proxy = 0 if (rec.proxy_type == '-') else ( 2 if ((rec.proxy_type == 'DCH') | (rec.proxy_type == 'SES')) else 1) - except: + except Exception as e: is_proxy = -1 return is_proxy @@ -193,7 +193,7 @@ def get_domain(self, ip): try: rec = self._get_record(ip) domain = rec.domain - except: + except Exception as e: domain = _INVALID_IP_ADDRESS return domain @@ -202,7 +202,7 @@ def get_usage_type(self, ip): try: rec = self._get_record(ip) usage_type = rec.usage_type - except: + except Exception as e: usage_type = _INVALID_IP_ADDRESS return usage_type @@ -211,7 +211,7 @@ def get_asn(self, ip): try: rec = self._get_record(ip) asn = rec.asn - except: + except Exception as e: asn = _INVALID_IP_ADDRESS return asn @@ -220,7 +220,7 @@ def get_as_name(self, ip): try: rec = self._get_record(ip) as_name = rec.as_name - except: + except Exception as e: as_name = _INVALID_IP_ADDRESS return as_name @@ -229,7 +229,7 @@ def get_last_seen(self, ip): try: rec = self._get_record(ip) last_seen = rec.last_seen - except: + except Exception as e: last_seen = _INVALID_IP_ADDRESS return last_seen @@ -253,7 +253,7 @@ def get_all(self, ip): is_proxy = 0 if (rec.country_short == '-') else ( 2 if ((rec.proxy_type == 'DCH') | (rec.proxy_type == 'SES')) else 1) else: is_proxy = 0 if (rec.proxy_type == '-') else ( 2 if ((rec.proxy_type == 'DCH') | (rec.proxy_type == 'SES')) else 1) - except: + except Exception as e: country_short = _INVALID_IP_ADDRESS country_long = _INVALID_IP_ADDRESS region = _INVALID_IP_ADDRESS @@ -407,7 +407,7 @@ 
def _parse_addr(self, addr): # ipnum = struct.unpack('!L', socket.inet_pton(socket.AF_INET, addr))[0] socket.inet_pton(socket.AF_INET, addr) ipv = 4 - except: + except Exception as e: # reformat ipv4 address in ipv6 if ((ipnum >= 281470681743360) and (ipnum <= 281474976710655)): ipv = 4 @@ -427,7 +427,7 @@ def _parse_addr(self, addr): ipnum = ipnum % 4294967296 else: ipv = 6 - except: + except Exception as e: ipnum = struct.unpack('!L', socket.inet_pton(socket.AF_INET, addr))[0] # socket.inet_pton(socket.AF_INET, addr) ipv = 4 diff --git a/plugins/Phonenumber.py b/plugins/Phonenumber.py index d63e991..7b83f7f 100644 --- a/plugins/Phonenumber.py +++ b/plugins/Phonenumber.py @@ -1,6 +1,5 @@ from plugins.api import phoneapis -import requests -import json +import def Phonenumber(ph): print ('[+]' + ' Fetching Phonenumber Details...' + '\n') diff --git a/plugins/Username.py b/plugins/Username.py index 0db5697..e22367f 100644 --- a/plugins/Username.py +++ b/plugins/Username.py @@ -25,14 +25,6 @@ def user(choice,username): soup = BeautifulSoup(response.text, 'html.parser') #List that will store the data that is to be fetched - data = {'Name': "null", - 'Photo_link': "null", - 'Work':{'Company': "null", 'Position': "null", 'time_period': "null", 'Location': "null"}, - 'Education': {'Institute': "null", 'time_period': "null", 'Location': "null"}, - 'Address': {'Current_city': "null", 'Home_town': "null"}, - 'Favouriate': {}, - 'Contact_info': {} - } ###Finding Name of the user #Min div element is found which contains all the information @@ -74,19 +66,7 @@ def find_home_details(): print("No Home details found") #finding contact details of the user - def find_contact_details(): - contact = soup.find(id="pagelet_contact") - orange = contact.find(attrs={"class":"_4qm1"}) - if (orange.get_text() !=" "): - for category in contact.find_all(attrs={"class":"_4qm1"}): - print(category.find('span').get_text() + " : ") - for company in category.find_all(attrs={"class":"_2iem"}): - if (company.get_text() != " "): - print(company.get_text()) - else: - continue - else: - print("No Contact details found") + ###Logic for finding the status of the response if ("200" in str(response)): @@ -109,7 +89,7 @@ def Instagram(username): print("Full Name: "+res['full_name']) try: print("Business Category: "+res['edge_follow']['business_category_name']) - except: + except Exception as e: print("Account :"+" Private") finally: print("Biograph: " + res['biography']) @@ -134,28 +114,28 @@ def ScrapTweets(username): try: full_name = soup.find('a', attrs={"class": "ProfileHeaderCard-nameLink u-textInheritColor js-nav"}) print("User Name --> " + full_name.text) - except: + except Exception as e: print("User Name -->"+" Not Found") print() try: user_id = soup.find('b', attrs={"class": "u-linkComplex-target"}) print("User Id --> " + user_id.text) - except: + except Exception as e: print("User Id --> "+"Not Found") print() try: decription = soup.find('p', attrs={"class": "ProfileHeaderCard-bio u-dir"}) print("Description --> " + decription.text) - except: + except Exception as e: print("Decription not provided by the user") print() try: user_location = soup.find('span', attrs={"class": "ProfileHeaderCard-locationText u-dir"}) print("Location --> " + user_location.text.strip()) - except: + except Exception as e: print("Location not provided by the user") print() @@ -163,14 +143,14 @@ def ScrapTweets(username): connectivity = soup.find('span', attrs={"class": "ProfileHeaderCard-urlText u-dir"}) tittle = connectivity.a["title"] 
print("Link provided by the user --> " + tittle) - except: + except Exception as e: print("No contact link is provided by the user") print() try: join_date = soup.find('span', attrs={"class": "ProfileHeaderCard-joinDateText js-tooltip u-dir"}) print("The user joined twitter on --> " + join_date.text) - except: + except Exception as e: print("The joined date is not provided by the user") print() @@ -178,32 +158,32 @@ def ScrapTweets(username): birth = soup.find('span', attrs={"class": "ProfileHeaderCard-birthdateText u-dir"}) birth_date = birth.span.text print("Date of Birth:"+birth_date.strip()) - except: + except Exception as e: print("Birth Date not provided by the user") print() try: span_box = soup.findAll('span', attrs={"class": "ProfileNav-value"}) print("Total tweets --> " + span_box[0].text) - except: + except Exception as e: print("Total Tweets --> Zero") print() try: print("Following --> " +span_box[1].text) - except: + except Exception as e: print("Following --> Zero") print() try: print("Followers --> " + span_box[2].text) - except: + except Exception as e: print("Followers --> Zero") print() try: print("Likes send by him --> " + span_box[3].text) - except: + except Exception as e: print("Likes send by him --> Zero") print() @@ -212,7 +192,7 @@ def ScrapTweets(username): print("No. of parties he is Subscribed to --> " + span_box[4].text) else: print("No. of parties he is Subscribed to --> Zero") - except: + except Exception as e: print("No. of parties he is Subscribed to --> Zero") print() diff --git a/plugins/domain.py b/plugins/domain.py index 40bb352..1317471 100644 --- a/plugins/domain.py +++ b/plugins/domain.py @@ -12,8 +12,8 @@ from .webosint.crawler import crawler from .webosint.who.whoami import whoami -global host -global port +global host = None +global port = None # Checking whether the target host is alive or dead def CheckTarget(): diff --git a/plugins/torrent.py b/plugins/torrent.py index e2900f5..3172057 100644 --- a/plugins/torrent.py +++ b/plugins/torrent.py @@ -1,5 +1,5 @@ import requests -import json + def torrent(IP): diff --git a/plugins/webosint/reverseip.py b/plugins/webosint/reverseip.py index e11c103..5cf19ff 100644 --- a/plugins/webosint/reverseip.py +++ b/plugins/webosint/reverseip.py @@ -1,9 +1,5 @@ from requests import get -R = '\033[31m' # red -G = '\033[32m' # green -C = '\033[36m' # cyan -W = '\033[0m' # white def ReverseIP(host, port): print ( '[+]' + 'Checking whether the Target is reachable ...' + '\n') @@ -11,5 +7,5 @@ def ReverseIP(host, port): try: result = get(lookup).text print(result) - except: - print(R+'Error: Invalid IP address') + except Exception as e: + print('Error: Invalid IP address') diff --git a/plugins/webosint/subdomain.py b/plugins/webosint/subdomain.py index da85e21..04ddb9b 100644 --- a/plugins/webosint/subdomain.py +++ b/plugins/webosint/subdomain.py @@ -1,11 +1,6 @@ import json import requests -R = '\033[31m' # red -G = '\033[32m' # green -C = '\033[36m' # cyan -W = '\033[0m' # white - def SubDomain(host, port): print ('[+]' + 'Fetching Subdomains of Target...' 
+ '\n') url = 'https://www.virustotal.com/vtapi/v2/domain/report' From ebe8448227c7bdd9dd40c4dc884d9e056cdd5634 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Wed, 1 Apr 2020 21:40:47 +0530 Subject: [PATCH 06/30] Update --- plugins/domain.py | 4 ++-- plugins/torrent.py | 1 - plugins/webosint/reverseip.py | 2 +- 3 files changed, 3 insertions(+), 4 deletions(-) diff --git a/plugins/domain.py b/plugins/domain.py index 1317471..fc8d025 100644 --- a/plugins/domain.py +++ b/plugins/domain.py @@ -12,8 +12,8 @@ from .webosint.crawler import crawler from .webosint.who.whoami import whoami -global host = None -global port = None +global host +global port # Checking whether the target host is alive or dead def CheckTarget(): diff --git a/plugins/torrent.py b/plugins/torrent.py index 3172057..b0b7496 100644 --- a/plugins/torrent.py +++ b/plugins/torrent.py @@ -1,7 +1,6 @@ import requests - def torrent(IP): r = requests.get("https://api.antitor.com/history/peer/?ip="+ IP +"&key=3cd6463b477d46b79e9eeec21342e4c7") diff --git a/plugins/webosint/reverseip.py b/plugins/webosint/reverseip.py index 5cf19ff..593dda1 100644 --- a/plugins/webosint/reverseip.py +++ b/plugins/webosint/reverseip.py @@ -8,4 +8,4 @@ def ReverseIP(host, port): result = get(lookup).text print(result) except Exception as e: - print('Error: Invalid IP address') + print('Error: Invalid IP address '+e) From 1092f84d1397748d0ce221063f90a176d6f5f886 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Wed, 1 Apr 2020 21:44:15 +0530 Subject: [PATCH 07/30] Standard Improved --- plugins/IP2Proxy.py | 30 +++++++++++----------- plugins/Phonenumber.py | 1 - plugins/Username.py | 48 ++++++++++------------------------- plugins/domain.py | 2 +- plugins/torrent.py | 1 - plugins/webosint/reverseip.py | 8 ++---- plugins/webosint/subdomain.py | 6 ----- 7 files changed, 32 insertions(+), 64 deletions(-) diff --git a/plugins/IP2Proxy.py b/plugins/IP2Proxy.py index 93838e2..51e7314 100644 --- a/plugins/IP2Proxy.py +++ b/plugins/IP2Proxy.py @@ -127,7 +127,7 @@ def get_country_short(self, ip): try: rec = self._get_record(ip) country_short = rec.country_short - except: + except Exception as e: country_short = _INVALID_IP_ADDRESS return country_short @@ -136,7 +136,7 @@ def get_country_long(self, ip): try: rec = self._get_record(ip) country_long = rec.country_long - except: + except Exception as e: country_long = _INVALID_IP_ADDRESS return country_long @@ -145,7 +145,7 @@ def get_region(self, ip): try: rec = self._get_record(ip) region = rec.region - except: + except Exception as e: region = _INVALID_IP_ADDRESS return region @@ -154,7 +154,7 @@ def get_city(self, ip): try: rec = self._get_record(ip) city = rec.city - except: + except Exception as e: city = _INVALID_IP_ADDRESS return city @@ -163,7 +163,7 @@ def get_isp(self, ip): try: rec = self._get_record(ip) isp = rec.isp - except: + except Exception as e: isp = _INVALID_IP_ADDRESS return isp @@ -172,7 +172,7 @@ def get_proxy_type(self, ip): try: rec = self._get_record(ip) proxy_type = rec.proxy_type - except: + except Exception as e: proxy_type = _INVALID_IP_ADDRESS return proxy_type @@ -184,7 +184,7 @@ def is_proxy(self, ip): is_proxy = 0 if (rec.country_short == '-') else ( 2 if ((rec.proxy_type == 'DCH') | (rec.proxy_type == 'SES')) else 1) else: is_proxy = 0 if (rec.proxy_type == '-') else ( 2 if ((rec.proxy_type == 'DCH') | (rec.proxy_type == 'SES')) else 1) - except: + except Exception as e: is_proxy = -1 return is_proxy @@ -193,7 +193,7 @@ def get_domain(self, ip): try: rec = self._get_record(ip) 
domain = rec.domain - except: + except Exception as e: domain = _INVALID_IP_ADDRESS return domain @@ -202,7 +202,7 @@ def get_usage_type(self, ip): try: rec = self._get_record(ip) usage_type = rec.usage_type - except: + except Exception as e: usage_type = _INVALID_IP_ADDRESS return usage_type @@ -211,7 +211,7 @@ def get_asn(self, ip): try: rec = self._get_record(ip) asn = rec.asn - except: + except Exception as e: asn = _INVALID_IP_ADDRESS return asn @@ -220,7 +220,7 @@ def get_as_name(self, ip): try: rec = self._get_record(ip) as_name = rec.as_name - except: + except Exception as e: as_name = _INVALID_IP_ADDRESS return as_name @@ -229,7 +229,7 @@ def get_last_seen(self, ip): try: rec = self._get_record(ip) last_seen = rec.last_seen - except: + except Exception as e: last_seen = _INVALID_IP_ADDRESS return last_seen @@ -253,7 +253,7 @@ def get_all(self, ip): is_proxy = 0 if (rec.country_short == '-') else ( 2 if ((rec.proxy_type == 'DCH') | (rec.proxy_type == 'SES')) else 1) else: is_proxy = 0 if (rec.proxy_type == '-') else ( 2 if ((rec.proxy_type == 'DCH') | (rec.proxy_type == 'SES')) else 1) - except: + except Exception as e: country_short = _INVALID_IP_ADDRESS country_long = _INVALID_IP_ADDRESS region = _INVALID_IP_ADDRESS @@ -407,7 +407,7 @@ def _parse_addr(self, addr): # ipnum = struct.unpack('!L', socket.inet_pton(socket.AF_INET, addr))[0] socket.inet_pton(socket.AF_INET, addr) ipv = 4 - except: + except Exception as e: # reformat ipv4 address in ipv6 if ((ipnum >= 281470681743360) and (ipnum <= 281474976710655)): ipv = 4 @@ -427,7 +427,7 @@ def _parse_addr(self, addr): ipnum = ipnum % 4294967296 else: ipv = 6 - except: + except Exception as e: ipnum = struct.unpack('!L', socket.inet_pton(socket.AF_INET, addr))[0] # socket.inet_pton(socket.AF_INET, addr) ipv = 4 diff --git a/plugins/Phonenumber.py b/plugins/Phonenumber.py index d63e991..bab7645 100644 --- a/plugins/Phonenumber.py +++ b/plugins/Phonenumber.py @@ -1,6 +1,5 @@ from plugins.api import phoneapis import requests -import json def Phonenumber(ph): print ('[+]' + ' Fetching Phonenumber Details...' 
+ '\n') diff --git a/plugins/Username.py b/plugins/Username.py index 0db5697..e22367f 100644 --- a/plugins/Username.py +++ b/plugins/Username.py @@ -25,14 +25,6 @@ def user(choice,username): soup = BeautifulSoup(response.text, 'html.parser') #List that will store the data that is to be fetched - data = {'Name': "null", - 'Photo_link': "null", - 'Work':{'Company': "null", 'Position': "null", 'time_period': "null", 'Location': "null"}, - 'Education': {'Institute': "null", 'time_period': "null", 'Location': "null"}, - 'Address': {'Current_city': "null", 'Home_town': "null"}, - 'Favouriate': {}, - 'Contact_info': {} - } ###Finding Name of the user #Min div element is found which contains all the information @@ -74,19 +66,7 @@ def find_home_details(): print("No Home details found") #finding contact details of the user - def find_contact_details(): - contact = soup.find(id="pagelet_contact") - orange = contact.find(attrs={"class":"_4qm1"}) - if (orange.get_text() !=" "): - for category in contact.find_all(attrs={"class":"_4qm1"}): - print(category.find('span').get_text() + " : ") - for company in category.find_all(attrs={"class":"_2iem"}): - if (company.get_text() != " "): - print(company.get_text()) - else: - continue - else: - print("No Contact details found") + ###Logic for finding the status of the response if ("200" in str(response)): @@ -109,7 +89,7 @@ def Instagram(username): print("Full Name: "+res['full_name']) try: print("Business Category: "+res['edge_follow']['business_category_name']) - except: + except Exception as e: print("Account :"+" Private") finally: print("Biograph: " + res['biography']) @@ -134,28 +114,28 @@ def ScrapTweets(username): try: full_name = soup.find('a', attrs={"class": "ProfileHeaderCard-nameLink u-textInheritColor js-nav"}) print("User Name --> " + full_name.text) - except: + except Exception as e: print("User Name -->"+" Not Found") print() try: user_id = soup.find('b', attrs={"class": "u-linkComplex-target"}) print("User Id --> " + user_id.text) - except: + except Exception as e: print("User Id --> "+"Not Found") print() try: decription = soup.find('p', attrs={"class": "ProfileHeaderCard-bio u-dir"}) print("Description --> " + decription.text) - except: + except Exception as e: print("Decription not provided by the user") print() try: user_location = soup.find('span', attrs={"class": "ProfileHeaderCard-locationText u-dir"}) print("Location --> " + user_location.text.strip()) - except: + except Exception as e: print("Location not provided by the user") print() @@ -163,14 +143,14 @@ def ScrapTweets(username): connectivity = soup.find('span', attrs={"class": "ProfileHeaderCard-urlText u-dir"}) tittle = connectivity.a["title"] print("Link provided by the user --> " + tittle) - except: + except Exception as e: print("No contact link is provided by the user") print() try: join_date = soup.find('span', attrs={"class": "ProfileHeaderCard-joinDateText js-tooltip u-dir"}) print("The user joined twitter on --> " + join_date.text) - except: + except Exception as e: print("The joined date is not provided by the user") print() @@ -178,32 +158,32 @@ def ScrapTweets(username): birth = soup.find('span', attrs={"class": "ProfileHeaderCard-birthdateText u-dir"}) birth_date = birth.span.text print("Date of Birth:"+birth_date.strip()) - except: + except Exception as e: print("Birth Date not provided by the user") print() try: span_box = soup.findAll('span', attrs={"class": "ProfileNav-value"}) print("Total tweets --> " + span_box[0].text) - except: + except Exception as e: 
print("Total Tweets --> Zero") print() try: print("Following --> " +span_box[1].text) - except: + except Exception as e: print("Following --> Zero") print() try: print("Followers --> " + span_box[2].text) - except: + except Exception as e: print("Followers --> Zero") print() try: print("Likes send by him --> " + span_box[3].text) - except: + except Exception as e: print("Likes send by him --> Zero") print() @@ -212,7 +192,7 @@ def ScrapTweets(username): print("No. of parties he is Subscribed to --> " + span_box[4].text) else: print("No. of parties he is Subscribed to --> Zero") - except: + except Exception as e: print("No. of parties he is Subscribed to --> Zero") print() diff --git a/plugins/domain.py b/plugins/domain.py index 40bb352..fc8d025 100644 --- a/plugins/domain.py +++ b/plugins/domain.py @@ -12,7 +12,7 @@ from .webosint.crawler import crawler from .webosint.who.whoami import whoami -global host +global host global port # Checking whether the target host is alive or dead diff --git a/plugins/torrent.py b/plugins/torrent.py index e2900f5..b0b7496 100644 --- a/plugins/torrent.py +++ b/plugins/torrent.py @@ -1,5 +1,4 @@ import requests -import json def torrent(IP): diff --git a/plugins/webosint/reverseip.py b/plugins/webosint/reverseip.py index e11c103..593dda1 100644 --- a/plugins/webosint/reverseip.py +++ b/plugins/webosint/reverseip.py @@ -1,9 +1,5 @@ from requests import get -R = '\033[31m' # red -G = '\033[32m' # green -C = '\033[36m' # cyan -W = '\033[0m' # white def ReverseIP(host, port): print ( '[+]' + 'Checking whether the Target is reachable ...' + '\n') @@ -11,5 +7,5 @@ def ReverseIP(host, port): try: result = get(lookup).text print(result) - except: - print(R+'Error: Invalid IP address') + except Exception as e: + print('Error: Invalid IP address '+e) diff --git a/plugins/webosint/subdomain.py b/plugins/webosint/subdomain.py index da85e21..1e2d5ee 100644 --- a/plugins/webosint/subdomain.py +++ b/plugins/webosint/subdomain.py @@ -1,11 +1,5 @@ -import json import requests -R = '\033[31m' # red -G = '\033[32m' # green -C = '\033[36m' # cyan -W = '\033[0m' # white - def SubDomain(host, port): print ('[+]' + 'Fetching Subdomains of Target...' + '\n') url = 'https://www.virustotal.com/vtapi/v2/domain/report' From ba5da1c0102ecefefe28373509505531f577987a Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Wed, 1 Apr 2020 21:45:51 +0530 Subject: [PATCH 08/30] Phonenumber Fixed --- plugins/Phonenumber.py | 4 ---- 1 file changed, 4 deletions(-) diff --git a/plugins/Phonenumber.py b/plugins/Phonenumber.py index aa67c59..bab7645 100644 --- a/plugins/Phonenumber.py +++ b/plugins/Phonenumber.py @@ -1,9 +1,5 @@ from plugins.api import phoneapis -<<<<<<< HEAD import requests -======= -import ->>>>>>> ebe8448227c7bdd9dd40c4dc884d9e056cdd5634 def Phonenumber(ph): print ('[+]' + ' Fetching Phonenumber Details...' 
+ '\n') From 31abed08b4ccd6bb5b9f9bfc9aafbe706eea53e0 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Wed, 1 Apr 2020 21:53:59 +0530 Subject: [PATCH 09/30] Updated --- .gitignore | 1 - plugins/proxy.py | 2 +- 2 files changed, 1 insertion(+), 2 deletions(-) diff --git a/.gitignore b/.gitignore index 2bd4df5..ef16840 100644 --- a/.gitignore +++ b/.gitignore @@ -117,4 +117,3 @@ core/config.py # database plugins/GeoLite2-City.mmdb plugins/IP2PROXY-IP-PROXYTYPE-COUNTRY-REGION-CITY-ISP-DOMAIN-USAGETYPE-ASN-LASTSEEN.BIN - diff --git a/plugins/proxy.py b/plugins/proxy.py index 8173dfe..4bf87bb 100644 --- a/plugins/proxy.py +++ b/plugins/proxy.py @@ -9,7 +9,7 @@ def ip2Proxy(IP): if re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$",IP): db = IP2Proxy.IP2Proxy() - db.open("/root/Downloads/reconspider/plugins/IP2PROXY-IP-PROXYTYPE-COUNTRY-REGION-CITY-ISP-DOMAIN-USAGETYPE-ASN-LASTSEEN.BIN") + db.open("/root/Downloads/RECONSPIDER/plugins/IP2PROXY-IP-PROXYTYPE-COUNTRY-REGION-CITY-ISP-DOMAIN-USAGETYPE-ASN-LASTSEEN.BIN") record = db.get_all(IP) db.close() if record['is_proxy']!=0: From 2b92204c19577c0bb352046bd6f54138a4114577 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Thu, 2 Apr 2020 10:52:47 +0530 Subject: [PATCH 10/30] Fixed --- .gitignore | 4 +- README.md | 8 +- core/repl_prompt.py | 3 +- plugins/IP2Proxy.py | 496 --------------------------------- plugins/Username.py | 28 +- plugins/domain.py | 22 +- plugins/proxy.py | 2 +- plugins/webosint/crawler.py | 12 +- plugins/webosint/who/whoami.py | 46 +-- plugins/webvuln/bruteforce.py | 15 +- setup.py | 19 +- 11 files changed, 76 insertions(+), 579 deletions(-) delete mode 100644 plugins/IP2Proxy.py diff --git a/.gitignore b/.gitignore index ef16840..cfa1dff 100644 --- a/.gitignore +++ b/.gitignore @@ -115,5 +115,5 @@ core/config.py .vscode/ # database -plugins/GeoLite2-City.mmdb -plugins/IP2PROXY-IP-PROXYTYPE-COUNTRY-REGION-CITY-ISP-DOMAIN-USAGETYPE-ASN-LASTSEEN.BIN +plugins/IP2PROXY-LITE-PX8.BIN.ZIP +plugins/IP2PROXY-LITE-PX8.BIN diff --git a/README.md b/README.md index 880b5a4..a3a7307 100644 --- a/README.md +++ b/README.md @@ -172,16 +172,11 @@ python setup.py install Step 3 - Database -**Geolite2 City Database** -``` -https://github.com/texnikru/GeoLite2-Database/blob/master/GeoLite2-City.mmdb.gz -``` - **IP2Proxy Database** ``` https://lite.ip2location.com/database/px8-ip-proxytype-country-region-city-isp-domain-usagetype-asn-lastseen ``` -Download both database and move it to reconspier/plugins/. +Download database, extract it and move to reconspider/plugins/ directory. # Usage @@ -344,7 +339,6 @@ Do you want to have a conversation in private? Please go through the [ReconSpider Wiki Guide](https://github.com/bhavsec/reconspider/wiki) for a detailed explanation of each and every option and feature. - # Frequent & Seamless Updates ReconSpider is under heavy development and updates for fixing bugs. optimizing performance & new features are being rolled regularly. Custom error handling is also not implemented, and all the focus is to create required functionality. 
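
The database step above is the only manual setup the IP2PROXY option needs, and the lookup it feeds is small enough to sketch. The snippet below mirrors the flow of plugins/proxy.py as of this patch: the function name ip2proxy_lookup is illustrative only, and it assumes the PX8 LITE file extracted by setup.py sits under plugins/ relative to the working directory.

```
import re

import IP2Proxy

def ip2proxy_lookup(ip):
    # Same IPv4 sanity check proxy.py applies before touching the database.
    if not re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$", ip):
        print("\nEnter a Valid IP Address")
        return None
    db = IP2Proxy.IP2Proxy()
    db.open("./plugins/IP2PROXY-LITE-PX8.BIN")  # path this patch series hard-wires
    try:
        # get_all() returns a dict with 'is_proxy', 'proxy_type',
        # 'country_long' and related fields.
        record = db.get_all(ip)
    finally:
        db.close()
    if record['is_proxy'] != 0:
        print("Proxy Type : " + record['proxy_type'])
        print("Country : " + record['country_long'])
    else:
        print("IP does not use any Proxy or VPN")
    return record
```

Called as ip2proxy_lookup("8.8.8.8") from the repository root, this runs roughly the same open/lookup/close sequence that option 12 of the CLI performs.
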
diff --git a/core/repl_prompt.py b/core/repl_prompt.py index 1250d90..36cad71 100644 --- a/core/repl_prompt.py +++ b/core/repl_prompt.py @@ -1,6 +1,5 @@ # -*- coding: utf-8 -*- from __future__ import unicode_literals -import socket from plugins.censys import censys_ip from plugins.dnsdump import dnsmap @@ -118,7 +117,7 @@ def repl(): # Read–eval–print loop break ip2Proxy(IP) continue - + elif choice == 13: while 1: emailaddress = prompt("MAIL ADDRESS (Eg:temp@gmail.com) >> ") diff --git a/plugins/IP2Proxy.py b/plugins/IP2Proxy.py deleted file mode 100644 index 51e7314..0000000 --- a/plugins/IP2Proxy.py +++ /dev/null @@ -1,496 +0,0 @@ -import sys -import struct -import socket - -if sys.version < '3': - def u(x): - return x.decode('utf-8') - def b(x): - return str(x) -else: - def u(x): - if isinstance(x, bytes): - return x.decode() - return x - def b(x): - if isinstance(x, bytes): - return x - return x.encode('ascii') - -# Windows version of Python does not provide it -# for compatibility with older versions of Windows. -if not hasattr(socket, 'inet_pton'): - def inet_pton(t, addr): - import ctypes - a = ctypes.WinDLL('ws2_32.dll') - in_addr_p = ctypes.create_string_buffer(b(addr)) - if t == socket.AF_INET: - out_addr_p = ctypes.create_string_buffer(4) - elif t == socket.AF_INET6: - out_addr_p = ctypes.create_string_buffer(16) - n = a.inet_pton(t, in_addr_p, out_addr_p) - if n == 0: - raise ValueError('Invalid address') - return out_addr_p.raw - socket.inet_pton = inet_pton - -_VERSION = '2.2.0' -_NO_IP = 'MISSING IP ADDRESS' -_FIELD_NOT_SUPPORTED = 'NOT SUPPORTED' -_INVALID_IP_ADDRESS = 'INVALID IP ADDRESS' -MAX_IPV4_RANGE = 4294967295 -MAX_IPV6_RANGE = 340282366920938463463374607431768211455 - -class IP2ProxyRecord: - ''' IP2Proxy record with all fields from the database ''' - ip = None - country_short = _FIELD_NOT_SUPPORTED - country_long = _FIELD_NOT_SUPPORTED - region = _FIELD_NOT_SUPPORTED - city = _FIELD_NOT_SUPPORTED - isp = _FIELD_NOT_SUPPORTED - proxy_type = _FIELD_NOT_SUPPORTED - usage_type = _FIELD_NOT_SUPPORTED - as_name = _FIELD_NOT_SUPPORTED - asn = _FIELD_NOT_SUPPORTED - last_seen = _FIELD_NOT_SUPPORTED - domain = _FIELD_NOT_SUPPORTED - - def __str__(self): - return str(self.__dict__) - - def __repr__(self): - return repr(self.__dict__) - -_COUNTRY_POSITION = (0, 2, 3, 3, 3, 3, 3, 3, 3) -_REGION_POSITION = (0, 0, 0, 4, 4, 4, 4, 4, 4) -_CITY_POSITION = (0, 0, 0, 5, 5, 5, 5, 5, 5) -_ISP_POSITION = (0, 0, 0, 0, 6, 6, 6, 6, 6) -_PROXYTYPE_POSITION = (0, 0, 2, 2, 2, 2, 2, 2, 2) -_DOMAIN_POSITION = (0, 0, 0, 0, 0, 7, 7, 7, 7) -_USAGETYPE_POSITION = (0, 0, 0, 0, 0, 0, 8, 8, 8) -_ASN_POSITION = (0, 0, 0, 0, 0, 0, 0, 9, 9) -_AS_POSITION = (0, 0, 0, 0, 0, 0, 0, 10, 10) -_LASTSEEN_POSITION = (0, 0, 0, 0, 0, 0, 0, 0, 11) - -class IP2Proxy(object): - ''' IP2Proxy database ''' - - def __init__(self, filename=None): - ''' Creates a database object and opens a file if filename is given ''' - if filename: - self.open(filename) - - def __enter__(self): - if not hasattr(self, '_f') or self._f.closed: - raise ValueError("Cannot enter context with closed file") - return self - - def __exit__(self, exc_type, exc_value, traceback): - self.close() - - def open(self, filename): - ''' Opens a database file ''' - # Ensure old file is closed before opening a new one - self.close() - - self._f = open(filename, 'rb') - self._dbtype = struct.unpack('B', self._f.read(1))[0] - self._dbcolumn = struct.unpack('B', self._f.read(1))[0] - self._dbyear = 2000 + struct.unpack('B', self._f.read(1))[0] - 
self._dbmonth = struct.unpack('B', self._f.read(1))[0] - self._dbday = struct.unpack('B', self._f.read(1))[0] - self._ipv4dbcount = struct.unpack('= 281470681743360) and (ipnum <= 281474976710655)): - ipv = 4 - ipnum = ipnum - 281470681743360 - else: - ipv = 6 - else: - # ipv = 6 - if ((ipnum >= 42545680458834377588178886921629466624) and (ipnum <= 42550872755692912415807417417958686719)): - ipv = 4 - ipnum = ipnum >> 80 - ipnum = ipnum % 4294967296 - elif ((ipnum >= 42540488161975842760550356425300246528) and (ipnum <= 42540488241204005274814694018844196863)): - ipv = 4 - # ipnum = ipnum % 100000000000000000000000000000000 - ipnum = ~ ipnum - ipnum = ipnum % 4294967296 - else: - ipv = 6 - except Exception as e: - ipnum = struct.unpack('!L', socket.inet_pton(socket.AF_INET, addr))[0] - # socket.inet_pton(socket.AF_INET, addr) - ipv = 4 - return ipv, ipnum - - def _get_record(self, ip): - low = 0 - ipv = self._parse_addr(ip)[0] - ipnum = self._parse_addr(ip)[1] - if ipv == 4: - # ipnum = struct.unpack('!L', socket.inet_pton(socket.AF_INET, ip))[0] - if (ipnum == MAX_IPV4_RANGE): - ipno = ipnum - 1 - else: - ipno = ipnum - off = 0 - baseaddr = self._ipv4dbaddr - high = self._ipv4dbcount - if self._ipv4indexbaseaddr > 0: - indexpos = ((ipno >> 16) << 3) + self._ipv4indexbaseaddr - low = self._readi(indexpos) - high = self._readi(indexpos + 4) - - elif ipv == 6: - # a, b = struct.unpack('!QQ', socket.inet_pton(socket.AF_INET6, ip)) - # ipnum = (a << 64) | b - if (ipnum == MAX_IPV6_RANGE): - ipno = ipnum - 1 - else: - ipno = ipnum - off = 12 - baseaddr = self._ipv6dbaddr - high = self._ipv6dbcount - if self._ipv6indexbaseaddr > 0: - indexpos = ((ipno >> 112) << 3) + self._ipv6indexbaseaddr - low = self._readi(indexpos) - high = self._readi(indexpos + 4) - - elif ipnum == '': - rec = IP2ProxyRecord() - rec.country_short = _NO_IP - rec.country_long = _NO_IP - rec.region = _NO_IP - rec.city = _NO_IP - rec.isp = _NO_IP - rec.proxy_type = _NO_IP - rec.domain = _NO_IP - rec.usage_type = _NO_IP - rec.asn = _NO_IP - rec.as_name = _NO_IP - rec.last_seen = _NO_IP - return rec - - while low <= high: - # mid = int((low + high) / 2) - mid = int((low + high) >> 1) - ipfrom = self._readip(baseaddr + (mid) * (self._dbcolumn * 4 + off), ipv) - ipto = self._readip(baseaddr + (mid + 1) * (self._dbcolumn * 4 + off), ipv) - - if ipfrom <= ipno < ipto: - return self._read_record(mid, ipv) - else: - if ipno < ipfrom: - high = mid - 1 - else: - low = mid + 1 diff --git a/plugins/Username.py b/plugins/Username.py index e22367f..5cd4a5c 100644 --- a/plugins/Username.py +++ b/plugins/Username.py @@ -90,7 +90,7 @@ def Instagram(username): try: print("Business Category: "+res['edge_follow']['business_category_name']) except Exception as e: - print("Account :"+" Private") + print("Account :"+" Private" + str(e)) finally: print("Biograph: " + res['biography']) print("URL: "+ str(res['external_url'])) @@ -115,28 +115,28 @@ def ScrapTweets(username): full_name = soup.find('a', attrs={"class": "ProfileHeaderCard-nameLink u-textInheritColor js-nav"}) print("User Name --> " + full_name.text) except Exception as e: - print("User Name -->"+" Not Found") + print("User Name -->"+" Not Found" + str(e)) print() try: user_id = soup.find('b', attrs={"class": "u-linkComplex-target"}) print("User Id --> " + user_id.text) except Exception as e: - print("User Id --> "+"Not Found") + print("User Id --> "+"Not Found" + str(e)) print() try: decription = soup.find('p', attrs={"class": "ProfileHeaderCard-bio u-dir"}) print("Description --> " + 
decription.text)
     except Exception as e:
-        print("Decription not provided by the user")
+        print("Description not provided by the user" + str(e))
     print()
 
     try:
         user_location = soup.find('span', attrs={"class": "ProfileHeaderCard-locationText u-dir"})
         print("Location --> " + user_location.text.strip())
     except Exception as e:
-        print("Location not provided by the user")
+        print("Location not provided by the user" + str(e))
     print()
 
     try:
@@ -144,14 +144,14 @@ def ScrapTweets(username):
         tittle = connectivity.a["title"]
         print("Link provided by the user --> " + tittle)
     except Exception as e:
-        print("No contact link is provided by the user")
+        print("No contact link is provided by the user" + str(e))
     print()
 
     try:
         join_date = soup.find('span', attrs={"class": "ProfileHeaderCard-joinDateText js-tooltip u-dir"})
         print("The user joined twitter on --> " + join_date.text)
     except Exception as e:
-        print("The joined date is not provided by the user")
+        print("The joined date is not provided by the user" + str(e))
     print()
 
     try:
@@ -159,32 +159,32 @@ def ScrapTweets(username):
         birth_date = birth.span.text
         print("Date of Birth:"+birth_date.strip())
     except Exception as e:
-        print("Birth Date not provided by the user")
+        print("Birth Date not provided by the user" + str(e))
     print()
 
     try:
         span_box = soup.findAll('span', attrs={"class": "ProfileNav-value"})
         print("Total tweets --> " + span_box[0].text)
     except Exception as e:
-        print("Total Tweets --> Zero")
+        print("Total Tweets --> Zero" + str(e))
     print()
 
     try:
         print("Following --> " +span_box[1].text)
     except Exception as e:
-        print("Following --> Zero")
+        print("Following --> Zero" + str(e))
     print()
 
     try:
         print("Followers --> " + span_box[2].text)
     except Exception as e:
-        print("Followers --> Zero")
+        print("Followers --> Zero" + str(e))
     print()
 
     try:
         print("Likes send by him --> " + span_box[3].text)
     except Exception as e:
-        print("Likes send by him --> Zero")
+        print("Likes send by him --> Zero" + str(e))
     print()
 
     try:
@@ -193,10 +193,10 @@ def ScrapTweets(username):
         else:
             print("No. of parties he is Subscribed to --> Zero")
     except Exception as e:
-        print("No. of parties he is Subscribed to --> Zero")
+        print("No. of parties he is Subscribed to --> Zero" + str(e))
     print()
 
-    spana = soup.findAll('span', attrs={"class": "ProfileNav-value"})
+    #spana = soup.findAll('span', attrs={"class": "ProfileNav-value"})
     print("Tweets by "+ username + " are --> ")
 
     # TweetTextSize TweetTextSize--normal js-tweet-text tweet-text
diff --git a/plugins/domain.py b/plugins/domain.py
index fc8d025..1910bdf 100644
--- a/plugins/domain.py
+++ b/plugins/domain.py
@@ -12,11 +12,8 @@
 from .webosint.crawler import crawler
 from .webosint.who.whoami import whoami
 
-global host
-global port
-
 # Checking whether the target host is alive or dead
-def CheckTarget():
+def CheckTarget(host,port):
     s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
     result = s.connect_ex((host, port))
 
@@ -26,15 +23,11 @@ def CheckTarget():
         return False
 
 # Main Method
-def domain(h,p):
-    global host
-    global port
-    host=h
-    port=p
+def domain(host,port):
 
-    if CheckTarget()==True:
+    if CheckTarget(host,port)==True:
         print("\nTarget Alive \n")
-        Menu()
+        Menu(host,port)
     else:
         print("The Host is Unreachable \n")
         exit()
@@ -47,7 +40,6 @@
 
 
 def nmaprec(host,port):
-    Choice = 1
     while True:
 
         print("1. 
Scan Default Ports (22-443)") @@ -58,7 +50,7 @@ def nmaprec(host,port): if (Choice >= 0) and (Choice < 3): NmapFunctions[Choice](host, port) elif Choice == 3: - Menu() + Menu(host,port) else: print("Please choose an Appropriate option") @@ -80,7 +72,7 @@ def BruteForce(host, port): if (Selection >= 0) and (Selection < 3): BruteFunctions[Selection](host, port) elif Selection == 3: - Menu() + Menu(host,port) else: print("Please choose an Appropriate option") @@ -100,7 +92,7 @@ def BruteForce(host, port): 12:whoami } -def Menu(): +def Menu(host,port): Selection = 1 while True: print('') diff --git a/plugins/proxy.py b/plugins/proxy.py index 4bf87bb..73402e5 100644 --- a/plugins/proxy.py +++ b/plugins/proxy.py @@ -9,7 +9,7 @@ def ip2Proxy(IP): if re.match(r"^\d{1,3}\.\d{1,3}\.\d{1,3}\.\d{1,3}$",IP): db = IP2Proxy.IP2Proxy() - db.open("/root/Downloads/RECONSPIDER/plugins/IP2PROXY-IP-PROXYTYPE-COUNTRY-REGION-CITY-ISP-DOMAIN-USAGETYPE-ASN-LASTSEEN.BIN") + db.open("./plugins/IP2PROXY-LITE-PX8.BIN") record = db.get_all(IP) db.close() if record['is_proxy']!=0: diff --git a/plugins/webosint/crawler.py b/plugins/webosint/crawler.py index 59fdcc6..7f5eb01 100644 --- a/plugins/webosint/crawler.py +++ b/plugins/webosint/crawler.py @@ -1,6 +1,5 @@ import os import bs4 -import lxml import requests user_agent = {'User-Agent' : 'Mozilla/5.0 (X11; Linux x86_64; rv:60.0) Gecko/20100101 Firefox/60.0'} @@ -53,8 +52,8 @@ def crawler(target,port): url = url.strip() total.append(url) r_total.append(target + url) - except: - pass + except Exception as e: + print(e) elif 'Allow' in entry: url = entry.split(':') try: @@ -62,8 +61,8 @@ def crawler(target,port): url = url.strip() total.append(url) r_total.append(target + url) - except: - pass + except Exception as e: + print(e) r_total = set(r_total) print('['.rjust(8, '.') + ' {} ]'.format(str(len(r_total)))) @@ -154,7 +153,7 @@ def crawler(target,port): dumpfile.write('URL : {}'.format(target) + '\n\n') try: dumpfile.write('Title : {}'.format(soup.title.string) + '\n') - except AttributeError: + except AttributeError as e: dumpfile.write('Title : None' + '\n') dumpfile.write('\nrobots Links : ' + str(len(r_total))) dumpfile.write('\nsitemap Links : ' + str(len(sm_total))) @@ -164,6 +163,7 @@ def crawler(target,port): dumpfile.write('\nExternal Links : ' + str(len(ext_total))) dumpfile.write('\nImages Links : ' + str(len(img_total))) dumpfile.write('\nTotal Links Found : ' + str(len(total)) + '\n') + print(str(e)) if len(r_total) is not 0: dumpfile.write('\nrobots :\n\n') diff --git a/plugins/webosint/who/whoami.py b/plugins/webosint/who/whoami.py index 4c4a719..ed85b17 100644 --- a/plugins/webosint/who/whoami.py +++ b/plugins/webosint/who/whoami.py @@ -8,25 +8,27 @@ def whoami(target,post): ip = re.compile('(([2][5][0-5]\.)|([2][0-4][0-9]\.)|([0-1]?[0-9]?[0-9]\.)){3}' +'(([2][5][0-5])|([2][0-4][0-9])|([0-1]?[0-9]?[0-9]))') match = ip.search(getweb) - #target=match.group() - w = whois.whois(target) - print("Domain Name:"+ str(w['domain_name'])) - print("Register:"+str(w['registrar'])) - try: - print("Whois Server:"+str(w['whois_server'])) - except: - pass - print("Server:"+str(w['name_servers'])) - print("Emails:"+str(w['emails'])) - try: - print("Organisation:"+str(w['org'])) - except: - print("Organisation:"+str(w['organization'])) - try: - print("Address:"+str(w['address'])) - print("City:"+str(w['city'])) - print("State:"+str(w['state'])) - print("Zipcode:"+str(w['zipcode'])) - except: - pass - print("Country:"+str(w['country'])) + if match: + #target=match.group() 
+ w = whois.whois(target) + print("Domain Name:"+ str(w['domain_name'])) + print("Register:"+str(w['registrar'])) + try: + print("Whois Server:"+str(w['whois_server'])) + except Exception as e: + print(e) + print("Server:"+str(w['name_servers'])) + print("Emails:"+str(w['emails'])) + try: + print("Organisation:"+str(w['org'])) + except Exception as e: + print("Organisation:"+str(w['organization'])) + print(e) + try: + print("Address:"+str(w['address'])) + print("City:"+str(w['city'])) + print("State:"+str(w['state'])) + print("Zipcode:"+str(w['zipcode'])) + except Exception as e: + print(e) + print("Country:"+str(w['country'])) diff --git a/plugins/webvuln/bruteforce.py b/plugins/webvuln/bruteforce.py index dae472d..0a7a599 100644 --- a/plugins/webvuln/bruteforce.py +++ b/plugins/webvuln/bruteforce.py @@ -1,4 +1,5 @@ -import paramiko,requests,socket +import paramiko +import socket from ftplib import FTP def ssh(host, port): @@ -35,7 +36,6 @@ def ssh(host, port): try: ssh.connect(host, port=port, username=username, password=password, timeout=10) flag = 0 - pass except paramiko.AuthenticationException: flag = 1 @@ -58,9 +58,6 @@ def ssh(host, port): print('') elif flag == 1: print("Invalid Credentials") - - else: - pass except socket.error as e: print("Error : %s" %e) @@ -93,7 +90,6 @@ def ssh(host, port): try: ssh.connect(host, port=22, username=username, password=password, timeout=10) flag = 0 - pass except paramiko.AuthenticationException: flag = 1 @@ -116,9 +112,6 @@ def ssh(host, port): print('') elif flag == 1: print("Invalid Credentials") - - else: - pass except socket.error as e: print("Error : %s" % e) @@ -155,7 +148,6 @@ def ftp(host, port): try: ftp.login(user='username', passwd='password') flag = 0 - pass except socket.error as e: flag = 2 @@ -177,8 +169,5 @@ def ftp(host, port): elif flag == 1: print("Invalid Credentials") - else: - pass - except socket.error as e: print("Error : %s" %e) diff --git a/setup.py b/setup.py index 0ecd9d2..8a7eb1f 100644 --- a/setup.py +++ b/setup.py @@ -1,4 +1,5 @@ from setuptools import setup +import os fout = open("core/config.py", "w") fout.write("shodan_api = " + '"' + "C23OXE0bVMrul2YeqcL7zxb6jZ4pj2by" + '"' + "\n") @@ -12,6 +13,22 @@ author="BhavKaran (@bhavsec)", author_email="contact@bhavkaran.com", license="GPL-3.0", - install_requires=["shodan", "requests", "prompt_toolkit","beautifulsoup4","urllib3","IP2proxy"], + install_requires=["shodan", "requests", "prompt_toolkit","beautifulsoup4","urllib3","IP2proxy","wget","paramiko","h8mail"], console=["reconspider.py"], ) + +import wget + +#Database +url="https://www.ip2location.com/download?token=hg5uYe2Jvri4R7P1j8b71Pk8dnvIU2M6A9jz2tvcVtGx8ZK2UPQgzr6Hk3cV68oH&file=PX8LITEBIN" +print('\nDownloading IP2PROXY-IP-PROXYTYPE-COUNTRY-REGION-CITY-ISP-DOMAIN-USAGETYPE-ASN-LASTSEEN.BIN...') +filepath=os.getcwd()+"/plugins/" +wget.download(url,out=filepath) +print('\nDownload Finished') + +import zipfile +print('\nExtracting Files') +with zipfile.ZipFile(filepath+"IP2PROXY-LITE-PX8.BIN.ZIP","r") as zip_ref: + zip_ref.extract("IP2PROXY-LITE-PX8.BIN",filepath) + +print("\nInstallation Successfull") From cb09b181d7f7deba8ad9ece8f12e21c86d1e3323 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Fri, 3 Apr 2020 20:28:46 +0530 Subject: [PATCH 11/30] mail breach updated --- README.md | 6 +++--- core/repl_prompt.py | 4 ++-- plugins/maildb.py | 25 ++++++++++++++++--------- reconspider.py | 2 +- 4 files changed, 22 insertions(+), 15 deletions(-) diff --git a/README.md b/README.md index a3a7307..b761409 100644 
--- a/README.md
+++ b/README.md
@@ -110,7 +110,7 @@ ENTER 0 - 11 TO SELECT OPTIONS
 10. TORRENT             Gather torrent download history of IP
 11. USERNAME            Extract Account info. from social media
 12. IP2PROXY            Check whether IP uses any VPN / PROXY
-13. MAIL BREACH         Check whethers given mail is breached
+13. MAIL BREACH         Check for breached mail IDs from given domain
 99. UPDATE              Update ReconSpider to its latest version
 
 0. EXIT                 Exit from ReconSpider to your terminal
@@ -297,10 +297,10 @@ IPADDRESS (Eg:192.168.1.1) >>
 
 **12. MAIL BREACH**
 
-This option allows you to identify whether provided mail has been breach in some website.
+This option allows you to identify all breached mail ID from given domain.
 ```
 Reconspider >> 13
-MAIL ADDRESS (Eg:temp@gmail.com) >>
+MAIL ADDRESS (Eg:intercom.io) >>
 ```
 
 **99. UPDATE**
diff --git a/core/repl_prompt.py b/core/repl_prompt.py
index 36cad71..270e57b 100644
--- a/core/repl_prompt.py
+++ b/core/repl_prompt.py
@@ -120,9 +120,9 @@ def repl(): # Read–eval–print loop
 
     elif choice == 13:
         while 1:
-            emailaddress = prompt("MAIL ADDRESS (Eg:temp@gmail.com) >> ")
+            domain = prompt("MAIL ADDRESS (Eg:intercom.io) >> ")
             break
-        maildb(emailaddress)
+        maildb(domain)
         continue
 
     elif choice == 99:
diff --git a/plugins/maildb.py b/plugins/maildb.py
index 9882ca1..712e63f 100644
--- a/plugins/maildb.py
+++ b/plugins/maildb.py
@@ -1,14 +1,21 @@
-import os
+import requests
+import json
 
 def maildb(emailaddress):
     if ("@" and ".com") or ("@" and ".in") in emailaddress:
-        os.system("h8mail -t "+emailaddress+" -o "+os.getcwd()+"/plugins/output.csv > " +os.getcwd()+"/plugins/output.log")
-        f=open(os.getcwd()+"/plugins/output.csv","r")
-        line=f.readlines()
-        if len(line) > 1:
-            for i in line:
-                print(i)
-        else:
-            print("Data breached is Not Compromised")
+        req=requests.get("https://api.hunter.io/v2/domain-search?domain="+emailaddress+"&api_key=9f189e87e011a1d2f3013ace7b14045dec60f62c")
+        j=req.json()
+        print("[+] Breaching from "+emailaddress+"...\n")
+        for i in range(len(j['data']['emails'])):
+            print("Email ID :",j['data']['emails'][i]['value'])
+            print("First Name :",j['data']['emails'][i]['first_name'])
+            print("Last Name :",j['data']['emails'][i]['last_name'])
+            if j['data']['emails'][i]['position']!=None:
+                print("Position :",j['data']['emails'][i]['position'])
+            if j['data']['emails'][i]['linkedin']!=None:
+                print("Linkedin :",j['data']['emails'][i]['linkedin'])
+            if j['data']['emails'][i]['twitter']!=None:
+                print("Twitter :",j['data']['emails'][i]['twitter'])
+            print()
     else:
         print("Error: Invalid Email Address")
diff --git a/reconspider.py b/reconspider.py
index d09bf9f..e238091 100644
--- a/reconspider.py
+++ b/reconspider.py
@@ -26,7 +26,7 @@ def banner():
 10. TORRENT             Gather torrent download history of IP
 11. USERNAME            Extract Account info. from social media
 12. IP2PROXY            Check whether IP uses any VPN / PROXY
-13. MAIL BREACH         Check whethers given mail is breached
+13. MAIL BREACH         Check for breached mail IDs from given domain
 99. UPDATE              Update ReconSpider to its latest version
 
 0. 
EXIT Exit from ReconSpider to your terminal From 31cd58f1d82ae7c3f79cc961570d363b3172018a Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Fri, 3 Apr 2020 20:40:08 +0530 Subject: [PATCH 12/30] Repl_prompt.py update --- core/repl_prompt.py | 4 ++-- 1 file changed, 2 insertions(+), 2 deletions(-) diff --git a/core/repl_prompt.py b/core/repl_prompt.py index 270e57b..eeedcb0 100644 --- a/core/repl_prompt.py +++ b/core/repl_prompt.py @@ -120,9 +120,9 @@ def repl(): # Read–eval–print loop elif choice == 13: while 1: - domain = prompt("MAIL ADDRESS (Eg:intercom.io) >> ") + web = prompt("MAIL ADDRESS (Eg:intercom.io) >> ") break - maildb(domain) + maildb(web) continue elif choice == 99: From 4e41d4add5c0fa7bd150a9a8aa7046cc647655a8 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Fri, 3 Apr 2020 21:25:25 +0530 Subject: [PATCH 13/30] Proxy option fixed --- plugins/proxy.py | 2 +- plugins/webosint/who/whois.py | 10 +++------- plugins/whois.py | 5 ++--- 3 files changed, 6 insertions(+), 11 deletions(-) diff --git a/plugins/proxy.py b/plugins/proxy.py index 73402e5..91d50df 100644 --- a/plugins/proxy.py +++ b/plugins/proxy.py @@ -33,7 +33,7 @@ def ip2Proxy(IP): response = r.json() print("Latitude :"+" {latitude}".format(**response)) print("Longitude :"+" {longitude}".format(**response)) - if input("Want More Whois Details (Y/N): "): + if input("Want More Whois Details (Y/N):") in ["Y","y"]: whois_more(IP) if response['latitude'] and response['longitude']: lats = response['latitude'] diff --git a/plugins/webosint/who/whois.py b/plugins/webosint/who/whois.py index 536f928..4915dc7 100644 --- a/plugins/webosint/who/whois.py +++ b/plugins/webosint/who/whois.py @@ -1,8 +1,4 @@ -import os +import requests def whois_more(IP): - os.system("whois "+IP+" > output.txt") - f=open("output.txt","r") - f1=f.readlines() - for line in f1: - if "%" not in line and line.strip(): - print(line) + result = requests.get('http://api.hackertarget.com/whois/?q=' + IP).text + print('\n'+ result + '\n') diff --git a/plugins/whois.py b/plugins/whois.py index 05310a6..3eda9a6 100644 --- a/plugins/whois.py +++ b/plugins/whois.py @@ -1,7 +1,6 @@ -from requests import get - +import requests def whois(wh): url = wh result = get('http://api.hackertarget.com/whois/?q=' + url).text - print('\n'+ result + '\n') \ No newline at end of file + print('\n'+ result + '\n') From 02a08a09cc9f8ae2a6a245ee1face384d4027f77 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Fri, 3 Apr 2020 21:51:52 +0530 Subject: [PATCH 14/30] Fixed issues --- core/repl_prompt.py | 2 +- core/updater.py | 4 ++-- plugins/Username.py | 9 +++++---- plugins/domain.py | 2 +- plugins/maildb.py | 1 - plugins/webosint/crawler.py | 6 +++--- plugins/webosint/who/whois.py | 1 + plugins/webvuln/clickjacking.py | 6 +++--- plugins/whois.py | 2 +- 9 files changed, 17 insertions(+), 16 deletions(-) diff --git a/core/repl_prompt.py b/core/repl_prompt.py index eeedcb0..3721842 100644 --- a/core/repl_prompt.py +++ b/core/repl_prompt.py @@ -23,7 +23,7 @@ def repl(): # Read–eval–print loop while 1: user_input = prompt("\nReconspider >> ") if len(user_input)==0: - print("ENTER 1 - 7 TO SELECT OPTIONS") + print("ENTER 1 - 13 TO SELECT OPTIONS") continue try: choice = int(user_input) diff --git a/core/updater.py b/core/updater.py index b151eba..3b48e97 100644 --- a/core/updater.py +++ b/core/updater.py @@ -26,10 +26,10 @@ def update(): currentPath = os.getcwd().split('/') # if you know it, you know it folder = currentPath[-1] # current directory name path = '/'.join(currentPath) # 
From 02a08a09cc9f8ae2a6a245ee1face384d4027f77 Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Fri, 3 Apr 2020 21:51:52 +0530
Subject: [PATCH 14/30] Fixed issues

---
 core/repl_prompt.py             | 2 +-
 core/updater.py                 | 4 ++--
 plugins/Username.py             | 9 +++++----
 plugins/domain.py               | 2 +-
 plugins/maildb.py               | 1 -
 plugins/webosint/crawler.py     | 6 +++---
 plugins/webosint/who/whois.py   | 1 +
 plugins/webvuln/clickjacking.py | 6 +++---
 plugins/whois.py                | 2 +-
 9 files changed, 17 insertions(+), 16 deletions(-)

diff --git a/core/repl_prompt.py b/core/repl_prompt.py
index eeedcb0..3721842 100644
--- a/core/repl_prompt.py
+++ b/core/repl_prompt.py
@@ -23,7 +23,7 @@ def repl(): # Read–eval–print loop
     while 1:
         user_input = prompt("\nReconspider >> ")
         if len(user_input)==0:
-            print("ENTER 1 - 7 TO SELECT OPTIONS")
+            print("ENTER 1 - 13 TO SELECT OPTIONS")
            continue
         try:
             choice = int(user_input)
diff --git a/core/updater.py b/core/updater.py
index b151eba..3b48e97 100644
--- a/core/updater.py
+++ b/core/updater.py
@@ -26,10 +26,10 @@ def update():
     currentPath = os.getcwd().split('/') # if you know it, you know it
     folder = currentPath[-1] # current directory name
     path = '/'.join(currentPath) # current directory path
-    
+
     if sys.version_info[0] > 2:
         choice = input('\n%s Would you like to update? [Y/n] ' % que).lower()
-    
+
     else:
         choice = raw_input('\n%s Would you like to update? [Y/n] ' % que).lower()
diff --git a/plugins/Username.py b/plugins/Username.py
index 5cd4a5c..aa8e19e 100644
--- a/plugins/Username.py
+++ b/plugins/Username.py
@@ -1,6 +1,6 @@
 import requests
 from bs4 import BeautifulSoup
-from urllib.request import urlopen as uReq
+import urllib.request
 
 out=[]
 
@@ -105,9 +105,10 @@ def Instagram(username):
 
 def ScrapTweets(username):
     link = "https://twitter.com/" + username
-    the_client = uReq(link)
-    page_html = the_client.read()
-    the_client.close()
+    the_client = urllib.request.Request(link)
+    with urllib.request.urlopen(the_client) as response:
+        page_html = response.read()
+    #the_client.close()
 
     soup = BeautifulSoup(page_html, 'html.parser')
 
diff --git a/plugins/domain.py b/plugins/domain.py
index 1910bdf..9390bd3 100644
--- a/plugins/domain.py
+++ b/plugins/domain.py
@@ -114,7 +114,7 @@ def Menu(host,port):
         if (Selection >= 0) and (Selection <=12):
             MainFunctions[Selection](host, port)
         elif Selection == 99:
-            exit()
+            return
         else:
             print("Error: Please choose an Appropriate option")
         print('')
diff --git a/plugins/maildb.py b/plugins/maildb.py
index 712e63f..c6ec678 100644
--- a/plugins/maildb.py
+++ b/plugins/maildb.py
@@ -1,5 +1,4 @@
 import requests
-import json
 
 def maildb(emailaddress):
     if ("@" and ".com") or ("@" and ".in") in emailaddress:
diff --git a/plugins/webosint/crawler.py b/plugins/webosint/crawler.py
index 7f5eb01..8b33cf8 100644
--- a/plugins/webosint/crawler.py
+++ b/plugins/webosint/crawler.py
@@ -23,7 +23,7 @@ def crawler(target,port):
     print ('\n' + '[+]' + ' Crawling Target...'+ '\n')
     try:
         target=port+target
-        rqst = requests.get(target, headers=user_agent, verify=False, timeout=10)
+        rqst = requests.get(target, headers=user_agent, verify=True, timeout=10)
         sc = rqst.status_code
         if sc == 200:
             domain = target.split('//')
@@ -36,7 +36,7 @@ def crawler(target,port):
             sm_url = 'http://{}/sitemap.xml'.format(domain)
 
             print( '[+]' + ' Looking for robots.txt' , end = '')
-            r_rqst = requests.get(r_url, headers=user_agent, verify=False, timeout=10)
+            r_rqst = requests.get(r_url, headers=user_agent, verify=True, timeout=10)
             r_sc = r_rqst.status_code
 
             if r_sc == 200:
@@ -72,7 +72,7 @@ def crawler(target,port):
                 print( '['.rjust(9, '.') + ' {} ]'.format(r_sc) )
 
             print('[+]' + ' Looking for sitemap.xml' , end = '')
-            sm_rqst = requests.get(sm_url, headers=user_agent, verify=False, timeout=10)
+            sm_rqst = requests.get(sm_url, headers=user_agent, verify=True, timeout=10)
             sm_sc = sm_rqst.status_code
             if sm_sc == 200:
                 print('['.rjust(8, '.') + ' Found ]' )
diff --git a/plugins/webosint/who/whois.py b/plugins/webosint/who/whois.py
index 4915dc7..51934fe 100644
--- a/plugins/webosint/who/whois.py
+++ b/plugins/webosint/who/whois.py
@@ -1,4 +1,5 @@
 import requests
+
 def whois_more(IP):
     result = requests.get('http://api.hackertarget.com/whois/?q=' + IP).text
     print('\n'+ result + '\n')
diff --git a/plugins/webvuln/clickjacking.py b/plugins/webvuln/clickjacking.py
index 47cee35..9659fd8 100644
--- a/plugins/webvuln/clickjacking.py
+++ b/plugins/webvuln/clickjacking.py
@@ -1,4 +1,4 @@
-from urllib.request import urlopen
+import urllib.request
 
 def ClickJacking(host, port):
 
@@ -12,8 +12,8 @@ def ClickJacking(host, port):
 
     url = (port+host)
 
-    data = urlopen(url)
-    headers = data.info()
+    data=urllib.request.urlretrieve("http://www.skcet.ac.in")[1]
+    headers=data.as_string()
     if not "X-Frame-Options" in headers:
         print("Website is vulnerable to ClickJacking")
 
diff --git a/plugins/whois.py b/plugins/whois.py
index 3eda9a6..4f4ff37 100644
--- a/plugins/whois.py
+++ b/plugins/whois.py
@@ -2,5 +2,5 @@
 def whois(wh):
     url = wh
-    result = get('http://api.hackertarget.com/whois/?q=' + url).text
+    result = requests.get('http://api.hackertarget.com/whois/?q=' + url).text
     print('\n'+ result + '\n')
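
The `urlretrieve(...)[1]` pattern introduced in this patch reads headers off a temporary download, and the hardcoded test URL is corrected in the next patch. For reference, a simpler sketch of the same X-Frame-Options probe using `requests`, which is the shape the plugin eventually settles on in PATCH 18:

```python
import requests

def clickjacking_check(url):
    """Flag a page as potentially frameable when X-Frame-Options is absent."""
    headers = requests.get(url, timeout=10).headers  # case-insensitive mapping
    if "X-Frame-Options" not in headers:
        print("Website is vulnerable to ClickJacking")
    else:
        print("X-Frame-Options set to:", headers["X-Frame-Options"])

clickjacking_check("https://example.com")
```

A fuller check would also look for a `Content-Security-Policy` header with a `frame-ancestors` directive, which supersedes X-Frame-Options in modern browsers.
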
"X-Frame-Options" in headers: print("Website is vulnerable to ClickJacking") diff --git a/plugins/whois.py b/plugins/whois.py index 3eda9a6..4f4ff37 100644 --- a/plugins/whois.py +++ b/plugins/whois.py @@ -2,5 +2,5 @@ def whois(wh): url = wh - result = get('http://api.hackertarget.com/whois/?q=' + url).text + result = requests.get('http://api.hackertarget.com/whois/?q=' + url).text print('\n'+ result + '\n') From bfb681f5a5448a2e13d0b4340b2e95ef9866edc1 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Fri, 3 Apr 2020 21:57:37 +0530 Subject: [PATCH 15/30] clickjacking fixed --- README.md | 2 +- core/repl_prompt.py | 2 +- plugins/webvuln/clickjacking.py | 2 +- 3 files changed, 3 insertions(+), 3 deletions(-) diff --git a/README.md b/README.md index b761409..d2d631e 100644 --- a/README.md +++ b/README.md @@ -300,7 +300,7 @@ IPADDRESS (Eg:192.168.1.1) >> This option allows you to identify all breached mail ID from given domain. ``` Reconspider >> 13 -MAIL ADDRESS (Eg:intercom.io) >> +DOMAIN (Eg:intercom.io) >> ``` **99. UPDATE** diff --git a/core/repl_prompt.py b/core/repl_prompt.py index 3721842..eb841a8 100644 --- a/core/repl_prompt.py +++ b/core/repl_prompt.py @@ -120,7 +120,7 @@ def repl(): # Read–eval–print loop elif choice == 13: while 1: - web = prompt("MAIL ADDRESS (Eg:intercom.io) >> ") + web = prompt("DOMAIN (Eg:intercom.io) >> ") break maildb(web) continue diff --git a/plugins/webvuln/clickjacking.py b/plugins/webvuln/clickjacking.py index 9659fd8..b4a3e02 100644 --- a/plugins/webvuln/clickjacking.py +++ b/plugins/webvuln/clickjacking.py @@ -12,7 +12,7 @@ def ClickJacking(host, port): url = (port+host) - data=urllib.request.urlretrieve("http://www.skcet.ac.in")[1] + data=urllib.request.urlretrieve(url)[1] headers=data.as_string() if not "X-Frame-Options" in headers: From 9c36d79b871d03991e974611eda51b37a4452e5c Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Fri, 3 Apr 2020 22:22:43 +0530 Subject: [PATCH 16/30] Fixed --- plugins/Username.py | 7 ++++--- plugins/webvuln/clickjacking.py | 6 +++--- 2 files changed, 7 insertions(+), 6 deletions(-) diff --git a/plugins/Username.py b/plugins/Username.py index aa8e19e..10f86ae 100644 --- a/plugins/Username.py +++ b/plugins/Username.py @@ -105,9 +105,10 @@ def Instagram(username): def ScrapTweets(username): link = "https://twitter.com/" + username - the_client = urllib.request.Request(link) - with urllib.request.urlopen(the_client) as response: - page_html = response.read() + if link.lower().startswith('http'): + the_client = urllib.request.Request(link) + with urllib.request.urlopen(the_client) as response: + page_html = response.read() #the_client.close() soup = BeautifulSoup(page_html, 'html.parser') diff --git a/plugins/webvuln/clickjacking.py b/plugins/webvuln/clickjacking.py index b4a3e02..24ff5e9 100644 --- a/plugins/webvuln/clickjacking.py +++ b/plugins/webvuln/clickjacking.py @@ -11,9 +11,9 @@ def ClickJacking(host, port): url = (port+host) - - data=urllib.request.urlretrieve(url)[1] - headers=data.as_string() + if url.lower().startswith('http'): + data=urllib.request.urlretrieve(url)[1] + headers=data.as_string() if not "X-Frame-Options" in headers: print("Website is vulnerable to ClickJacking") From f508f0a3995f9f67af1833a4cadcc0c6fd4956c4 Mon Sep 17 00:00:00 2001 From: aravindha1234u Date: Fri, 3 Apr 2020 22:29:54 +0530 Subject: [PATCH 17/30] Update --- plugins/Username.py | 7 +++---- 1 file changed, 3 insertions(+), 4 deletions(-) diff --git a/plugins/Username.py b/plugins/Username.py index 10f86ae..aa8e19e 100644 --- 
From f508f0a3995f9f67af1833a4cadcc0c6fd4956c4 Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Fri, 3 Apr 2020 22:29:54 +0530
Subject: [PATCH 17/30] Update

---
 plugins/Username.py | 7 +++----
 1 file changed, 3 insertions(+), 4 deletions(-)

diff --git a/plugins/Username.py b/plugins/Username.py
index 10f86ae..aa8e19e 100644
--- a/plugins/Username.py
+++ b/plugins/Username.py
@@ -105,10 +105,9 @@ def Instagram(username):
 
 def ScrapTweets(username):
     link = "https://twitter.com/" + username
-    if link.lower().startswith('http'):
-        the_client = urllib.request.Request(link)
-        with urllib.request.urlopen(the_client) as response:
-            page_html = response.read()
+    the_client = urllib.request.Request(link)
+    with urllib.request.urlopen(the_client) as response:
+        page_html = response.read()
     #the_client.close()
 
     soup = BeautifulSoup(page_html, 'html.parser')

From 897257d5c6f9c2f55481ed8721bc02cb5db4dcbb Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Sat, 4 Apr 2020 19:08:13 +0530
Subject: [PATCH 18/30] Fixed CORS

---
 README.md                       |  4 +--
 plugins/Username.py             |  6 +---
 plugins/dnsdump.py              | 14 ++++----
 plugins/domain.py               | 24 +++-----------
 plugins/honeypot.py             | 16 ++++-----
 plugins/webvuln/bruteforce.py   | 58 ---------------------------------
 plugins/webvuln/clickjacking.py |  8 ++---
 plugins/webvuln/cors.py         |  2 +-
 reconspider.py                  |  9 +----
 9 files changed, 28 insertions(+), 113 deletions(-)

diff --git a/README.md b/README.md
index d2d631e..11ccaad 100644
--- a/README.md
+++ b/README.md
@@ -96,7 +96,7 @@ __________ _________ __ ___
 
 developer: https://bhavkaran.com
 
-ENTER 0 - 11 TO SELECT OPTIONS
+ENTER 0 - 13 TO SELECT OPTIONS
 
 1. IP Enumerate information from IP Address
 2. DOMAIN Gather information about given DOMAIN
@@ -295,7 +295,7 @@ Reconspider >> 12
 IPADDRESS (Eg:192.168.1.1) >>
 ```
 
-**12. MAIL BREACH**
+**13. MAIL BREACH**
 
 This option allows you to identify all breached mail IDs for a given domain.
 ```
diff --git a/plugins/Username.py b/plugins/Username.py
index aa8e19e..f0c586d 100644
--- a/plugins/Username.py
+++ b/plugins/Username.py
@@ -1,6 +1,5 @@
 import requests
 from bs4 import BeautifulSoup
-import urllib.request
 
 out=[]
 
@@ -105,11 +104,8 @@ def Instagram(username):
 
 def ScrapTweets(username):
     link = "https://twitter.com/" + username
-    the_client = urllib.request.Request(link)
-    with urllib.request.urlopen(the_client) as response:
-        page_html = response.read()
-    #the_client.close()
 
+    page_html=requests.get(link).content
     soup = BeautifulSoup(page_html, 'html.parser')
 
     try:
diff --git a/plugins/dnsdump.py b/plugins/dnsdump.py
index c7f14cc..54934f9 100644
--- a/plugins/dnsdump.py
+++ b/plugins/dnsdump.py
@@ -6,24 +6,26 @@
 def dnsmap(dnsmap_inp):
     domain = dnsmap_inp
-    response = requests.Session().get('https://dnsdumpster.com/').text
-    csrf_token = re.search(
-        r"name='csrfmiddlewaretoken' value='(.*?)'", response).group(1)
+    r=requests.Session()
+    '''response = r.get('https://dnsdumpster.com/').text
+    csrf_token = re.search(r"name='csrfmiddlewaretoken' value='(.*?)'", response).group(1)
 
     cookies = {'csrftoken': csrf_token}
     headers = {'Referer': 'https://dnsdumpster.com/'}
     data = {'csrfmiddlewaretoken': csrf_token, 'targetip': domain}
-    response = requests.Session().post(
-        'https://dnsdumpster.com/', cookies=cookies, data=data, headers=headers)
+    response = r.post('https://dnsdumpster.com/', cookies=cookies, data=data, headers=headers)'''
     image = requests.get('https://dnsdumpster.com/static/map/%s.png' % domain)
+
     if image.status_code == 200:
         image_name = domain.replace(".com","")
         with open('%s.png' % image_name, 'wb') as f:
             f.write(image.content)
         print("\n%s.png DNS Map image stored to current reconspider directory" % image_name)
-
+
         if (platform.system() != "Windows"):
             pass
         else:
             os.startfile('%s.png' % image_name)
+    else:
+        print("Sorry, we could not fetch the DNS map")
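
With this patch the Username plugin fetches profile pages through `requests` and hands the raw bytes straight to BeautifulSoup, and the dnsdump plugin now reports a failed map download instead of failing silently. A minimal sketch of that fetch-and-parse pattern (the `<title>` selector is illustrative; the real plugin targets Facebook- and Twitter-specific markup):

```python
import requests
from bs4 import BeautifulSoup

def page_title(url):
    """Download a page and extract its <title> text with BeautifulSoup."""
    page_html = requests.get(url, timeout=10).content
    soup = BeautifulSoup(page_html, "html.parser")
    title = soup.find("title")
    return title.get_text(strip=True) if title else None

print(page_title("https://example.com"))
```
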
diff --git a/plugins/domain.py b/plugins/domain.py
index 9390bd3..26b56e9 100644
--- a/plugins/domain.py
+++ b/plugins/domain.py
@@ -4,7 +4,7 @@
 from .webosint.portscan import DefaultPort,Customrange
 from .webosint.reverseip import ReverseIP
 from .webosint.subdomain import SubDomain
-from .webvuln.bruteforce import ssh,ftp
+from .webvuln.bruteforce import ssh
 from .webvuln.clickjacking import ClickJacking
 from .webvuln.cors import Cors
 from .webvuln.hostheader import HostHeader
@@ -54,27 +54,11 @@ def nmaprec(host,port):
         else:
             print("Please choose an Appropriate option")
 
-BruteFunctions = {
-    1: ssh,
-    2: ftp
-    }
+BruteFunctions = {1: ssh}
 
 def BruteForce(host, port):
-    Selection = 1
-    while True:
-        print('')
-        print("1. SSH")
-        print("2. FTP")
-        print("3. Main Menu")
-        print('')
-        Selection = int(input("BruteForce >> "))
-        print('')
-        if (Selection >= 0) and (Selection < 3):
-            BruteFunctions[Selection](host, port)
-        elif Selection == 3:
-            Menu(host,port)
-        else:
-            print("Please choose an Appropriate option")
+    print("\nBrute Forcing SSH")
+    BruteFunctions[1](host,port)
 
 
 MainFunctions = {
diff --git a/plugins/honeypot.py b/plugins/honeypot.py
index 1bce654..fa53d0b 100644
--- a/plugins/honeypot.py
+++ b/plugins/honeypot.py
@@ -1,7 +1,6 @@
 import sys
 from requests import get
 from core.config import shodan_api
-from core.colors import bad, info, red, green, end
 
 def honeypot(inp):
 
@@ -11,10 +10,11 @@ def honeypot(inp):
     except:
         result = None
-        sys.stdout.write('\n%s No information available' % bad + '\n')
-    if result:
-        if float(result) < 0.5:
-            color = green
-        else:
-            color = red
-        probability = str(float(result) * 10)
-        print('\n%sHoneypot Probabilty: %s%s%%%s' % (info, color, probability, end) + '\n')
+        sys.stdout.write('\n[-] No information available' + '\n')
+    if result is None or "error" in result or "404" in result:
+        print("IP Not found")
+        return
+    elif result:
+        probability = str(float(result) * 10)
+        print('\n[+] Honeypot Probability: %s%%' % (probability) + '\n')
+    else:
+        print("Something went wrong")
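
The honeypot rewrite above reports Shodan's honeyscore, a value between 0.0 and 1.0, multiplied by ten as a percentage, so even a maximal score prints as 10%; multiplying by 100 may be what was intended. A sketch of the lookup with a guard for missing results (the endpoint URL is an assumption modeled on Shodan's Honeyscore service and is not visible in this diff):

```python
from requests import get

SHODAN_API_KEY = "YOUR_SHODAN_KEY"  # placeholder

def honeypot_probability(ip):
    """Fetch a honeyscore (0.0-1.0) and report it as a percentage."""
    # Assumed endpoint, based on Shodan Labs' Honeyscore service
    resp = get("https://api.shodan.io/labs/honeyscore/%s?key=%s"
               % (ip, SHODAN_API_KEY), timeout=10)
    if resp.status_code != 200:
        print("No information available")
        return
    score = float(resp.text)
    print("Honeypot Probability: %.0f%%" % (score * 100))

honeypot_probability("1.1.1.1")
```
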
diff --git a/plugins/webvuln/bruteforce.py b/plugins/webvuln/bruteforce.py
index 0a7a599..ac2765e 100644
--- a/plugins/webvuln/bruteforce.py
+++ b/plugins/webvuln/bruteforce.py
@@ -1,6 +1,5 @@
 import paramiko
 import socket
-from ftplib import FTP
 
 def ssh(host, port):
     print("1. Default Port (22)")
@@ -114,60 +113,3 @@ def ssh(host, port):
                 print("Invalid Credentials")
     except socket.error as e:
         print("Error : %s" % e)
-
-
-
-
-
-def ftp(host, port):
-    s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-    port = 21
-    s.settimeout(10)
-    try:
-        connect = s.connect_ex((host, port))
-        if connect != 0:
-            print("[+] Port %s: Closed" % port)
-            s.close()
-
-        elif connect == 0:
-            print("[+] Port %s: Open" % port)
-            s.close()
-            wordlist = input("Enter Wordlist location (Press Enter for Default Wordlist) : ")
-            if wordlist == '':
-                f = open("src/ftp.ini", "r")
-                f1 = f.readlines()
-            else:
-                f = open(wordlist, "r")
-                f1 = f.readlines()
-            for x in f1:
-                y = x.split(':')
-                username = y[0].strip(":")
-                password = y[1].strip("\n")
-                ftp = FTP(host)
-                print("Checking with Username : %s , Password : %s" % (username, password))
-                try:
-                    ftp.login(user='username', passwd='password')
-                    flag = 0
-
-                except socket.error as e:
-                    flag = 2
-                    print(e)
-
-                except KeyboardInterrupt:
-                    print("\n User Interrupt! Exitting...")
-                    exit()
-
-                except Exception as e:
-                    flag = 1
-
-                if flag == 0:
-                    print('')
-                    print("Credentials Found")
-                    print("Username : %s" % username)
-                    print("Password : %s" % password)
-                    print('')
-                elif flag == 1:
-                    print("Invalid Credentials")
-
-    except socket.error as e:
-        print("Error : %s" %e)
diff --git a/plugins/webvuln/clickjacking.py b/plugins/webvuln/clickjacking.py
index 24ff5e9..fd32ac7 100644
--- a/plugins/webvuln/clickjacking.py
+++ b/plugins/webvuln/clickjacking.py
@@ -1,4 +1,4 @@
-import urllib.request
+import requests
 
 def ClickJacking(host, port):
 
@@ -11,10 +11,8 @@ def ClickJacking(host, port):
 
     url = (port+host)
 
-    if url.lower().startswith('http'):
-        data=urllib.request.urlretrieve(url)[1]
-        headers=data.as_string()
-
+    page=requests.get(url)
+    headers=page.headers
     if not "X-Frame-Options" in headers:
         print("Website is vulnerable to ClickJacking")
 
diff --git a/plugins/webvuln/cors.py b/plugins/webvuln/cors.py
index 3442335..a92b3b0 100644
--- a/plugins/webvuln/cors.py
+++ b/plugins/webvuln/cors.py
@@ -19,7 +19,7 @@ def Cors(host, port):
         print("1. CORS check in Default Host")
         print("2. CORS check in Host's Custom Endpoint")
         print('')
-        choice = int(input('root@osint:~/Domain/CORS#'))
+        choice = int(input('CORS >>'))
         print('')
         cookies = input("Paste the Cookies (If None,then hit enter) : ")
         global header1
diff --git a/reconspider.py b/reconspider.py
index e238091..100979a 100644
--- a/reconspider.py
+++ b/reconspider.py
@@ -12,7 +12,7 @@ def banner():
 
 developer: https://bhavkaran.com
 
-ENTER 0 - 11 TO SELECT OPTIONS
+ENTER 0 - 13 TO SELECT OPTIONS
 
 1. IP Enumerate information from IP Address
 2. DOMAIN Gather information about given DOMAIN
@@ -39,10 +39,3 @@ def banner():
     except ModuleNotFoundError:
         print('\nSeems like you haven\'t installed Requirements, Please install using: python setup.py install')
         quit()
-else:
-    try:
-        print(banner())
-        from core import repl_prompt
-    except ImportError:
-        print('\nSeems like you haven\'t installed Requirements, Please install using: python setup.py install')
-        quit()

From 6fc4b2568a74faa3951a0a700accd0be59f7fffa Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Sat, 4 Apr 2020 19:12:00 +0530
Subject: [PATCH 19/30] dnsdump fixed

---
 plugins/dnsdump.py | 1 -
 1 file changed, 1 deletion(-)

diff --git a/plugins/dnsdump.py b/plugins/dnsdump.py
index 54934f9..6fe4ecd 100644
--- a/plugins/dnsdump.py
+++ b/plugins/dnsdump.py
@@ -6,7 +6,6 @@
 def dnsmap(dnsmap_inp):
     domain = dnsmap_inp
-    r=requests.Session()
     '''response = r.get('https://dnsdumpster.com/').text
     csrf_token = re.search(r"name='csrfmiddlewaretoken' value='(.*?)'", response).group(1)
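
PATCH 18 above trims the brute-force module down to the paramiko-based SSH path. A condensed sketch of the single-credential attempt at its core, assuming a reachable SSH service (the plugin's wordlist loop and port probing are omitted):

```python
import socket
import paramiko

def try_ssh_login(host, username, password, port=22):
    """Return True when the username/password pair authenticates over SSH."""
    client = paramiko.SSHClient()
    client.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    try:
        client.connect(host, port=port, username=username,
                       password=password, timeout=5)
        return True
    except paramiko.AuthenticationException:
        return False            # wrong credentials; keep trying the wordlist
    except (paramiko.SSHException, socket.error) as e:
        print("Connection error: %s" % e)
        return False
    finally:
        client.close()

if try_ssh_login("192.0.2.10", "root", "toor"):
    print("Credentials Found")
```
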
From d82740a5e2bc7f988a3ec7578d8dc68fb8e1c160 Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Sat, 4 Apr 2020 19:14:35 +0530
Subject: [PATCH 20/30] Updated README.md

---
 README.md          | 2 +-
 plugins/dnsdump.py | 7 -------
 2 files changed, 1 insertion(+), 8 deletions(-)

diff --git a/README.md b/README.md
index 11ccaad..780aca0 100644
--- a/README.md
+++ b/README.md
@@ -19,7 +19,7 @@ ReconSpider can be used by Infosec Researchers, Penetration Testers, Bug Hunters
 
 ReconSpider aggregate all the raw data, visualize it on a dashboard and facilitate alerting and monitoring on the data.
 
-Recon Spider also combines the capabilities of [Photon](https://github.com/s0md3v/Photon) and [Recon Dog](https://github.com/s0md3v/ReconDog) to do a comprehensive enumeration of attack surface.
+Recon Spider also combines the capabilities of [Wave](https://github.com/adithyan-ak/WAVE),[Photon](https://github.com/s0md3v/Photon) and [Recon Dog](https://github.com/s0md3v/ReconDog) to do a comprehensive enumeration of attack surface.
 
 # Why it's called ReconSpider ?
diff --git a/plugins/dnsdump.py b/plugins/dnsdump.py
index 6fe4ecd..04eb471 100644
--- a/plugins/dnsdump.py
+++ b/plugins/dnsdump.py
@@ -6,13 +6,6 @@
 def dnsmap(dnsmap_inp):
     domain = dnsmap_inp
-    '''response = r.get('https://dnsdumpster.com/').text
-    csrf_token = re.search(r"name='csrfmiddlewaretoken' value='(.*?)'", response).group(1)
-
-    cookies = {'csrftoken': csrf_token}
-    headers = {'Referer': 'https://dnsdumpster.com/'}
-    data = {'csrfmiddlewaretoken': csrf_token, 'targetip': domain}
-    response = r.post('https://dnsdumpster.com/', cookies=cookies, data=data, headers=headers)'''
 
     image = requests.get('https://dnsdumpster.com/static/map/%s.png' % domain)
 
From 1907de25ee82e7b5fec3787dfd993e748d7b1764 Mon Sep 17 00:00:00 2001
From: Aravindha1234u <52521300+Aravindha1234u@users.noreply.github.com>
Date: Sun, 5 Apr 2020 08:43:19 +0530
Subject: [PATCH 21/30] Update setup.py

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 8a7eb1f..e3f4cec 100644
--- a/setup.py
+++ b/setup.py
@@ -13,7 +13,7 @@
     author="BhavKaran (@bhavsec)",
     author_email="contact@bhavkaran.com",
     license="GPL-3.0",
-    install_requires=["shodan", "requests", "prompt_toolkit","beautifulsoup4","urllib3","IP2proxy","wget","paramiko","h8mail"],
+    install_requires=["shodan", "requests", "prompt_toolkit","beautifulsoup4","urllib3","IP2proxy","wget","paramiko","h8mail","nmap","pythonping","whois","gmplot"],
     console=["reconspider.py"],
 )

From 6194f19799e2acdf31ffbb28fdfa67a996b0b935 Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Sun, 5 Apr 2020 13:39:52 +0530
Subject: [PATCH 22/30] Updated setup.py

---
 reconspider.py | 7 +++++++
 setup.py       | 2 +-
 2 files changed, 8 insertions(+), 1 deletion(-)

diff --git a/reconspider.py b/reconspider.py
index 100979a..fd66c85 100644
--- a/reconspider.py
+++ b/reconspider.py
@@ -39,3 +39,10 @@ def banner():
     except ModuleNotFoundError:
         print('\nSeems like you haven\'t installed Requirements, Please install using: python setup.py install')
         quit()
+else:
+    try:
+        print(banner())
+        from core import repl_prompt
+    except ImportError:
+        print('\nSeems like you haven\'t installed Requirements, Please install using: python setup.py install')
+        quit()
diff --git a/setup.py b/setup.py
index 8a7eb1f..1f3253a 100644
--- a/setup.py
+++ b/setup.py
@@ -13,7 +13,7 @@
     author="BhavKaran (@bhavsec)",
     author_email="contact@bhavkaran.com",
     license="GPL-3.0",
-    install_requires=["shodan", "requests", "prompt_toolkit","beautifulsoup4","urllib3","IP2proxy","wget","paramiko","h8mail"],
+    install_requires=["shodan", "requests","prompt_toolkit","wget","beautifulsoup4","click","urllib3","IP2proxy","wget","paramiko","h8mail","pillow"],
     console=["reconspider.py"],
 )
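
PATCH 21 and PATCH 22 both extend `install_requires` on parallel branches, which is exactly how the merge conflict that the next patch resolves by hand comes about. For reference, a trimmed sketch of the relevant `setuptools` call, with unpinned names as in the project (the version pin in the comment is illustrative only):

```python
from setuptools import setup

setup(
    name="reconspider",
    install_requires=[
        "shodan",
        "requests",        # could be pinned, e.g. "requests>=2.23"
        "prompt_toolkit",
        "beautifulsoup4",
        "paramiko",
        "h8mail",
    ],
)
```
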
From 1702a99fa4c4805b2bfb90e34d9c8853504b9b6b Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Sun, 5 Apr 2020 13:45:36 +0530
Subject: [PATCH 23/30] Updated reconspider.py

---
 README.md | 2 +-
 setup.py  | 6 +-----
 2 files changed, 2 insertions(+), 6 deletions(-)

diff --git a/README.md b/README.md
index 780aca0..8b8ef34 100644
--- a/README.md
+++ b/README.md
@@ -19,7 +19,7 @@ ReconSpider can be used by Infosec Researchers, Penetration Testers, Bug Hunters
 
 ReconSpider aggregate all the raw data, visualize it on a dashboard and facilitate alerting and monitoring on the data.
 
-Recon Spider also combines the capabilities of [Wave](https://github.com/adithyan-ak/WAVE),[Photon](https://github.com/s0md3v/Photon) and [Recon Dog](https://github.com/s0md3v/ReconDog) to do a comprehensive enumeration of attack surface.
+Recon Spider also combines the capabilities of [Wave](https://github.com/adithyan-ak/WAVE), [Photon](https://github.com/s0md3v/Photon) and [Recon Dog](https://github.com/s0md3v/ReconDog) to do a comprehensive enumeration of attack surface.
 
 # Why it's called ReconSpider ?
diff --git a/setup.py b/setup.py
index 35c6e64..a9b114b 100644
--- a/setup.py
+++ b/setup.py
@@ -13,11 +13,7 @@
     author="BhavKaran (@bhavsec)",
     author_email="contact@bhavkaran.com",
     license="GPL-3.0",
-<<<<<<< HEAD
-    install_requires=["shodan", "requests","prompt_toolkit","wget","beautifulsoup4","click","urllib3","IP2proxy","wget","paramiko","h8mail","pillow"],
-=======
-    install_requires=["shodan", "requests", "prompt_toolkit","beautifulsoup4","urllib3","IP2proxy","wget","paramiko","h8mail","nmap","pythonping","whois","gmplot"],
->>>>>>> 1907de25ee82e7b5fec3787dfd993e748d7b1764
+    install_requires=["shodan", "requests", "prompt_toolkit","wget","beautifulsoup4","click","urllib3","IP2proxy","wget","paramiko","h8mail","nmap","pythonping","whois","gmplot","pillow"],
     console=["reconspider.py"],
 )

From 3cd40d8a50e550cd7e82fac28ea58d1310b9c9de Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Sun, 5 Apr 2020 13:56:57 +0530
Subject: [PATCH 24/30] Updated Setup.py

---
 setup.py | 7 +++++--
 1 file changed, 5 insertions(+), 2 deletions(-)

diff --git a/setup.py b/setup.py
index a9b114b..579ef6d 100644
--- a/setup.py
+++ b/setup.py
@@ -1,5 +1,6 @@
 from setuptools import setup
 import os
+import pip
 
 fout = open("core/config.py", "w")
 fout.write("shodan_api = " + '"' + "C23OXE0bVMrul2YeqcL7zxb6jZ4pj2by" + '"' + "\n")
@@ -16,8 +17,10 @@
     install_requires=["shodan", "requests", "prompt_toolkit","wget","beautifulsoup4","click","urllib3","IP2proxy","wget","paramiko","h8mail","nmap","pythonping","whois","gmplot","pillow"],
     console=["reconspider.py"],
 )
-
-import wget
+try:
+    import wget
+except:
+    pip.main(['install','wget'])
 
 #Database
 url="https://www.ip2location.com/download?token=hg5uYe2Jvri4R7P1j8b71Pk8dnvIU2M6A9jz2tvcVtGx8ZK2UPQgzr6Hk3cV68oH&file=PX8LITEBIN"

From ed5ec3406c255593078f392ef1d335ce586428fa Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Sun, 5 Apr 2020 14:00:10 +0530
Subject: [PATCH 25/30] Exception fixed

---
 setup.py | 4 +++-
 1 file changed, 3 insertions(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 579ef6d..15dabdb 100644
--- a/setup.py
+++ b/setup.py
@@ -17,9 +17,11 @@
     install_requires=["shodan", "requests", "prompt_toolkit","wget","beautifulsoup4","click","urllib3","IP2proxy","wget","paramiko","h8mail","nmap","pythonping","whois","gmplot","pillow"],
     console=["reconspider.py"],
 )
+
 try:
     import wget
-except:
+except Exception as e:
+    print(e)
     pip.main(['install','wget'])
 
 #Database
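
Calling `pip.main()` from a script, as PATCH 24 introduces and PATCH 25 keeps, only works on older pip releases; the function was removed from pip's public API in pip 10. A more portable fallback sketch using a subprocess (an alternative pattern, not what these patches ship):

```python
import subprocess
import sys

def ensure_package(name):
    """Import a package, installing it via pip in a subprocess when missing."""
    try:
        return __import__(name)
    except ImportError:
        subprocess.check_call([sys.executable, "-m", "pip", "install", name])
        return __import__(name)

wget = ensure_package("wget")
```
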
From 036002c042cf6eadd9777f88f138a1661f46b88a Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Sun, 5 Apr 2020 14:03:20 +0530
Subject: [PATCH 26/30] Fixed Import module error

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 15dabdb..22fd143 100644
--- a/setup.py
+++ b/setup.py
@@ -23,7 +23,7 @@
 except Exception as e:
     print(e)
     pip.main(['install','wget'])
-
+import wget
 #Database
 url="https://www.ip2location.com/download?token=hg5uYe2Jvri4R7P1j8b71Pk8dnvIU2M6A9jz2tvcVtGx8ZK2UPQgzr6Hk3cV68oH&file=PX8LITEBIN"
 print('\nDownloading IP2PROXY-IP-PROXYTYPE-COUNTRY-REGION-CITY-ISP-DOMAIN-USAGETYPE-ASN-LASTSEEN.BIN...')

From c9700c655ccfabee318e6b6ac071982e44a7a3f2 Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Sun, 5 Apr 2020 14:09:09 +0530
Subject: [PATCH 27/30] lxml for crawler updated

---
 setup.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/setup.py b/setup.py
index 22fd143..2ffd8f5 100644
--- a/setup.py
+++ b/setup.py
@@ -14,7 +14,7 @@
     author="BhavKaran (@bhavsec)",
     author_email="contact@bhavkaran.com",
     license="GPL-3.0",
-    install_requires=["shodan", "requests", "prompt_toolkit","wget","beautifulsoup4","click","urllib3","IP2proxy","wget","paramiko","h8mail","nmap","pythonping","whois","gmplot","pillow"],
+    install_requires=["shodan", "requests", "prompt_toolkit","wget","beautifulsoup4","click","urllib3","IP2proxy","wget","paramiko","h8mail","nmap","pythonping","whois","gmplot","pillow","lxml"],
     console=["reconspider.py"],
 )
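
PATCH 26 finishes the database bootstrap by importing `wget` after the fallback install and pulling the IP2Proxy archive from the `url` defined above. A small sketch of the `wget` package's download call (the URL and output path here are illustrative placeholders, not the project's token-bearing link):

```python
import wget

url = "https://example.com/GeoLite2-City.mmdb.gz"  # illustrative placeholder
# wget.download returns the path it saved to; `out` overrides the filename
saved = wget.download(url, out="plugins/GeoLite2-City.mmdb.gz")
print("\nSaved to", saved)
```
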
From 1084634c1f828b805ced328c041f646a08be00c1 Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Sun, 5 Apr 2020 15:14:59 +0530
Subject: [PATCH 28/30] Nmap Fixed

---
 core/repl_prompt.py           |  2 +-
 plugins/Username.py           | 28 ++++++++++++++------------
 plugins/domain.py             | 31 ++++++++++++++++++-------------
 plugins/portscan.py           |  2 +-
 plugins/webvuln/hostheader.py |  1 +
 5 files changed, 37 insertions(+), 27 deletions(-)

diff --git a/core/repl_prompt.py b/core/repl_prompt.py
index eb841a8..980f391 100644
--- a/core/repl_prompt.py
+++ b/core/repl_prompt.py
@@ -28,7 +28,7 @@ def repl(): # Read–eval–print loop
         try:
             choice = int(user_input)
         except ValueError:
-            print("ENTER 1 - 7 TO SELECT OPTIONS")
+            print("ENTER 1 - 13 TO SELECT OPTIONS")
             continue
 
     if choice == 1:
diff --git a/plugins/Username.py b/plugins/Username.py
index f0c586d..bf90cd1 100644
--- a/plugins/Username.py
+++ b/plugins/Username.py
@@ -37,18 +37,22 @@ def find_name():
 ###Finding About the user details
 #finding work details of the user
 def find_eduwork_details():
-    education = soup.find(id="pagelet_eduwork")
-    apple=education.find(attrs={"class":"_4qm1"})
-    if (apple.get_text() != " "):
-        for category in education.find_all(attrs={"class":"_4qm1"}):
-            print(category.find('span').get_text() + " : ")
-            for company in category.find_all(attrs={"class":"_2tdc"}):
-                if (company.get_text() != " "):
-                    print(company.get_text())
-                else:
-                    continue
-    else:
-        print("No work details found")
+    try:
+        education = soup.find(id="pagelet_eduwork")
+        apple=education.find(attrs={"class":"_4qm1"})
+        if (apple.get_text() != " "):
+            for category in education.find_all(attrs={"class":"_4qm1"}):
+                print(category.find('span').get_text() + " : ")
+                for company in category.find_all(attrs={"class":"_2tdc"}):
+                    if (company.get_text() != " "):
+                        print(company.get_text())
+                    else:
+                        continue
+        else:
+            print("No work details found")
+    except Exception as e:
+        print(str(e))
+        print()
 
 #finding home details of the user
 def find_home_details():
diff --git a/plugins/domain.py b/plugins/domain.py
index 26b56e9..cd2049d 100644
--- a/plugins/domain.py
+++ b/plugins/domain.py
@@ -11,6 +11,7 @@
 from .webosint.header import header
 from .webosint.crawler import crawler
 from .webosint.who.whoami import whoami
+from .plugins.portscan import PortScan
 
 # Checking whether the target host is alive or dead
 def CheckTarget(host,port):
@@ -40,19 +41,23 @@ def domain(host,port):
 
 
 def nmaprec(host,port):
-    Choice = 1
-    while True:
-        print("1. Scan Default Ports (22-443)")
-        print("2. Enter Custom Range")
-        print("3. Back to Main Menu")
-        print('')
-        Choice = int(input(">> "))
-        if (Choice >= 0) and (Choice < 3):
-            NmapFunctions[Choice](host, port)
-        elif Choice == 3:
-            Menu(host,port)
-        else:
-            print("Please choose an Appropriate option")
+    try:
+        Choice = 1
+        while True:
+            print("1. Scan Default Ports (22-443)")
+            print("2. Enter Custom Range")
+            print("3. Back to Main Menu")
+            print('')
+            Choice = int(input(">> "))
+            if (Choice >= 0) and (Choice < 3):
+                NmapFunctions[Choice](host, port)
+            elif Choice == 3:
+                Menu(host,port)
+            else:
+                print("Please choose an Appropriate option")
+    except AttributeError:
+        PortScan(host)
 
 
 BruteFunctions = {1: ssh}
diff --git a/plugins/portscan.py b/plugins/portscan.py
index 4999f50..fed172a 100644
--- a/plugins/portscan.py
+++ b/plugins/portscan.py
@@ -1,6 +1,6 @@
 from requests import get
 
-def portscan(inp):
+def PortScan(inp):
     result = get('http://api.hackertarget.com/nmap/?q=' + inp).text
     print('\n' + result + '\n')
 
diff --git a/plugins/webvuln/hostheader.py b/plugins/webvuln/hostheader.py
index 0bc000f..7d0dcc5 100644
--- a/plugins/webvuln/hostheader.py
+++ b/plugins/webvuln/hostheader.py
@@ -7,6 +7,7 @@ def HostHeader(host, port):
         port = 'https://'
     else:
         print("Could'nt fetch data for the given PORT")
+        return
     url = (port + host)
     headers = {'Host': 'http://evil.com'}
     response = requests.get(url, headers=headers)

From 60915a18b2d580ee49c38ffae7975e1ef633f4e7 Mon Sep 17 00:00:00 2001
From: Aravindha1234u <52521300+Aravindha1234u@users.noreply.github.com>
Date: Mon, 6 Apr 2020 13:35:30 +0530
Subject: [PATCH 29/30] Update domain.py

---
 plugins/domain.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/plugins/domain.py b/plugins/domain.py
index cd2049d..96f4930 100644
--- a/plugins/domain.py
+++ b/plugins/domain.py
@@ -11,7 +11,7 @@
 from .webosint.header import header
 from .webosint.crawler import crawler
 from .webosint.who.whoami import whoami
-from .plugins.portscan import PortScan
+from .portscan import PortScan
 
 # Checking whether the target host is alive or dead
 def CheckTarget(host,port):

From 56461da15e059afcadcbf44a0aa4988be1e37f09 Mon Sep 17 00:00:00 2001
From: aravindha1234u
Date: Wed, 8 Apr 2020 22:26:55 +0530
Subject: [PATCH 30/30] Banner Updated

---
 core/repl_prompt.py | 18 ++++++++++++++----
 reconspider.py      | 36 ++++++++++++++++++------------------
 2 files changed, 32 insertions(+), 22 deletions(-)

diff --git a/core/repl_prompt.py b/core/repl_prompt.py
index 980f391..0f50e98 100644
--- a/core/repl_prompt.py
+++ b/core/repl_prompt.py
@@ -17,18 +17,20 @@
 from plugins.Username import user
 from core.updater import update
 from prompt_toolkit import prompt
+from reconspider import menu
 
 def repl(): # Read–eval–print loop
     while 1:
+        print(menu())
         user_input = prompt("\nReconspider >> ")
         if len(user_input)==0:
-            print("ENTER 1 - 13 TO SELECT OPTIONS")
+            print("\n")
             continue
         try:
             choice = int(user_input)
         except ValueError:
-            print("ENTER 1 - 13 TO SELECT OPTIONS")
+            print("\n")
             continue
 
     if choice == 1:
@@ -42,9 +44,17 @@ def repl(): # Read–eval–print loop
     elif choice == 2:
         while 1:
             host = input("HOST (URL / IP) >> ")
-            port = int(input("PORT >> "))
+            port = input("PORT >> ")
+            try:
+                port = 80 if port == "" else int(port)
+                if port not in [80,443]:
+                    print("Invalid port - Available (80,443)")
+                    continue
+            except ValueError:
+                port = 80
             break
-        domain(host,port)
+        domain(host,int(port))
         continue
 
     elif choice == 3:
diff --git a/reconspider.py b/reconspider.py
index fd66c85..ea3c593 100644
--- a/reconspider.py
+++ b/reconspider.py
@@ -1,7 +1,7 @@
 import sys
 
 def banner():
-    return("""
+    return ("""
 __________ _________ __ ___
 \______ \ ____ ____ ____ ____ / _____/_____ |__| __| _/___________
 | _// __ \_/ ___\/ _ \ / \ \_____ \\\____ \| |/ __ |/ __ \_ __ \
 |____|_ /\___ >\___ >____/|___| / /_______ / __/|__\____ |\___ >__|
 \/ \/ \/ \/ \/|__| \/ \/
 
- developer: https://bhavkaran.com
-
-
+ developer: https://bhavkaran.com""")
+def menu():
+    return ("""
 ENTER 0 - 13 TO SELECT OPTIONS
 
 1. IP Enumerate information from IP Address
 2. DOMAIN Gather information about given DOMAIN
 3. PHONENUMBER Gather information about Phonenumber
 4. DNS MAP Map DNS records associated with target
 5. METADATA Extract all metadata of the given file
 6. REVERSE IMAGE SEARCH Obtain domain name or IP address mapping
 7. HONEYPOT Check if it's honeypot or a real system
 8. MAC ADDRESS LOOKUP Obtain information about given MAC address
 9. IPHEATMAP Draw out heatmap of locations of IP
 10. TORRENT Gather torrent download history of IP
 11. USERNAME Extract Account info. from social media
 12. IP2PROXY Check whether IP uses any VPN / PROXY
 13. MAIL BREACH Check breached mail IDs for a given domain
 99. UPDATE Update ReconSpider to its latest version
 
 0. EXIT Exit from ReconSpider to your terminal
 """)
 
-if sys.version_info[0] > 2:
-    try:
-        print(banner())
-        from core import repl_prompt
-    except ModuleNotFoundError:
-        print('\nSeems like you haven\'t installed Requirements, Please install using: python setup.py install')
-        quit()
-else:
-    try:
-        print(banner())
-        from core import repl_prompt
-    except ImportError:
-        print('\nSeems like you haven\'t installed Requirements, Please install using: python setup.py install')
-        quit()
+if __name__ == '__main__':
+    if sys.version_info[0] > 2:
+        try:
+            print(banner())
+            from core import repl_prompt
+        except ModuleNotFoundError:
+            print('\nSeems like you haven\'t installed Requirements, Please install using: python setup.py install')
+            quit()
+    else:
+        try:
+            from core import repl_prompt
+        except ImportError:
+            print('\nSeems like you haven\'t installed Requirements, Please install using: python setup.py install')
+            quit()
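
The final patch moves startup behind an `if __name__ == '__main__':` guard, which matters here because `core/repl_prompt.py` now imports `menu` back from `reconspider`: without the guard, that circular import would re-run the banner printing and REPL bootstrap while the module is merely being imported. A minimal illustration of the pattern:

```python
# reconspider-style module guard
def banner():
    return "banner text"

if __name__ == "__main__":
    # Runs only when executed directly, not when imported (e.g. by core.repl_prompt)
    print(banner())
```
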