Feature/disable ssl verify #33

Open · wants to merge 6 commits into master
4 changes: 2 additions & 2 deletions lib/core.py
@@ -154,13 +154,13 @@ def get_method(self):
Log.info("URL is not an HTTP url, ignoring")

@classmethod
def main(self,url,proxy,headers,payload,cookie,method=2):
def main(self,url,proxy,headers,payload,cookie,method=2,ssl_verify=True):

print(W+"*"*15)
self.payload=payload
self.url=url

self.session=session(proxy,headers,cookie)
self.session=session(proxy,headers,cookie,ssl_verify)
Log.info("Checking connection to: "+Y+url)
try:
ctr=self.session.get(url)
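The session helper these calls flow through is defined elsewhere in the repo and is not shown in this diff. A minimal sketch of how the new ssl_verify argument would presumably reach the HTTP layer, assuming the helper wraps requests.Session and receives dicts (the body below is illustrative, not the project's actual implementation):

import requests

def session(proxy, headers, cookie, ssl_verify=True):
    # Sketch only: the real session() lives outside this diff.
    # Assumption: proxy/headers/cookie arrive as dicts (or None).
    s = requests.Session()
    s.proxies = proxy or {}
    s.headers.update(headers or {})
    s.cookies.update(cookie or {})
    # requests supports a per-session verify switch; ssl_verify=False
    # disables TLS certificate validation for every request made here.
    s.verify = ssl_verify
    return s

Routing the flag through session() once keeps the call sites above unchanged apart from the extra argument, instead of threading verify=... into every individual get().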
12 changes: 6 additions & 6 deletions lib/crawler/crawler.py
@@ -11,11 +11,11 @@ class crawler:
     visited=[]
 
     @classmethod
-    def getLinks(self,base,proxy,headers,cookie):
+    def getLinks(self,base,proxy,headers,cookie,ssl_verify):
 
         lst=[]
 
-        conn=session(proxy,headers,cookie)
+        conn=session(proxy,headers,cookie,ssl_verify)
         text=conn.get(base).text
         isi=BeautifulSoup(text,"html.parser")
 
@@ -37,17 +37,17 @@ def getLinks(self,base,proxy,headers,cookie):
         return lst
 
     @classmethod
-    def crawl(self,base,depth,proxy,headers,level,method,cookie):
+    def crawl(self,base,depth,proxy,headers,level,method,cookie,ssl_verify):
 
-        urls=self.getLinks(base,proxy,headers,cookie)
+        urls=self.getLinks(base,proxy,headers,cookie,ssl_verify)
 
         for url in urls:
             if url.startswith("https://") or url.startswith("http://"):
-                p=Process(target=core.main, args=(url,proxy,headers,level,cookie,method))
+                p=Process(target=core.main, args=(url,proxy,headers,level,cookie,method,ssl_verify))
                 p.start()
                 p.join()
                 if depth != 0:
-                    self.crawl(url,depth-1,base,proxy,level,method,cookie)
+                    self.crawl(url,depth-1,proxy,headers,level,method,cookie,ssl_verify)  # args reordered to match crawl()'s signature
 
             else:
                 break
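One side effect worth flagging: with verification off, urllib3 raises an InsecureRequestWarning on each request, and because crawl() starts a fresh Process per URL the warning reappears in every child process. A small optional companion (not part of this PR) that silences the noise:

import urllib3

def quiet_insecure_warnings(ssl_verify):
    # Hypothetical helper, not in the PR: suppress the per-request
    # InsecureRequestWarning that urllib3 emits when verification is off.
    if not ssl_verify:
        urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)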
21 changes: 16 additions & 5 deletions pwnxss.py
@@ -1,5 +1,5 @@
 '''
-PwnXSS - 2019/2020
+PwnXSS - 2019/2022
 This project was created by Andripwn with Pwn0sec team.
 Copyright under the MIT license
 '''
@@ -13,6 +13,16 @@
 Github: https://www.github.com/pwn0sec/PwnXSS
 Version: 0.5 Final
 """
+def str2bool(v):
+    if isinstance(v, bool):
+        return v
+    if v.lower() in ('yes', 'true', 't', 'y', '1'):
+        return True
+    elif v.lower() in ('no', 'false', 'f', 'n', '0'):
+        return False
+    else:
+        raise argparse.ArgumentTypeError('Boolean value expected.')
+
 def check(getopt):
     payload=int(getopt.payload_level)
     if payload > 6 and getopt.payload is None:
@@ -44,25 +54,26 @@ def start():
pos_opt.add_argument("--proxy",default=None,metavar="",help="Set proxy (e.g. {'https':'https://10.10.1.10:1080'})")
pos_opt.add_argument("--about",action="store_true",help="Print information about PwnXSS tool")
pos_opt.add_argument("--cookie",help="Set cookie (e.g {'ID':'1094200543'})",default='''{"ID":"1094200543"}''',metavar="")
pos_opt.add_argument("--ssl-verify", type=str2bool, nargs='?',const=True, default=True,help="SSL Certificate Verification. Default: True")

getopt=parse.parse_args()
print(logo)
Log.info("Starting PwnXSS...")
if getopt.u:
core.main(getopt.u,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method)
core.main(getopt.u,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method,getopt.ssl_verify)

crawler.crawl(getopt.u,int(getopt.depth),getopt.proxy,getopt.user_agent,check(getopt),getopt.method,getopt.cookie)
crawler.crawl(getopt.u,int(getopt.depth),getopt.proxy,getopt.user_agent,check(getopt),getopt.method,getopt.cookie,getopt.ssl_verify)

elif getopt.single:
core.main(getopt.single,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method)
core.main(getopt.single,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method,getopt.ssl_verify)

elif getopt.about:
print("""
***************
Project: PwnXSS
License: MIT
Author: Security Executions Code
Last updates: 2019 may 26
Last updates: 2022 Dec 30
Note: Take your own RISK
****************
"""+epilog)
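For reference, here is how the new flag parses at the command line: a self-contained check of str2bool under the same argparse settings the PR uses (the converter is duplicated so the snippet runs on its own):

import argparse

def str2bool(v):
    # Same converter the PR adds to pwnxss.py.
    if isinstance(v, bool):
        return v
    if v.lower() in ('yes', 'true', 't', 'y', '1'):
        return True
    elif v.lower() in ('no', 'false', 'f', 'n', '0'):
        return False
    else:
        raise argparse.ArgumentTypeError('Boolean value expected.')

parse = argparse.ArgumentParser()
parse.add_argument("--ssl-verify", type=str2bool, nargs='?', const=True, default=True)

print(parse.parse_args([]).ssl_verify)                      # True  (default)
print(parse.parse_args(["--ssl-verify"]).ssl_verify)        # True  (bare flag falls back to const)
print(parse.parse_args(["--ssl-verify", "no"]).ssl_verify)  # False

So python pwnxss.py -u <url> --ssl-verify false disables verification for a scan against a self-signed target, while omitting the flag keeps the safe default of True.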