diff --git a/lib/core.py b/lib/core.py index c2d1169..d19c19c 100644 --- a/lib/core.py +++ b/lib/core.py @@ -154,13 +154,13 @@ def get_method(self): Log.info("URL is not an HTTP url, ignoring") @classmethod - def main(self,url,proxy,headers,payload,cookie,method=2): + def main(self,url,proxy,headers,payload,cookie,method=2,ssl_verify=True): print(W+"*"*15) self.payload=payload self.url=url - self.session=session(proxy,headers,cookie) + self.session=session(proxy,headers,cookie,ssl_verify) Log.info("Checking connection to: "+Y+url) try: ctr=self.session.get(url) diff --git a/lib/crawler/crawler.py b/lib/crawler/crawler.py index ccb5339..cf0ea92 100644 --- a/lib/crawler/crawler.py +++ b/lib/crawler/crawler.py @@ -11,11 +11,11 @@ class crawler: visited=[] @classmethod - def getLinks(self,base,proxy,headers,cookie): + def getLinks(self,base,proxy,headers,cookie,ssl_verify): lst=[] - conn=session(proxy,headers,cookie) + conn=session(proxy,headers,cookie,ssl_verify) text=conn.get(base).text isi=BeautifulSoup(text,"html.parser") @@ -37,17 +37,17 @@ def getLinks(self,base,proxy,headers,cookie): return lst @classmethod - def crawl(self,base,depth,proxy,headers,level,method,cookie): + def crawl(self,base,depth,proxy,headers,level,method,cookie,ssl_verify): - urls=self.getLinks(base,proxy,headers,cookie) + urls=self.getLinks(base,proxy,headers,cookie,ssl_verify) for url in urls: if url.startswith("https://") or url.startswith("http://"): - p=Process(target=core.main, args=(url,proxy,headers,level,cookie,method)) + p=Process(target=core.main, args=(url,proxy,headers,level,cookie,method,ssl_verify)) p.start() p.join() if depth != 0: - self.crawl(url,depth-1,base,proxy,level,method,cookie) + self.crawl(url,depth-1,proxy,headers,level,method,cookie,ssl_verify) else: break diff --git a/pwnxss.py b/pwnxss.py index 8c39bff..326e607 100644 --- a/pwnxss.py +++ b/pwnxss.py @@ -1,5 +1,5 @@ ''' -PwnXSS - 2019/2020 +PwnXSS - 2019/2022 This project was created by Andripwn with Pwn0sec team. 
Copyright under the MIT license ''' @@ -13,6 +13,16 @@ Github: https://www.github.com/pwn0sec/PwnXSS Version: 0.5 Final """ +def str2bool(v): + if isinstance(v, bool): + return v + if v.lower() in ('yes', 'true', 't', 'y', '1'): + return True + elif v.lower() in ('no', 'false', 'f', 'n', '0'): + return False + else: + raise argparse.ArgumentTypeError('Boolean value expected.') + def check(getopt): payload=int(getopt.payload_level) if payload > 6 and getopt.payload is None: @@ -44,17 +54,18 @@ def start(): pos_opt.add_argument("--proxy",default=None,metavar="",help="Set proxy (e.g. {'https':'https://10.10.1.10:1080'})") pos_opt.add_argument("--about",action="store_true",help="Print information about PwnXSS tool") pos_opt.add_argument("--cookie",help="Set cookie (e.g {'ID':'1094200543'})",default='''{"ID":"1094200543"}''',metavar="") + pos_opt.add_argument("--ssl-verify", type=str2bool, nargs='?',const=True, default=True,help="SSL Certificate Verification. Default: True") getopt=parse.parse_args() print(logo) Log.info("Starting PwnXSS...") if getopt.u: - core.main(getopt.u,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method) + core.main(getopt.u,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method,getopt.ssl_verify) - crawler.crawl(getopt.u,int(getopt.depth),getopt.proxy,getopt.user_agent,check(getopt),getopt.method,getopt.cookie) + crawler.crawl(getopt.u,int(getopt.depth),getopt.proxy,getopt.user_agent,check(getopt),getopt.method,getopt.cookie,getopt.ssl_verify) elif getopt.single: - core.main(getopt.single,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method) + core.main(getopt.single,getopt.proxy,getopt.user_agent,check(getopt),getopt.cookie,getopt.method,getopt.ssl_verify) elif getopt.about: print(""" @@ -62,7 +73,7 @@ def start(): Project: PwnXSS License: MIT Author: Security Executions Code -Last updates: 2019 may 26 +Last updates: 2022 Dec 30 Note: Take your own RISK **************** """+epilog)