Merge pull request #45 from zeroday0619/refactoring
feat: refactoring typed programing
gnh1201 authored Sep 13, 2024
2 parents 9c2b66f + 6b99ee9 commit bc08241
Showing 9 changed files with 197 additions and 145 deletions.
21 changes: 17 additions & 4 deletions base.py
@@ -19,6 +19,7 @@
import subprocess
import platform

from abc import ABC, abstractmethod
from datetime import datetime, timezone
from typing import Union, List

@@ -47,14 +48,14 @@ def jsonrpc2_create_id(data):
def jsonrpc2_encode(method, params=None):
data = {"jsonrpc": "2.0", "method": method, "params": params}
id = jsonrpc2_create_id(data)
id = data.get('id')
id = data.get("id")
return (id, json.dumps(data))


def jsonrpc2_decode(text):
data = json.loads(text)
type = 'error' if 'error' in data else 'result' if 'result' in data else None
id = data.get('id')
type = "error" if "error" in data else "result" if "result" in data else None
id = data.get("id")
rpcdata = data.get(type) if type else None
return type, id, rpcdata
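
The encode/decode helpers above form the JSON-RPC 2.0 wire format used by the rpcmethod plugins further down. A minimal round-trip sketch, not part of the commit, assuming base.py is importable from the project root:

from base import jsonrpc2_encode, jsonrpc2_decode

req_id, request = jsonrpc2_encode("container_start", {"name": "my-nginx"})  # request is a JSON string
kind, resp_id, payload = jsonrpc2_decode('{"jsonrpc": "2.0", "result": "ok", "id": "1"}')
# kind == "result", resp_id == "1", payload == "ok"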

@@ -68,6 +69,7 @@ def jsonrpc2_error_encode(error, id=""):
data = {"jsonrpc": "2.0", "error": error, "id": id}
return json.dumps(data)


def find_openssl_binpath():
system = platform.system()

@@ -121,8 +123,19 @@ def find_openssl_binpath():
return "openssl"


class ExtensionType:
def __init__(self):
self.type: str = None
self.method: str = None
self.exported_methods: list[str] = []
self.connection_type: str = None


type extension_type = ExtensionType


class Extension:
extensions = []
extensions: list[extension_type] = []
protocols = []
buffer_size = 8192

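
The ExtensionType stub and the "type extension_type = ExtensionType" alias above use the PEP 695 type statement, so base.py now requires Python 3.12 or newer. A short sketch, not part of the commit, of a record that satisfies the new extensions: list[extension_type] annotation, assuming base.py is importable:

from base import Extension, ExtensionType

stub = ExtensionType()
stub.type = "rpcmethod"                 # same fields the plugins below set in __init__
stub.method = "container_init"
stub.exported_methods = ["container_start", "container_stop"]
Extension.extensions.append(stub)       # matches the list[extension_type] annotation

Note that the plugins themselves subclass Extension rather than ExtensionType, so the annotation describes the intended shape of a plugin record rather than a strict runtime type.
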
Empty file modified download_certs.sh
100644 → 100755
Empty file.
67 changes: 39 additions & 28 deletions plugins/alwaysonline.py
@@ -15,7 +15,7 @@
from decouple import config
from elasticsearch import Elasticsearch, NotFoundError
import hashlib
from datetime import datetime
from datetime import datetime, UTC
from base import Extension, Logger

logger = Logger(name="wayback")
@@ -29,11 +29,13 @@

es = Elasticsearch([es_host])

def generate_id(url):

def generate_id(url: str):
"""Generate a unique ID for a URL by hashing it."""
return hashlib.sha256(url.encode('utf-8')).hexdigest()
return hashlib.sha256(url.encode("utf-8")).hexdigest()


def get_cached_page_from_google(url):
def get_cached_page_from_google(url: str):
status_code, content = (0, b"")

# Google Cache URL
@@ -50,8 +52,9 @@ def get_cached_page_from_google(url):

return status_code, content


# API documentation: https://archive.org/help/wayback_api.php
def get_cached_page_from_wayback(url):
def get_cached_page_from_wayback(url: str):
status_code, content = (0, b"")

# Wayback Machine API URL
@@ -89,73 +92,81 @@ def get_cached_page_from_wayback(url):

return status_code, content

def get_cached_page_from_elasticsearch(url):

def get_cached_page_from_elasticsearch(url: str):
url_id = generate_id(url)
try:
result = es.get(index=es_index, id=url_id)
logger.info(result['_source'])
return 200, result['_source']['content'].encode(client_encoding)
logger.info(result["_source"])
return 200, result["_source"]["content"].encode(client_encoding)
except NotFoundError:
return 404, b""
except Exception as e:
logger.error(f"Error fetching from Elasticsearch: {e}")
return 502, b""

def cache_to_elasticsearch(url, data):

def cache_to_elasticsearch(url: str, data: bytes):
url_id = generate_id(url)
timestamp = datetime.utcnow().isoformat()
timestamp = datetime.now(UTC).timestamp()
try:
es.index(index=es_index, id=url_id, body={
"url": url,
"content": data.decode(client_encoding),
"timestamp": timestamp
})
es.index(
index=es_index,
id=url_id,
body={
"url": url,
"content": data.decode(client_encoding),
"timestamp": timestamp,
},
)
except Exception as e:
logger.error(f"Error caching to Elasticsearch: {e}")

def get_page_from_origin_server(url):

def get_page_from_origin_server(url: str):
try:
response = requests.get(url)
return response.status_code, response.content
except Exception as e:
return 502, str(e).encode(client_encoding)


class AlwaysOnline(Extension):
def __init__(self):
self.type = "connector" # this is a connector
self.connection_type = "alwaysonline"
self.buffer_size = 8192

def connect(self, conn, data, webserver, port, scheme, method, url):
def connect(self, conn: socket.socket, data: bytes, webserver: bytes, port: bytes, scheme: bytes, method: bytes, url: bytes):
logger.info("[*] Connecting... Connecting...")

connected = False

is_ssl = scheme in [b"https", b"tls", b"ssl"]
cache_hit = 0
buffered = b""
def sendall(sock, conn, data):

def sendall(_sock: socket.socket, _conn: socket.socket, _data: bytes):
# send first chuck
sock.send(data)
if len(data) < self.buffer_size:
sock.send(_data)
if len(_data) < self.buffer_size:
return

# send following chunks
conn.settimeout(1)
_conn.settimeout(1)
while True:
try:
chunk = conn.recv(self.buffer_size)
chunk = _conn.recv(self.buffer_size)
if not chunk:
break
sock.send(chunk)
_sock.send(chunk)
except:
break

target_url = url.decode(client_encoding)
target_scheme = scheme.decode(client_encoding)
target_webserver = webserver.decode(client_encoding)

if "://" not in target_url:
target_url = f"{target_scheme}://{target_webserver}:{port}{target_url}"

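
One behavioural note on the timestamp hunk above: datetime.utcnow().isoformat() produced an ISO-8601 string, while datetime.now(UTC).timestamp() produces a float epoch value, which may matter for an existing Elasticsearch index mapping. A small sketch of the two forms, assuming Python 3.11+ (where datetime.UTC is available):

from datetime import datetime, UTC

old_style = datetime.now(UTC).isoformat()   # "2024-09-13T12:00:00+00:00"-style string (previous field shape)
new_style = datetime.now(UTC).timestamp()   # 1726228800.0-style float (what cache_to_elasticsearch now stores)
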
9 changes: 5 additions & 4 deletions plugins/bio.py
@@ -9,13 +9,14 @@
# Updated at: 2024-07-02
#

from socket import socket
from Bio.Seq import Seq
from Bio.SeqUtils import gc_fraction

from base import Extension


def _analyze_sequence(sequence) -> dict[str, str]:
def _analyze_sequence(sequence: str) -> dict[str, str]:
"""
Analyze a given DNA sequence to provide various nucleotide transformations and translations.
@@ -41,7 +42,7 @@ def _analyze_sequence(sequence) -> dict[str, str]:
)


def _gc_content_calculation(sequence) -> dict[str, str]:
def _gc_content_calculation(sequence: str) -> dict[str, str]:
"""
Calculate the GC content of a given DNA sequence and return it as a float.
Expand All @@ -63,7 +64,7 @@ def __init__(self):
def dispatch(self, type, id, params, conn):
conn.send(b"Greeting! dispatch")

def analyze_sequence(self, type, id, params, conn):
def analyze_sequence(self, type, id, params, conn: socket):
"""
Analyze a DNA sequence provided in the params dictionary.
@@ -91,7 +92,7 @@ def analyze_sequence(self, type, id, params, conn):
result = _analyze_sequence(params["sequence"])
return result

def gc_content_calculation(self, type, id, params, conn):
def gc_content_calculation(self, type, id, params, conn: socket):
"""
Calculate the GC content for a given DNA sequence provided in the params dictionary.
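
The helpers in this file wrap Biopython's Seq and gc_fraction, both imported at the top of the first hunk. A quick sketch of the underlying calls, not part of the commit, assuming Biopython 1.80+ (where gc_fraction replaced the older GC helper):

from Bio.Seq import Seq
from Bio.SeqUtils import gc_fraction

seq = Seq("ATGCATGC")
print(seq.complement())          # TACGTACG
print(seq.reverse_complement())  # GCATGCAT
print(seq.transcribe())          # AUGCAUGC
print(gc_fraction(seq))          # 0.5
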
60 changes: 35 additions & 25 deletions plugins/container.py
@@ -11,7 +11,7 @@
#

import docker

from socket import socket
from base import Extension, Logger

logger = Logger("Container")
@@ -21,26 +21,36 @@ class Container(Extension):
def __init__(self):
self.type = "rpcmethod"
self.method = "container_init"
self.exported_methods = ["container_cteate", "container_start", "container_run", "container_stop", "container_pause", "container_unpause", "container_restart", "container_kill", "container_remove"]
self.exported_methods = [
"container_cteate",
"container_start",
"container_run",
"container_stop",
"container_pause",
"container_unpause",
"container_restart",
"container_kill",
"container_remove",
]

# docker
self.client = docker.from_env()

def dispatch(self, type, id, params, conn):
def dispatch(self, type, id, params, conn: socket):
logger.info("[*] Greeting! dispatch")
conn.send(b"Greeting! dispatch")

def container_cteate(self, type, id, params, conn):
def container_cteate(self, type, id, params, conn: socket):
# todo: -
return b"[*] Created"
def container_start(self, type, id, params, conn):
name = params['name']

def container_start(self, type, id, params, conn: socket):
name = params["name"]

container = self.client.containers.get(name)
container.start()
def container_run(self, type, id, params, conn):

def container_run(self, type, id, params, conn: socket):
devices = params["devices"]
image = params["image"]
devices = params["devices"]
@@ -60,43 +70,43 @@ def container_run(self, type, id, params, conn):
logger.info("[*] Running...")
return b"[*] Running..."

def container_stop(self, type, id, params, conn):
def container_stop(self, type, id, params, conn: socket):
name = params["name"]

container = self.client.containers.get(name)
container.stop()

logger.info("[*] Stopped")
return b"[*] Stopped"
def container_pause(self, type, id, params, conn):
name = params['name']

def container_pause(self, type, id, params, conn: socket):
name = params["name"]

container = self.client.containers.get(name)
container.pause()
return b"[*] Paused"
def container_unpause(self, type, id, params, conn):
name = params['name']

def container_unpause(self, type, id, params, conn: socket):
name = params["name"]

container = self.client.containers.get(name)
container.unpause()
return b"[*] Unpaused"
def container_restart(self, type, id, params, conn):
name = params['name']

def container_restart(self, type, id, params, conn: socket):
name = params["name"]

container = self.client.containers.get(name)
container.restart()
return b"[*] Restarted"
def container_kill(self, type, id, params, conn):

def container_kill(self, type, id, params, conn: socket):
# TODO: -
return b"[*] Killed"
def container_remove(self, type, id, params, conn):
name = params['name']

def container_remove(self, type, id, params, conn: socket):
name = params["name"]

container = self.client.containers.get(name)
container.remove()
return b"[*] Removed"
return b"[*] Removed"
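
The handlers above all read params["name"] and drive the Docker SDK client created in __init__. A minimal sketch of the equivalent direct SDK calls, not part of the commit, assuming a reachable Docker daemon and a hypothetical container named "my-nginx":

import docker

client = docker.from_env()                      # same entry point as self.client above
container = client.containers.get("my-nginx")   # params["name"] in the RPC handlers
container.restart()                             # what container_restart does
container.stop()                                # what container_stop does
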
5 changes: 3 additions & 2 deletions plugins/serial.py
@@ -25,6 +25,7 @@

logger = logging.getLogger(__name__)


class Serial(Extension):
def __init__(self):
self.type = "connector"
@@ -38,7 +39,7 @@ def connect(self, conn, data, webserver, port, scheme, method, url):
connected = False
ser = None
try:
port_path = url.decode(client_encoding).replace('/', '')
port_path = url.decode(client_encoding).replace("/", "")
if not ser:
ser = serial.Serial(port_path, baudrate=9600, timeout=2)
connected = True
@@ -49,7 +50,7 @@

ser_data = ser.read_all()
logger.debug(f"Data received: {ser_data}")

if ser_data:
conn.send(ser_data.decode(client_encoding))
except serial.SerialException as e:
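
One hedged aside on the last hunk: ser.read_all() returns bytes, and if conn is a plain socket.socket its send() also expects bytes, so the decode() call shown above would raise TypeError at runtime. A bytes-safe sketch, offered as an assumption rather than the project's intended behaviour:

if ser_data:
    conn.send(ser_data)   # forward the raw serial bytes; decode only if conn really expects text
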
(Diffs for the remaining changed files did not load.)
