Mirror of https://github.com/gnh1201/caterpillar.git, synced 2024-11-26 07:21:46 +00:00
feat: refactoring typed programming
parent: 93e0b4edd9
commit: 910e5e4ed5
@@ -15,7 +15,7 @@ import requests
 from decouple import config
 from elasticsearch import Elasticsearch, NotFoundError
 import hashlib
-from datetime import datetime
+from datetime import datetime, UTC
 from base import Extension, Logger
 
 logger = Logger(name="wayback")
@@ -30,12 +30,12 @@ except Exception as e:
 es = Elasticsearch([es_host])
 
 
-def generate_id(url):
+def generate_id(url: str):
     """Generate a unique ID for a URL by hashing it."""
     return hashlib.sha256(url.encode("utf-8")).hexdigest()
 
 
-def get_cached_page_from_google(url):
+def get_cached_page_from_google(url: str):
     status_code, content = (0, b"")
 
     # Google Cache URL
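The annotation types only the parameter; the return annotation in the sketch below is added here for illustration and is not part of the commit. A minimal, self-contained version of the same hashing pattern:

    import hashlib

    def generate_id(url: str) -> str:
        # SHA-256 of the UTF-8 bytes: the same URL always maps to the same document ID
        return hashlib.sha256(url.encode("utf-8")).hexdigest()

    doc_id = generate_id("https://example.com/")  # 64-character hex string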
@@ -54,7 +54,7 @@ def get_cached_page_from_google(url):
 
 
 # API documentation: https://archive.org/help/wayback_api.php
-def get_cached_page_from_wayback(url):
+def get_cached_page_from_wayback(url: str):
     status_code, content = (0, b"")
 
     # Wayback Machine API URL
@@ -93,7 +93,7 @@ def get_cached_page_from_wayback(url):
     return status_code, content
 
 
-def get_cached_page_from_elasticsearch(url):
+def get_cached_page_from_elasticsearch(url: str):
    url_id = generate_id(url)
    try:
        result = es.get(index=es_index, id=url_id)
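For context, es.get raises the NotFoundError imported at the top when no document has that ID. A hedged sketch of the lookup pattern (the host URL and the _source access are assumptions, not shown in this diff):

    from elasticsearch import Elasticsearch, NotFoundError

    es = Elasticsearch(["http://localhost:9200"])  # stands in for es_host

    def lookup(index: str, doc_id: str):
        try:
            return es.get(index=index, id=doc_id)["_source"]
        except NotFoundError:
            return None  # cache miss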
@@ -106,9 +106,9 @@ def get_cached_page_from_elasticsearch(url):
         return 502, b""
 
 
-def cache_to_elasticsearch(url, data):
+def cache_to_elasticsearch(url: str, data: bytes):
     url_id = generate_id(url)
-    timestamp = datetime.utcnow().isoformat()
+    timestamp = datetime.now(UTC).timestamp()
     try:
         es.index(
             index=es_index,
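datetime.utcnow() returns a naive datetime and is deprecated as of Python 3.12; datetime.now(UTC) is the timezone-aware replacement. Note the commit also swaps the stored value from an ISO-8601 string (.isoformat()) to a float epoch value (.timestamp()), which changes the type of the timestamp field written to the index. A quick comparison (values illustrative):

    from datetime import datetime, UTC

    naive = datetime.utcnow()   # deprecated; tzinfo is None
    aware = datetime.now(UTC)   # timezone-aware

    aware.isoformat()   # '2024-07-02T09:00:00+00:00'  (string, the old format)
    aware.timestamp()   # 1719910800.0                 (float, the new format)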
@@ -123,7 +123,7 @@ def cache_to_elasticsearch(url, data):
         logger.error(f"Error caching to Elasticsearch: {e}")
 
 
-def get_page_from_origin_server(url):
+def get_page_from_origin_server(url: str):
     try:
         response = requests.get(url)
         return response.status_code, response.content
@@ -137,7 +137,7 @@ class AlwaysOnline(Extension):
         self.connection_type = "alwaysonline"
         self.buffer_size = 8192
 
-    def connect(self, conn, data, webserver, port, scheme, method, url):
+    def connect(self, conn: socket.socket, data: bytes, webserver: bytes, port: bytes, scheme: bytes, method: bytes, url: bytes):
         logger.info("[*] Connecting... Connecting...")
 
         connected = False
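The bytes annotations on webserver, port, scheme, method, and url suggest these values are sliced out of the raw request rather than decoded to str. A generic illustration of how such fields stay bytes end to end (not the project's actual parser):

    raw = b"GET http://example.com:8080/ HTTP/1.1\r\nHost: example.com:8080\r\n\r\n"
    request_line = raw.split(b"\r\n", 1)[0]
    method, url, version = request_line.split(b" ")  # every piece is still bytes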
@@ -146,20 +146,20 @@ class AlwaysOnline(Extension):
         cache_hit = 0
         buffered = b""
 
-        def sendall(sock, conn, data):
+        def sendall(_sock: socket.socket, _conn: socket.socket, _data: bytes):
             # send first chuck
-            sock.send(data)
+            sock.send(_data)
-            if len(data) < self.buffer_size:
+            if len(_data) < self.buffer_size:
                 return
 
             # send following chunks
-            conn.settimeout(1)
+            _conn.settimeout(1)
             while True:
                 try:
-                    chunk = conn.recv(self.buffer_size)
+                    chunk = _conn.recv(self.buffer_size)
                     if not chunk:
                         break
-                    sock.send(chunk)
+                    _sock.send(chunk)
                 except:
                     break
 
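The underscore-prefixed parameters stop the inner function from shadowing the enclosing connect() scope's sock, conn, and data. One thing worth double-checking in the committed result: the first send still goes through sock.send(_data), the outer socket, while the loop uses _sock.send(chunk), so the two sends target different objects unless the caller always passes the outer sock as _sock. For reference, a standalone sketch of the same chunk-relay pattern with the names made uniform and the bare except narrowed to the timeout case (both adjustments are mine, not the commit's):

    import socket

    def relay(src: socket.socket, dst: socket.socket, first_chunk: bytes,
              buffer_size: int = 8192, timeout: float = 1.0) -> None:
        # forward the first chunk, then pump the rest until the peer goes quiet
        dst.send(first_chunk)
        if len(first_chunk) < buffer_size:
            return  # the whole payload fit in one chunk
        src.settimeout(timeout)
        while True:
            try:
                chunk = src.recv(buffer_size)
                if not chunk:
                    break  # peer closed the connection
                dst.send(chunk)
            except socket.timeout:
                break  # no more data within the window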
@@ -9,13 +9,14 @@
 # Updated at: 2024-07-02
 #
 
+from socket import socket
 from Bio.Seq import Seq
 from Bio.SeqUtils import gc_fraction
 
 from base import Extension
 
 
-def _analyze_sequence(sequence) -> dict[str, str]:
+def _analyze_sequence(sequence: str) -> dict[str, str]:
     """
     Analyze a given DNA sequence to provide various nucleotide transformations and translations.
 
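For reference, these are the Biopython calls a helper like _analyze_sequence typically wraps (outputs shown for the Biopython tutorial's example sequence):

    from Bio.Seq import Seq

    seq = Seq("ATGGCCATTGTAATGGGCCGC")
    seq.complement()          # Seq('TACCGGTAACATTACCCGGCG')
    seq.reverse_complement()  # Seq('GCGGCCCATTACAATGGCCAT')
    seq.transcribe()          # Seq('AUGGCCAUUGUAAUGGGCCGC')
    seq.translate()           # Seq('MAIVMGR')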
@@ -41,7 +42,7 @@ def _analyze_sequence(sequence) -> dict[str, str]:
     )
 
 
-def _gc_content_calculation(sequence) -> dict[str, str]:
+def _gc_content_calculation(sequence: str) -> dict[str, str]:
     """
     Calculate the GC content of a given DNA sequence and return it as a float.
 
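gc_fraction returns a float in [0, 1], so the dict[str, str] return annotation only holds if the helper stringifies the value before returning it; the docstring ("return it as a float") and the annotation disagree, which may be worth a follow-up. Usage for reference:

    from Bio.SeqUtils import gc_fraction

    gc_fraction("ATGGCC")  # 4 G/C out of 6 bases -> 0.666...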
@@ -63,7 +64,7 @@ class PyBio(Extension):
     def dispatch(self, type, id, params, conn):
         conn.send(b"Greeting! dispatch")
 
-    def analyze_sequence(self, type, id, params, conn):
+    def analyze_sequence(self, type, id, params, conn: socket):
         """
         Analyze a DNA sequence provided in the params dictionary.
 
@@ -91,7 +92,7 @@ class PyBio(Extension):
         result = _analyze_sequence(params["sequence"])
         return result
 
-    def gc_content_calculation(self, type, id, params, conn):
+    def gc_content_calculation(self, type, id, params, conn: socket):
         """
         Calculate the GC content for a given DNA sequence provided in the params dictionary.
 
@@ -11,7 +11,7 @@
 #
 
 import docker
-
+from socket import socket
 from base import Extension, Logger
 
 logger = Logger("Container")
@@ -36,21 +36,21 @@ class Container(Extension):
         # docker
         self.client = docker.from_env()
 
-    def dispatch(self, type, id, params, conn):
+    def dispatch(self, type, id, params, conn: socket):
         logger.info("[*] Greeting! dispatch")
         conn.send(b"Greeting! dispatch")
 
-    def container_cteate(self, type, id, params, conn):
+    def container_cteate(self, type, id, params, conn: socket):
         # todo: -
         return b"[*] Created"
 
-    def container_start(self, type, id, params, conn):
+    def container_start(self, type, id, params, conn: socket):
         name = params["name"]
 
         container = self.client.containers.get(name)
         container.start()
 
-    def container_run(self, type, id, params, conn):
+    def container_run(self, type, id, params, conn: socket):
         devices = params["devices"]
         image = params["image"]
         devices = params["devices"]
@@ -70,7 +70,7 @@ class Container(Extension):
         logger.info("[*] Running...")
         return b"[*] Running..."
 
-    def container_stop(self, type, id, params, conn):
+    def container_stop(self, type, id, params, conn: socket):
         name = params["name"]
 
         container = self.client.containers.get(name)
@@ -79,32 +79,32 @@ class Container(Extension):
         logger.info("[*] Stopped")
         return b"[*] Stopped"
 
-    def container_pause(self, type, id, params, conn):
+    def container_pause(self, type, id, params, conn: socket):
         name = params["name"]
 
         container = self.client.containers.get(name)
         container.pause()
         return b"[*] Paused"
 
-    def container_unpause(self, type, id, params, conn):
+    def container_unpause(self, type, id, params, conn: socket):
         name = params["name"]
 
         container = self.client.containers.get(name)
         container.unpause()
         return b"[*] Unpaused"
 
-    def container_restart(self, type, id, params, conn):
+    def container_restart(self, type, id, params, conn: socket):
         name = params["name"]
 
         container = self.client.containers.get(name)
         container.restart()
         return b"[*] Restarted"
 
-    def container_kill(self, type, id, params, conn):
+    def container_kill(self, type, id, params, conn: socket):
         # TODO: -
         return b"[*] Killed"
 
-    def container_remove(self, type, id, params, conn):
+    def container_remove(self, type, id, params, conn: socket):
         name = params["name"]
 
         container = self.client.containers.get(name)
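All the lifecycle methods share the same look-up-then-act shape on the docker SDK client. A condensed sketch (the NotFound handling is an addition here, not part of the commit):

    import docker

    client = docker.from_env()

    def start_container(name: str) -> bytes:
        try:
            container = client.containers.get(name)  # raises docker.errors.NotFound
            container.start()
            return b"[*] Running..."
        except docker.errors.NotFound:
            return b"[*] No such container"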
smtp.py (14 changed lines)
@@ -20,7 +20,7 @@ from requests.auth import HTTPBasicAuth
 from base import (
     extract_credentials,
     jsonrpc2_encode,
-    Logger,
+    Logger, jsonrpc2_decode,
 )
 
 logger = Logger(name="smtp")
@@ -47,8 +47,8 @@ class CaterpillarSMTPHandler:
         self.smtp_version = "0.1.6"
 
     async def handle_DATA(self, server, session, envelope):
-        mailfrom = envelope.mail_from
+        mail_from = envelope.mail_from
-        rcpttos = envelope.rcpt_tos
+        rcpt_tos = envelope.rcpt_tos
         data = envelope.content
 
         message = EmailMessage()
@@ -65,7 +65,7 @@ class CaterpillarSMTPHandler:
             },
             "data": {
                 "to": to,
-                "from": mailfrom,
+                "from": mail_from,
                 "subject": subject,
                 "message": data.decode("utf-8"),
             },
@@ -81,11 +81,11 @@ class CaterpillarSMTPHandler:
                 auth=auth,
             )
             if response.status_code == 200:
-                type, id, rpcdata = jsonrpc2_decode(response.text)
+                _type, _id, rpc_data = jsonrpc2_decode(response.text)
-                if rpcdata["success"]:
+                if rpc_data["success"]:
                     logger.info("[*] Email sent successfully.")
                 else:
-                    raise Exception(f"({rpcdata['code']}) {rpcdata['message']}")
+                    raise Exception(f"({rpc_data['code']}) {rpc_data['message']}")
             else:
                 raise Exception(f"Status {response.status_code}")
         except Exception as e:
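jsonrpc2_decode is the project's own helper from base, now imported at the top of the file; judging by this call site it unpacks a JSON-RPC 2.0 response into a (type, id, data) triple. A hedged sketch of what such a decoder does, not the project's actual implementation:

    import json

    def jsonrpc2_decode(text: str):
        # returns (type, id, data), where type distinguishes result from error
        obj = json.loads(text)
        if "result" in obj:
            return "result", obj.get("id"), obj["result"]
        return "error", obj.get("id"), obj.get("error")

    _type, _id, rpc_data = jsonrpc2_decode(
        '{"jsonrpc": "2.0", "result": {"success": true}, "id": 1}'
    )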
web.py (12 changed lines)
@@ -49,18 +49,18 @@ def process_jsonrpc2():
     conn = Connection(request)
 
     # JSON-RPC 2.0 request
-    jsondata = request.get_json(silent=True)
+    json_data = request.get_json(silent=True)
-    if jsondata["jsonrpc"] == "2.0":
+    if json_data["jsonrpc"] == "2.0":
         return Extension.dispatch_rpcmethod(
-            jsondata["method"], "call", jsondata["id"], jsondata["params"], conn
+            json_data["method"], "call", json_data["id"], json_data["params"], conn
         )
 
     # when error
-    return jsonrpc2_error_encode({"message": "Not vaild JSON-RPC 2.0 request"})
+    return jsonrpc2_error_encode({"message": "Not valid JSON-RPC 2.0 request"})
 
 
-def jsonrpc2_server(conn, id, method, params):
+def jsonrpc2_server(conn, _id, method, params):
-    return Extension.dispatch_rpcmethod(method, "call", id, params, conn)
+    return Extension.dispatch_rpcmethod(method, "call", _id, params, conn)
 
 
 class Connection:
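One caveat with get_json(silent=True): it returns None on malformed JSON, so json_data["jsonrpc"] would raise TypeError before the "when error" path is reached; a None guard may be warranted. For reference, the shape of a request that passes the version check and reaches dispatch_rpcmethod (URL, port, and method name below are placeholders, not taken from the project):

    import requests

    payload = {
        "jsonrpc": "2.0",
        "method": "container_start",         # hypothetical RPC method
        "params": {"name": "my-container"},
        "id": 1,
    }
    requests.post("http://127.0.0.1:5555/", json=payload)  # endpoint assumed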