Revert "Fix SSL negotiation + ruff formatted"

This reverts commit 57ed60fd01.
Namhyeon Go 2024-07-11 19:00:58 +09:00
parent 57ed60fd01
commit 1a8022df73
12 changed files with 283 additions and 505 deletions

.gitignore (1 changed line)

@ -4,7 +4,6 @@ logs/
settings.ini settings.ini
.env .env
*.crt *.crt
*.key
### Python ### ### Python ###
# Byte-compiled / optimized / DLL files # Byte-compiled / optimized / DLL files

base.py (146 changed lines)

@ -8,7 +8,7 @@
# Euiseo Cha (Wonkwang University) <zeroday0619_dev@outlook.com> # Euiseo Cha (Wonkwang University) <zeroday0619_dev@outlook.com>
# https://github.com/gnh1201/caterpillar # https://github.com/gnh1201/caterpillar
# Created at: 2024-05-20 # Created at: 2024-05-20
# Updated at: 2024-07-11 # Updated at: 2024-07-09
# #
import logging import logging
@ -17,25 +17,21 @@ import json
import os import os
import re import re
import importlib import importlib
import subprocess
import platform
from datetime import datetime, timezone from datetime import datetime, timezone
from typing import Union, List from typing import Union, List
client_encoding = "utf-8" client_encoding = 'utf-8'
def extract_credentials(url): def extract_credentials(url):
pattern = re.compile( pattern = re.compile(r'(?P<scheme>\w+://)?(?P<username>[^:/]+):(?P<password>[^@]+)@(?P<url>.+)')
r"(?P<scheme>\w+://)?(?P<username>[^:/]+):(?P<password>[^@]+)@(?P<url>.+)"
)
match = pattern.match(url) match = pattern.match(url)
if match: if match:
scheme = match.group("scheme") if match.group("scheme") else "https://" scheme = match.group('scheme') if match.group('scheme') else 'https://'
username = match.group("username") username = match.group('username')
password = match.group("password") password = match.group('password')
url = match.group("url") url = match.group('url')
return username, password, scheme + url return username, password, scheme + url
else: else:
return None, None, url return None, None, url
@ -46,76 +42,35 @@ def jsonrpc2_create_id(data):
def jsonrpc2_encode(method, params=None): def jsonrpc2_encode(method, params=None):
data = {"jsonrpc": "2.0", "method": method, "params": params} data = {
"jsonrpc": "2.0",
"method": method,
"params": params
}
id = jsonrpc2_create_id(data) id = jsonrpc2_create_id(data)
data["id"] = id data['id'] = id
return (id, json.dumps(data)) return (id, json.dumps(data))
def jsonrpc2_result_encode(result, id=""): def jsonrpc2_result_encode(result, id=''):
data = {"jsonrpc": "2.0", "result": result, "id": id} data = {
"jsonrpc": "2.0",
"result": result,
"id": id
}
return json.dumps(data) return json.dumps(data)
def jsonrpc2_error_encode(error, id=""): def jsonrpc2_error_encode(error, id=''):
data = {"jsonrpc": "2.0", "error": error, "id": id} data = {
"jsonrpc": "2.0",
"error": error,
"id": id
}
return json.dumps(data) return json.dumps(data)
def find_openssl_binpath(): class Extension():
system = platform.system()
if system == "Windows":
possible_paths = [
os.path.join(
os.getenv("ProgramFiles", "C:\\Program Files"),
"OpenSSL-Win64",
"bin",
"openssl.exe",
),
os.path.join(
os.getenv("ProgramFiles", "C:\\Program Files"),
"OpenSSL-Win32",
"bin",
"openssl.exe",
),
os.path.join(
os.getenv("ProgramFiles(x86)", "C:\\Program Files (x86)"),
"OpenSSL-Win32",
"bin",
"openssl.exe",
),
os.path.join(
os.getenv("ProgramW6432", "C:\\Program Files"),
"OpenSSL-Win64",
"bin",
"openssl.exe",
),
os.path.join(
os.getenv("ProgramW6432", "C:\\Program Files"),
"OpenSSL-Win32",
"bin",
"openssl.exe",
),
]
for path in possible_paths:
if os.path.exists(path):
return path
else:
try:
result = subprocess.run(
["which", "openssl"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
path = result.stdout.decode().strip()
if path:
return path
except Exception as e:
pass
return "openssl"
class Extension:
extensions = [] extensions = []
protocols = [] protocols = []
buffer_size = 8192 buffer_size = 8192
@ -130,8 +85,8 @@ class Extension:
@classmethod @classmethod
def register(cls, s): def register(cls, s):
module_name, class_name = s.strip().split(".")[0:2] module_name, class_name = s.strip().split('.')[0:2]
module_path = "plugins." + module_name module_path = 'plugins.' + module_name
try: try:
module = importlib.import_module(module_path) module = importlib.import_module(module_path)
@ -151,9 +106,7 @@ class Extension:
@classmethod @classmethod
def get_rpcmethod(cls, method): def get_rpcmethod(cls, method):
for extension in cls.extensions: for extension in cls.extensions:
is_exported_method = (method == extension.method) or ( is_exported_method = (method == extension.method) or (method in extension.exported_methods)
method in extension.exported_methods
)
if extension.type == "rpcmethod" and is_exported_method: if extension.type == "rpcmethod" and is_exported_method:
return extension return extension
return None return None
@ -172,25 +125,24 @@ class Extension:
@classmethod @classmethod
def get_connector(cls, connection_type): def get_connector(cls, connection_type):
for extension in cls.extensions: for extension in cls.extensions:
if ( if extension.type == "connector" and extension.connection_type == connection_type:
extension.type == "connector"
and extension.connection_type == connection_type
):
return extension return extension
return None return None
@classmethod @classmethod
def send_accept(cls, conn, method, success=True): def send_accept(cls, conn, method, success=True):
if "tcp" in cls.protocols: if 'tcp' in cls.protocols:
_, message = jsonrpc2_encode(f"{method}_accept", {"success": success}) _, message = jsonrpc2_encode(f"{method}_accept", {
"success": success
})
conn.send(message.encode(client_encoding)) conn.send(message.encode(client_encoding))
print(f"Accepted request with {cls.protocols[0]} protocol") print(f"Accepted request with {cls.protocols[0]} protocol")
@classmethod @classmethod
def readall(cls, conn): def readall(cls, conn):
if "tcp" in cls.protocols: if 'tcp' in cls.protocols:
data = b"" data = b''
while True: while True:
try: try:
chunk = conn.recv(cls.buffer_size) chunk = conn.recv(cls.buffer_size)
@ -202,13 +154,13 @@ class Extension:
return data return data
elif "http" in cls.protocols: elif 'http' in cls.protocols:
# empty binary when an file not exists # empty binary when an file not exists
if "file" not in conn.request.files: if 'file' not in conn.request.files:
return b"" return b''
# read an uploaded file with binary mode # read an uploaded file with binary mode
file = conn.request.files["file"] file = conn.request.files['file']
return file.read() return file.read()
def __init__(self): def __init__(self):
@ -230,33 +182,25 @@ class Extension:
class Logger(logging.Logger): class Logger(logging.Logger):
def __init__(self, name: str, level: int = logging.NOTSET): def __init__(self, name: str, level: int = logging.NOTSET):
super().__init__(name, level) super().__init__(name, level)
self.formatter = logging.Formatter( self.formatter = logging.Formatter('[%(asctime)s] %(levelname)s %(module)s: %(message)s')
"[%(asctime)s] %(levelname)s %(module)s: %(message)s"
)
if not os.path.isdir("logs"): if not os.path.isdir("logs"):
os.mkdir("logs") os.mkdir("logs")
stream_handler = logging.StreamHandler() stream_handler = logging.StreamHandler()
file_handler = logging.FileHandler( file_handler = logging.FileHandler('logs/' + name + "-" + self._generate_timestamp() + '.log')
"logs/" + name + "-" + self._generate_timestamp() + ".log"
)
self._set_formatters([stream_handler, file_handler]) self._set_formatters([stream_handler, file_handler])
self._add_handlers([stream_handler, file_handler]) self._add_handlers([stream_handler, file_handler])
@staticmethod @staticmethod
def _generate_timestamp(): def _generate_timestamp():
date = datetime.now(tz=timezone.utc).strftime("%Y-%m-%d") date = datetime.now(tz=timezone.utc).strftime('%Y-%m-%d')
return date return date
def _set_formatters( def _set_formatters(self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]]):
self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]]
):
for handler in handlers: for handler in handlers:
handler.setFormatter(self.formatter) handler.setFormatter(self.formatter)
def _add_handlers( def _add_handlers(self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]]):
self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]]
):
for handler in handlers: for handler in handlers:
self.addHandler(handler) self.addHandler(handler)
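
A minimal, self-contained sketch of the JSON-RPC 2.0 framing that base.py provides through jsonrpc2_encode() and jsonrpc2_result_encode(). The id derivation below (SHA-1 of the payload plus a UTC timestamp) is an assumption for illustration only; the real jsonrpc2_create_id() is not shown in this hunk.

import hashlib
import json
from datetime import datetime, timezone


def create_id(data: dict) -> str:
    # Hypothetical id scheme: hash the payload together with a UTC timestamp.
    seed = json.dumps(data) + datetime.now(tz=timezone.utc).isoformat()
    return hashlib.sha1(seed.encode("utf-8")).hexdigest()


def encode_request(method: str, params=None):
    # Mirrors jsonrpc2_encode(): build the envelope, then attach the id.
    data = {"jsonrpc": "2.0", "method": method, "params": params}
    request_id = create_id(data)
    data["id"] = request_id
    return request_id, json.dumps(data)


def encode_result(result, request_id=""):
    # Mirrors jsonrpc2_result_encode().
    return json.dumps({"jsonrpc": "2.0", "result": result, "id": request_id})


if __name__ == "__main__":
    rid, payload = encode_request("relay_accept", {"success": True})
    print(payload)
    print(encode_result({"ok": True}, rid))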


@ -2,7 +2,6 @@
bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.crt %CD%\ca.crt bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.crt %CD%\ca.crt
bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.key %CD%\ca.key bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.key %CD%\ca.key
bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/cert.key %CD%\cert.key bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/cert.key %CD%\cert.key
bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/cert.key %CD%\default.crt
REM echo if you want generate a certificate... REM echo if you want generate a certificate...
REM openssl genrsa -out ca.key 2048 REM openssl genrsa -out ca.key 2048


@ -2,7 +2,6 @@
wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.crt wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.crt
wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.key wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.key
wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/cert.key wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/cert.key
wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/default.crt
# echo "if you want generate a certificate..." # echo "if you want generate a certificate..."
#openssl genrsa -out ca.key 2048 #openssl genrsa -out ca.key 2048


@ -15,7 +15,6 @@ from Bio.SeqUtils import gc_fraction
from base import Extension from base import Extension
def _analyze_sequence(sequence) -> dict[str, str]: def _analyze_sequence(sequence) -> dict[str, str]:
""" """
Analyze a given DNA sequence to provide various nucleotide transformations and translations. Analyze a given DNA sequence to provide various nucleotide transformations and translations.
@ -62,7 +61,7 @@ class PyBio(Extension):
self.exported_methods = ["analyze_sequence", "gc_content_calculation"] self.exported_methods = ["analyze_sequence", "gc_content_calculation"]
def dispatch(self, type, id, params, conn): def dispatch(self, type, id, params, conn):
conn.send(b"Greeting! dispatch") conn.send(b'Greeting! dispatch')
def analyze_sequence(self, type, id, params, conn): def analyze_sequence(self, type, id, params, conn):
""" """
@ -89,7 +88,7 @@ class PyBio(Extension):
"transcription": "AUGCGUACGUAGCUAGCUAGCGUAGCUAGCUGACU", "transcription": "AUGCGUACGUAGCUAGCUAGCGUAGCUAGCUGACU",
"translation": "MRT*LASVAS*"} "translation": "MRT*LASVAS*"}
""" """
result = _analyze_sequence(params["sequence"]) result = _analyze_sequence(params['sequence'])
return result return result
def gc_content_calculation(self, type, id, params, conn): def gc_content_calculation(self, type, id, params, conn):
@ -104,5 +103,5 @@ class PyBio(Extension):
:return: Dictionary containing the GC content as a float. :return: Dictionary containing the GC content as a float.
Example: {"gc_content": 0.5142857142857142} Example: {"gc_content": 0.5142857142857142}
""" """
result = _gc_content_calculation(params["sequence"]) result = _gc_content_calculation(params['sequence'])
return result return result
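
A short sketch of the Biopython calls that the PyBio plugin above builds on, assuming Biopython is installed (pip install biopython). The helper name is illustrative, not part of the plugin; the sample sequence is the one from the plugin's docstring example.

from Bio.Seq import Seq
from Bio.SeqUtils import gc_fraction


def analyze(sequence: str) -> dict:
    seq = Seq(sequence)
    return {
        "complement": str(seq.complement()),
        "reverse_complement": str(seq.reverse_complement()),
        "transcription": str(seq.transcribe()),   # DNA -> mRNA
        "translation": str(seq.translate()),      # codons -> protein, '*' marks a stop
        "gc_content": gc_fraction(seq),           # fraction in the range [0, 1]
    }


if __name__ == "__main__":
    print(analyze("ATGCGTACGTAGCTAGCTAGCGTAGCTAGCTGACT"))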


@ -28,15 +28,15 @@ class Container(Extension):
def dispatch(self, type, id, params, conn): def dispatch(self, type, id, params, conn):
logger.info("[*] Greeting! dispatch") logger.info("[*] Greeting! dispatch")
conn.send(b"Greeting! dispatch") conn.send(b'Greeting! dispatch')
def container_run(self, type, id, params, conn): def container_run(self, type, id, params, conn):
devices = params["devices"] devices = params['devices']
image = params["image"] image = params['image']
devices = params["devices"] devices = params['devices']
name = params["name"] name = params['name']
environment = params["environment"] environment = params['environment']
volumes = params["volumes"] volumes = params['volumes']
container = self.client.containers.run( container = self.client.containers.run(
image, image,
@ -44,16 +44,17 @@ class Container(Extension):
name=name, name=name,
volumes=volumes, volumes=volumes,
environment=environment, environment=environment,
detach=True, detach=True
) )
container.logs() container.logs()
logger.info("[*] Running...") logger.info("[*] Running...")
def container_stop(self, type, id, params, conn): def container_stop(self, type, id, params, conn):
name = params["name"] name = params['name']
container = self.client.containers.get(name) container = self.client.containers.get(name)
container.stop() container.stop()
logger.info("[*] Stopped") logger.info("[*] Stopped")


@ -25,55 +25,48 @@ from base import Extension, Logger
logger = Logger(name="fediverse") logger = Logger(name="fediverse")
try: try:
client_encoding = config("CLIENT_ENCODING", default="utf-8") client_encoding = config('CLIENT_ENCODING', default='utf-8')
truecaptcha_userid = config("TRUECAPTCHA_USERID") # truecaptcha.org truecaptcha_userid = config('TRUECAPTCHA_USERID') # truecaptcha.org
truecaptcha_apikey = config("TRUECAPTCHA_APIKEY") # truecaptcha.org truecaptcha_apikey = config('TRUECAPTCHA_APIKEY') # truecaptcha.org
dictionary_file = config( dictionary_file = config('DICTIONARY_FILE', default='words_alpha.txt') # https://github.com/dwyl/english-words
"DICTIONARY_FILE", default="words_alpha.txt" librey_apiurl = config('LIBREY_APIURL', default='https://search.catswords.net') # https://github.com/Ahwxorg/librey
) # https://github.com/dwyl/english-words
librey_apiurl = config(
"LIBREY_APIURL", default="https://search.catswords.net"
) # https://github.com/Ahwxorg/librey
except Exception as e: except Exception as e:
logger.error("[*] Invalid configuration", exc_info=e) logger.error("[*] Invalid configuration", exc_info=e)
class Fediverse(Extension): class Fediverse(Extension):
def __init__(self): def __init__(self):
self.type = "filter" # this is a filter self.type = "filter" # this is a filter
# Load data to use KnownWords4 strategy # Load data to use KnownWords4 strategy
# Download data: https://github.com/dwyl/english-words # Download data: https://github.com/dwyl/english-words
self.known_words = [] self.known_words = []
if dictionary_file != "" and os.path.isfile(dictionary_file): if dictionary_file != '' and os.path.isfile(dictionary_file):
with open(dictionary_file, "r") as file: with open(dictionary_file, "r") as file:
words = file.readlines() words = file.readlines()
self.known_words = [ self.known_words = [word.strip() for word in words if len(word.strip()) > 3]
word.strip() for word in words if len(word.strip()) > 3
]
logger.info("[*] Data loaded to use KnownWords4 strategy") logger.info("[*] Data loaded to use KnownWords4 strategy")
def test(self, filtered, data, webserver, port, scheme, method, url): def test(self, filtered, data, webserver, port, scheme, method, url):
# prevent cache confusing # prevent cache confusing
if data.find(b"<title>Welcome to nginx!</title>") > -1: if data.find(b'<title>Welcome to nginx!</title>') > -1:
return True return True
# allowed conditions # allowed conditions
if method == b"GET" or url.find(b"/api") > -1: if method == b'GET' or url.find(b'/api') > -1:
return False return False
# convert to text # convert to text
data_length = len(data) data_length = len(data)
text = data.decode(client_encoding, errors="ignore") text = data.decode(client_encoding, errors='ignore')
error_rate = (data_length - len(text)) / data_length error_rate = (data_length - len(text)) / data_length
if error_rate > 0.2: # it is a binary data if error_rate > 0.2: # it is a binary data
return False return False
# check ID with K-Anonymity strategy # check ID with K-Anonymity strategy
pattern = r"\b(?:(?<=\/@)|(?<=acct:))([a-zA-Z0-9]{10})\b" pattern = r'\b(?:(?<=\/@)|(?<=acct:))([a-zA-Z0-9]{10})\b'
matches = list(set(re.findall(pattern, text))) matches = list(set(re.findall(pattern, text)))
if len(matches) > 0: if len(matches) > 0:
logger.info("[*] Found ID: %s" % (", ".join(matches))) logger.info("[*] Found ID: %s" % (', '.join(matches)))
try: try:
filtered = not all(map(self.pwnedpasswords_test, matches)) filtered = not all(map(self.pwnedpasswords_test, matches))
except Exception as e: except Exception as e:
@ -89,45 +82,40 @@ class Fediverse(Extension):
def vowel_ratio_test(s): def vowel_ratio_test(s):
ratio = self.calculate_vowel_ratio(s) ratio = self.calculate_vowel_ratio(s)
return ratio > 0.2 and ratio < 0.8 return ratio > 0.2 and ratio < 0.8
if all(map(vowel_ratio_test, matches)): if all(map(vowel_ratio_test, matches)):
score += 1 score += 1
strategies.append("VowelRatio10") strategies.append('VowelRatio10')
# check ID with Palindrome4 strategy # check ID with Palindrome4 strategy
if all(map(self.has_palindrome, matches)): if all(map(self.has_palindrome, matches)):
score += 1 score += 1
strategies.append("Palindrome4") strategies.append('Palindrome4')
# check ID with KnownWords4 strategy # check ID with KnownWords4 strategy
if all(map(self.has_known_word, matches)): if all(map(self.has_known_word, matches)):
score += 2 score += 2
strategies.append("KnownWords4") strategies.append('KnownWords4')
# check ID with SearchEngine3 strategy # check ID with SearchEngine3 strategy
if librey_apiurl != "" and all(map(self.search_engine_test, matches)): if librey_apiurl != '' and all(map(self.search_engine_test, matches)):
score += 1 score += 1
strategies.append("SearchEngine3") strategies.append('SearchEngine3')
# check ID with RepeatedNumbers3 strategy # check ID with RepeatedNumbers3 strategy
if all(map(self.repeated_numbers_test, matches)): if all(map(self.repeated_numbers_test, matches)):
score += 1 score += 1
strategies.append("RepeatedNumbers3") strategies.append('RepeatedNumbers3')
# logging score # logging score
with open("score.log", "a") as file: with open('score.log', 'a') as file:
file.write( file.write("%s\t%s\t%s\r\n" % ('+'.join(matches), str(score), '+'.join(strategies)))
"%s\t%s\t%s\r\n"
% ("+".join(matches), str(score), "+".join(strategies))
)
# make decision # make decision
if score > 1: if score > 1:
filtered = False filtered = False
# check an attached images (check images with Not-CAPTCHA strategy) # check an attached images (check images with Not-CAPTCHA strategy)
if truecaptcha_userid != "" and not filtered and len(matches) > 0: if truecaptcha_userid != '' and not filtered and len(matches) > 0:
def webp_to_png_base64(url): def webp_to_png_base64(url):
try: try:
response = requests.get(url) response = requests.get(url)
@ -135,9 +123,7 @@ class Fediverse(Extension):
img_png = img.convert("RGBA") img_png = img.convert("RGBA")
buffered = io.BytesIO() buffered = io.BytesIO()
img_png.save(buffered, format="PNG") img_png.save(buffered, format="PNG")
encoded_image = base64.b64encode(buffered.getvalue()).decode( encoded_image = base64.b64encode(buffered.getvalue()).decode(client_encoding)
client_encoding
)
return encoded_image return encoded_image
except: except:
return None return None
@ -157,15 +143,11 @@ class Fediverse(Extension):
solved = self.truecaptcha_solve(encoded_image) solved = self.truecaptcha_solve(encoded_image)
if solved: if solved:
logger.info("[*] solved: %s" % (solved)) logger.info("[*] solved: %s" % (solved))
filtered = filtered or ( filtered = filtered or (solved.lower() in ['ctkpaarr', 'spam'])
solved.lower() in ["ctkpaarr", "spam"]
)
else: else:
logger.info("[*] not solved") logger.info("[*] not solved")
except Exception as e: except Exception as e:
logger.error( logger.error("[*] Not CAPTCHA strategy not working!", exc_info=e)
"[*] Not CAPTCHA strategy not working!", exc_info=e
)
return filtered return filtered
@ -184,52 +166,44 @@ class Fediverse(Extension):
l5_sha1 = p_sha1[-5:] l5_sha1 = p_sha1[-5:]
# Making GET request using Requests library # Making GET request using Requests library
response = requests.get(f"https://api.pwnedpasswords.com/range/{f5_sha1}") response = requests.get(f'https://api.pwnedpasswords.com/range/{f5_sha1}')
# Checking if request was successful # Checking if request was successful
if response.status_code == 200: if response.status_code == 200:
# Parsing response text # Parsing response text
hashes = response.text.split("\r\n") hashes = response.text.split('\r\n')
# Using list comprehension to find matching hashes # Using list comprehension to find matching hashes
matching_hashes = [ matching_hashes = [line.split(':')[0] for line in hashes if line.endswith(l5_sha1)]
line.split(":")[0] for line in hashes if line.endswith(l5_sha1)
]
# If there are matching hashes, return True, else return False # If there are matching hashes, return True, else return False
return bool(matching_hashes) return bool(matching_hashes)
else: else:
raise Exception( raise Exception("api.pwnedpasswords.com response status: %s" % (str(response.status_code)))
"api.pwnedpasswords.com response status: %s"
% (str(response.status_code))
)
return False return False
# Strategy: Not-CAPTCHA - use truecaptcha.org # Strategy: Not-CAPTCHA - use truecaptcha.org
def truecaptcha_solve(self, encoded_image): def truecaptcha_solve(self, encoded_image):
url = "https://api.apitruecaptcha.org/one/gettext" url = 'https://api.apitruecaptcha.org/one/gettext'
data = { data = {
"userid": truecaptcha_userid, 'userid': truecaptcha_userid,
"apikey": truecaptcha_apikey, 'apikey': truecaptcha_apikey,
"data": encoded_image, 'data': encoded_image,
"mode": "human", 'mode': 'human'
} }
response = requests.post(url=url, json=data) response = requests.post(url = url, json = data)
if response.status_code == 200: if response.status_code == 200:
data = response.json() data = response.json()
if "error_message" in data: if 'error_message' in data:
print("[*] Error: %s" % (data["error_message"])) print ("[*] Error: %s" % (data['error_message']))
return None return None
if "result" in data: if 'result' in data:
return data["result"] return data['result']
else: else:
raise Exception( raise Exception("api.apitruecaptcha.org response status: %s" % (str(response.status_code)))
"api.apitruecaptcha.org response status: %s"
% (str(response.status_code))
)
return None return None
@ -241,10 +215,10 @@ class Fediverse(Extension):
return 0.0 return 0.0
# Count the number of vowels ('a', 'e', 'i', 'o', 'u', 'w', 'y') in the string. # Count the number of vowels ('a', 'e', 'i', 'o', 'u', 'w', 'y') in the string.
vowel_count = sum(1 for char in s if char.lower() in "aeiouwy") vowel_count = sum(1 for char in s if char.lower() in 'aeiouwy')
# Define vowel-ending patterns # Define vowel-ending patterns
vowel_ending_patterns = ["ang", "eng", "ing", "ong", "ung", "ank", "ink", "dge"] vowel_ending_patterns = ['ang', 'eng', 'ing', 'ong', 'ung', 'ank', 'ink', 'dge']
# Count the occurrences of vowel-ending patterns in the string. # Count the occurrences of vowel-ending patterns in the string.
vowel_count += sum(s.count(pattern) for pattern in vowel_ending_patterns) vowel_count += sum(s.count(pattern) for pattern in vowel_ending_patterns)
@ -291,8 +265,8 @@ class Fediverse(Extension):
data = response.json() data = response.json()
if "results_source" in data: if 'results_source' in data:
del data["results_source"] del data['results_source']
num_results = len(data) num_results = len(data)
@ -300,4 +274,4 @@ class Fediverse(Extension):
# Strategy: RepeatedNumbers3 # Strategy: RepeatedNumbers3
def repeated_numbers_test(self, s): def repeated_numbers_test(self, s):
return bool(re.search(r"\d{3,}", s)) return bool(re.search(r'\d{3,}', s))
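
A sketch of the k-anonymity range query that the pwnedpasswords_test() strategy above builds on: only the first five hex digits of the SHA-1 hash are sent to api.pwnedpasswords.com, and the returned SUFFIX:COUNT lines are compared locally. This is the standard scheme; the plugin's own matching differs slightly.

import hashlib

import requests


def pwned_count(candidate: str) -> int:
    sha1 = hashlib.sha1(candidate.encode("utf-8")).hexdigest().upper()
    prefix, suffix = sha1[:5], sha1[5:]
    response = requests.get(f"https://api.pwnedpasswords.com/range/{prefix}")
    response.raise_for_status()
    for line in response.text.splitlines():
        found_suffix, _, count = line.partition(":")
        if found_suffix == suffix:
            return int(count)       # times this string was seen in known breaches
    return 0


if __name__ == "__main__":
    print(pwned_count("password"))  # a well-known string, expect a large count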


@ -15,22 +15,20 @@ import json
from base import Extension from base import Extension
class PortScanner(Extension): class PortScanner(Extension):
def __init__(self): def __init__(self):
self.type = "rpcmethod" self.type = "rpcmethod"
self.method = "scan_ports_by_hosts" self.method = "scan_ports_by_hosts"
self.exported_methods = [] self.exported_methods = []
def dispatch(self, type, id, params, conn): def dispatch(self, type, id, params, conn):
hosts = params["hosts"] hosts = params['hosts']
binpath = params["binpath"] binpath = params['binpath']
nm = nmap.PortScanner(nmap_search_path=(binpath,)) nm = nmap.PortScanner(nmap_search_path=(binpath,))
result = nm.scan(hosts=hosts, arguments="-T5 -sV -p0-65535 --max-retries 0") result = nm.scan(hosts=hosts, arguments='-T5 -sV -p0-65535 --max-retries 0')
return result
return result;
if __name__ == "__main__": if __name__ == "__main__":
main(sys.argv) main(sys.argv)
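
A sketch of the python-nmap call that scan_ports_by_hosts above wraps, assuming both python-nmap and the nmap binary are installed. The target and arguments are toned-down placeholders; the plugin itself scans ports 0-65535 with -T5 -sV.

import nmap


def quick_scan(hosts: str = "127.0.0.1") -> dict:
    scanner = nmap.PortScanner()    # finds nmap on PATH unless nmap_search_path is given
    return scanner.scan(hosts=hosts, arguments="-T4 -F")   # fast scan of common ports


if __name__ == "__main__":
    result = quick_scan()
    print(result["nmap"]["scanstats"])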


@ -18,13 +18,12 @@ from base import Extension, Logger
logger = Logger(name="wayback") logger = Logger(name="wayback")
try: try:
client_encoding = config("CLIENT_ENCODING") client_encoding = config('CLIENT_ENCODING')
except Exception as e: except Exception as e:
logger.error("[*] Invalid configuration", exc_info=e) logger.error("[*] Invalid configuration", exc_info=e)
def get_cached_page_from_google(url): def get_cached_page_from_google(url):
status_code, text = (0, "") status_code, text = (0, '')
# Google Cache URL # Google Cache URL
google_cache_url = "https://webcache.googleusercontent.com/search?q=cache:" + url google_cache_url = "https://webcache.googleusercontent.com/search?q=cache:" + url
@ -34,16 +33,15 @@ def get_cached_page_from_google(url):
# Check if the request was successful (status code 200) # Check if the request was successful (status code 200)
if response.status_code == 200: if response.status_code == 200:
text = response.text # Extract content from response text = response.text # Extract content from response
else: else:
status_code = response.status_code status_code = response.status_code
return status_code, text return status_code, text
# API documentation: https://archive.org/help/wayback_api.php # API documentation: https://archive.org/help/wayback_api.php
def get_cached_page_from_wayback(url): def get_cached_page_from_wayback(url):
status_code, text = (0, "") status_code, text = (0, '')
# Wayback Machine API URL # Wayback Machine API URL
wayback_api_url = "http://archive.org/wayback/available?url=" + url wayback_api_url = "http://archive.org/wayback/available?url=" + url
@ -58,7 +56,7 @@ def get_cached_page_from_wayback(url):
data = response.json() data = response.json()
archived_snapshots = data.get("archived_snapshots", {}) archived_snapshots = data.get("archived_snapshots", {})
closest_snapshot = archived_snapshots.get("closest", {}) closest_snapshot = archived_snapshots.get("closest", {})
# Check if the URL is available in the archive # Check if the URL is available in the archive
if closest_snapshot: if closest_snapshot:
archived_url = closest_snapshot.get("url", "") archived_url = closest_snapshot.get("url", "")
@ -66,7 +64,7 @@ def get_cached_page_from_wayback(url):
# If URL is available, fetch the content of the archived page # If URL is available, fetch the content of the archived page
if archived_url: if archived_url:
archived_page_response = requests.get(archived_url) archived_page_response = requests.get(archived_url)
status_code = archived_page_response.status_code status_code = archived_page_response.status_code;
if status_code == 200: if status_code == 200:
text = archived_page_response.text text = archived_page_response.text
else: else:
@ -80,10 +78,9 @@ def get_cached_page_from_wayback(url):
return status_code, text return status_code, text
class Wayback(Extension): class Wayback(Extension):
def __init__(self): def __init__(self):
self.type = "connector" # this is a connctor self.type = "connector" # this is a connctor
self.connection_type = "wayback" self.connection_type = "wayback"
def connect(self, conn, data, webserver, port, scheme, method, url): def connect(self, conn, data, webserver, port, scheme, method, url):
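
A sketch of the Wayback Machine availability lookup used by get_cached_page_from_wayback() above (API documentation: https://archive.org/help/wayback_api.php). The example URL is a placeholder.

import requests


def latest_snapshot_url(url: str) -> str:
    response = requests.get("http://archive.org/wayback/available", params={"url": url})
    response.raise_for_status()
    closest = response.json().get("archived_snapshots", {}).get("closest", {})
    return closest.get("url", "")    # empty string when nothing is archived


if __name__ == "__main__":
    print(latest_snapshot_url("github.com/gnh1201/caterpillar"))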

server.py (360 changed lines)

@ -7,7 +7,7 @@
# Namyheon Go (Catswords Research) <gnh1201@gmail.com> # Namyheon Go (Catswords Research) <gnh1201@gmail.com>
# https://github.com/gnh1201/caterpillar # https://github.com/gnh1201/caterpillar
# Created at: 2022-10-06 # Created at: 2022-10-06
# Updated at: 2024-07-11 # Updated at: 2024-07-09
# #
import argparse import argparse
@ -32,34 +32,24 @@ from requests.auth import HTTPBasicAuth
from urllib.parse import urlparse from urllib.parse import urlparse
from decouple import config from decouple import config
from base import ( from base import Extension, extract_credentials, jsonrpc2_create_id, jsonrpc2_encode, jsonrpc2_result_encode, Logger
Extension,
extract_credentials,
jsonrpc2_create_id,
jsonrpc2_encode,
jsonrpc2_result_encode,
find_openssl_binpath,
Logger,
)
logger = Logger(name="server") logger = Logger(name="server")
# initialization # initialization
try: try:
listening_port = config("PORT", default=5555, cast=int) listening_port = config('PORT', default=5555, cast=int)
_username, _password, server_url = extract_credentials( _username, _password, server_url = extract_credentials(config('SERVER_URL', default=''))
config("SERVER_URL", default="") server_connection_type = config('SERVER_CONNECTION_TYPE', default='')
) cakey = config('CA_KEY', default='ca.key')
server_connection_type = config("SERVER_CONNECTION_TYPE", default="") cacert = config('CA_CERT', default='ca.crt')
cakey = config("CA_KEY", default="ca.key") certkey = config('CERT_KEY', default='cert.key')
cacert = config("CA_CERT", default="ca.crt") certdir = config('CERT_DIR', default='certs/')
certkey = config("CERT_KEY", default="cert.key") openssl_binpath = config('OPENSSL_BINPATH', default='openssl')
certdir = config("CERT_DIR", default="certs/") client_encoding = config('CLIENT_ENCODING', default='utf-8')
openssl_binpath = config("OPENSSL_BINPATH", default=find_openssl_binpath()) local_domain = config('LOCAL_DOMAIN', default='')
client_encoding = config("CLIENT_ENCODING", default="utf-8") proxy_pass = config('PROXY_PASS', default='')
local_domain = config("LOCAL_DOMAIN", default="") use_extensions = config('USE_EXTENSIONS', default='')
proxy_pass = config("PROXY_PASS", default="")
use_extensions = config("USE_EXTENSIONS", default="")
except KeyboardInterrupt: except KeyboardInterrupt:
logger.warning("[*] User has requested an interrupt") logger.warning("[*] User has requested an interrupt")
logger.warning("[*] Application Exiting.....") logger.warning("[*] Application Exiting.....")
@ -68,12 +58,8 @@ except Exception as e:
logger.error("[*] Failed to initialize:", exc_info=e) logger.error("[*] Failed to initialize:", exc_info=e)
parser = argparse.ArgumentParser() parser = argparse.ArgumentParser()
parser.add_argument( parser.add_argument('--max_conn', help="Maximum allowed connections", default=255, type=int)
"--max_conn", help="Maximum allowed connections", default=255, type=int parser.add_argument('--buffer_size', help="Number of samples to be used", default=8192, type=int)
)
parser.add_argument(
"--buffer_size", help="Number of samples to be used", default=8192, type=int
)
args = parser.parse_args() args = parser.parse_args()
max_connection = args.max_conn max_connection = args.max_conn
@ -83,7 +69,7 @@ resolved_address_list = []
# set environment of Extension # set environment of Extension
Extension.set_buffer_size(buffer_size) Extension.set_buffer_size(buffer_size)
Extension.set_protocol("tcp") Extension.set_protocol('tcp')
# set basic authentication # set basic authentication
auth = None auth = None
@ -92,36 +78,36 @@ if _username:
def parse_first_data(data): def parse_first_data(data):
parsed_data = (b"", b"", b"", b"", b"") parsed_data = (b'', b'', b'', b'', b'')
try: try:
first_line = data.split(b"\n")[0] first_line = data.split(b'\n')[0]
method, url = first_line.split()[0:2] method, url = first_line.split()[0:2]
http_pos = url.find(b"://") # Finding the position of :// http_pos = url.find(b'://') #Finding the position of ://
scheme = b"http" # check http/https or other protocol scheme = b'http' # check http/https or other protocol
if http_pos == -1: if http_pos == -1:
temp = url temp = url
else: else:
temp = url[(http_pos + 3) :] temp = url[(http_pos + 3):]
scheme = url[0:http_pos] scheme = url[0:http_pos]
port_pos = temp.find(b":") port_pos = temp.find(b':')
webserver_pos = temp.find(b"/") webserver_pos = temp.find(b'/')
if webserver_pos == -1: if webserver_pos == -1:
webserver_pos = len(temp) webserver_pos = len(temp)
webserver = b"" webserver = b''
port = -1 port = -1
if port_pos == -1 or webserver_pos < port_pos: if port_pos == -1 or webserver_pos < port_pos:
port = 80 port = 80
webserver = temp[:webserver_pos] webserver = temp[:webserver_pos]
else: else:
port = int((temp[(port_pos + 1) :])[: webserver_pos - port_pos - 1]) port = int((temp[(port_pos + 1):])[:webserver_pos - port_pos - 1])
webserver = temp[:port_pos] webserver = temp[:port_pos]
if port == 443: if port == 443:
scheme = b"https" scheme = b'https'
parsed_data = (webserver, port, scheme, method, url) parsed_data = (webserver, port, scheme, method, url)
except Exception as e: except Exception as e:
@ -133,16 +119,14 @@ def parse_first_data(data):
def conn_string(conn, data, addr): def conn_string(conn, data, addr):
# JSON-RPC 2.0 request # JSON-RPC 2.0 request
def process_jsonrpc2(data): def process_jsonrpc2(data):
jsondata = json.loads(data.decode(client_encoding, errors="ignore")) jsondata = json.loads(data.decode(client_encoding, errors='ignore'))
if jsondata["jsonrpc"] == "2.0": if jsondata['jsonrpc'] == "2.0":
jsonrpc2_server( jsonrpc2_server(conn, jsondata['id'], jsondata['method'], jsondata['params'])
conn, jsondata["id"], jsondata["method"], jsondata["params"]
)
return True return True
return False return False
# JSON-RPC 2.0 request over Socket (stateful) # JSON-RPC 2.0 request over Socket (stateful)
if data.find(b"{") == 0 and process_jsonrpc2(data): if data.find(b'{') == 0 and process_jsonrpc2(data):
# will be close by the client # will be close by the client
return return
@ -152,18 +136,18 @@ def conn_string(conn, data, addr):
# JSON-RPC 2.0 request over HTTP (stateless) # JSON-RPC 2.0 request over HTTP (stateless)
path = urlparse(url.decode(client_encoding)).path path = urlparse(url.decode(client_encoding)).path
if path == "/proxy-cgi/jsonrpc2": if path == "/proxy-cgi/jsonrpc2":
conn.send(b"HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n") conn.send(b'HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n')
pos = data.find(b"\r\n\r\n") pos = data.find(b'\r\n\r\n')
if pos > -1 and process_jsonrpc2(data[pos + 4 :]): if pos > -1 and process_jsonrpc2(data[pos + 4:]):
conn.close() # will be close by the server conn.close() # will be close by the server
return return
# if it is reverse proxy # if it is reverse proxy
if local_domain != "": if local_domain != '':
localserver = local_domain.encode(client_encoding) localserver = local_domain.encode(client_encoding)
if webserver == localserver or data.find(b"\nHost: " + localserver) > -1: if webserver == localserver or data.find(b'\nHost: ' + localserver) > -1:
logger.info("[*] Detected the reverse proxy request: %s" % local_domain) logger.info("[*] Detected the reverse proxy request: %s" % local_domain)
scheme, _webserver, _port = proxy_pass.encode(client_encoding).split(b":") scheme, _webserver, _port = proxy_pass.encode(client_encoding).split(b':')
webserver = _webserver[2:] webserver = _webserver[2:]
port = int(_port.decode(client_encoding)) port = int(_port.decode(client_encoding))
@ -173,7 +157,7 @@ def conn_string(conn, data, addr):
def jsonrpc2_server(conn, id, method, params): def jsonrpc2_server(conn, id, method, params):
if method == "relay_accept": if method == "relay_accept":
accepted_relay[id] = conn accepted_relay[id] = conn
connection_speed = params["connection_speed"] connection_speed = params['connection_speed']
logger.info("[*] connection speed: %s milliseconds" % (str(connection_speed))) logger.info("[*] connection speed: %s milliseconds" % (str(connection_speed)))
while conn.fileno() > -1: while conn.fileno() > -1:
time.sleep(1) time.sleep(1)
@ -182,82 +166,39 @@ def jsonrpc2_server(conn, id, method, params):
else: else:
Extension.dispatch_rpcmethod(method, "call", id, params, conn) Extension.dispatch_rpcmethod(method, "call", id, params, conn)
# return in conn_string() #return in conn_string()
def proxy_connect(webserver, conn): def proxy_connect(webserver, conn):
hostname = webserver.decode(client_encoding) hostname = webserver.decode(client_encoding)
certpath = "%s/%s.crt" % (certdir.rstrip("/"), hostname) certpath = "%s/%s.crt" % (certdir.rstrip('/'), hostname)
if not os.path.exists(certdir): if not os.path.exists(certdir):
os.makedirs(certdir) os.makedirs(certdir)
# https://stackoverflow.com/questions/24055036/handle-https-request-in-proxy-server-by-c-sharp-connect-tunnel # https://stackoverflow.com/questions/24055036/handle-https-request-in-proxy-server-by-c-sharp-connect-tunnel
conn.send(b"HTTP/1.1 200 Connection Established\r\n\r\n") conn.send(b'HTTP/1.1 200 Connection Established\r\n\r\n')
# https://github.com/inaz2/proxy2/blob/master/proxy2.py # https://github.com/inaz2/proxy2/blob/master/proxy2.py
try: try:
if not os.path.isfile(certpath): if not os.path.isfile(certpath):
epoch = "%d" % (time.time() * 1000) epoch = "%d" % (time.time() * 1000)
p1 = Popen( p1 = Popen([openssl_binpath, "req", "-new", "-key", certkey, "-subj", "/CN=%s" % hostname], stdout=PIPE)
[
openssl_binpath,
"req",
"-new",
"-key",
certkey,
"-subj",
"/CN=%s" % hostname,
],
stdout=PIPE,
)
p2 = Popen( p2 = Popen(
[ [openssl_binpath, "x509", "-req", "-days", "3650", "-CA", cacert, "-CAkey", cakey, "-set_serial", epoch,
openssl_binpath, "-out", certpath], stdin=p1.stdout, stderr=PIPE)
"x509",
"-req",
"-days",
"3650",
"-CA",
cacert,
"-CAkey",
cakey,
"-set_serial",
epoch,
"-out",
certpath,
],
stdin=p1.stdout,
stderr=PIPE,
)
p2.communicate() p2.communicate()
except FileNotFoundError as e:
logger.error(
"[*] OpenSSL distribution not found on this system. Skipping certificate issuance.",
exc_info=e,
)
certpath = "default.crt"
except Exception as e: except Exception as e:
logger.error("[*] Skipping certificate issuance.", exc_info=e) logger.error("[*] Skipped generating the certificate.", exc_info=e)
certpath = "default.crt"
# https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server # https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server
# https://docs.python.org/3/library/ssl.html # https://docs.python.org/3/library/ssl.html
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
context.load_cert_chain(certpath, certkey) context.load_cert_chain(certpath, certkey)
try: # https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server
# https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server conn = context.wrap_socket(conn, server_side=True)
conn = context.wrap_socket(conn, server_side=True) data = conn.recv(buffer_size)
data = conn.recv(buffer_size)
except ssl.SSLError as e:
logger.error(
"[*] SSL negotiation failed. Check that the CA certificate is installed.",
exc_info=e,
)
return (conn, b"")
return (conn, data) return (conn, data)
@ -278,19 +219,17 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
logger.info("[*] Started the request. %s" % (str(addr[0]))) logger.info("[*] Started the request. %s" % (str(addr[0])))
# SSL negotiation # SSL negotiation
is_ssl = scheme in [b"https", b"tls", b"ssl"] is_ssl = scheme in [b'https', b'tls', b'ssl']
if is_ssl and method == b"CONNECT": if is_ssl and method == b'CONNECT':
while True: while True:
try: try:
conn, data = proxy_connect(webserver, conn) conn, data = proxy_connect(webserver, conn)
break # success break # success
# except OSError as e: #except OSError as e:
# print ("[*] Retrying SSL negotiation... (%s:%s) %s" % (webserver.decode(client_encoding), str(port), str(e))) # print ("[*] Retrying SSL negotiation... (%s:%s) %s" % (webserver.decode(client_encoding), str(port), str(e)))
except Exception as e: except Exception as e:
raise Exception( raise Exception(
"SSL negotiation failed. (%s:%s) %s" "SSL negotiation failed. (%s:%s) %s" % (webserver.decode(client_encoding), str(port), str(e)))
% (webserver.decode(client_encoding), str(port), str(e))
)
# override data # override data
if is_ssl: if is_ssl:
@ -298,9 +237,9 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
# https://stackoverflow.com/questions/44343739/python-sockets-ssl-eof-occurred-in-violation-of-protocol # https://stackoverflow.com/questions/44343739/python-sockets-ssl-eof-occurred-in-violation-of-protocol
def sock_close(sock, is_ssl=False): def sock_close(sock, is_ssl=False):
# if is_ssl: #if is_ssl:
# sock = sock.unwrap() # sock = sock.unwrap()
# sock.shutdown(socket.SHUT_RDWR) #sock.shutdown(socket.SHUT_RDWR)
sock.close() sock.close()
# Wait to see if there is more data to transmit # Wait to see if there is more data to transmit
@ -314,7 +253,7 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
return return
# send following chunks # send following chunks
buffered = b"" buffered = b''
conn.settimeout(1) conn.settimeout(1)
while True: while True:
try: try:
@ -322,14 +261,12 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
if not chunk: if not chunk:
break break
buffered += chunk buffered += chunk
if proxy_check_filtered( if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
buffered, webserver, port, scheme, method, url
):
sock_close(sock, is_ssl) sock_close(sock, is_ssl)
raise Exception("Filtered request") raise Exception("Filtered request")
sock.send(chunk) sock.send(chunk)
if len(buffered) > buffer_size * 2: if len(buffered) > buffer_size * 2:
buffered = buffered[-buffer_size * 2 :] buffered = buffered[-buffer_size * 2:]
except: except:
break break
@ -342,101 +279,75 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
context.check_hostname = False context.check_hostname = False
context.verify_mode = ssl.CERT_NONE context.verify_mode = ssl.CERT_NONE
sock = context.wrap_socket( sock = context.wrap_socket(sock, server_hostname=webserver.decode(client_encoding))
sock, server_hostname=webserver.decode(client_encoding)
)
sock.connect((webserver, port)) sock.connect((webserver, port))
# sock.sendall(data) #sock.sendall(data)
sendall(sock, conn, data) sendall(sock, conn, data)
else: else:
sock.connect((webserver, port)) sock.connect((webserver, port))
# sock.sendall(data) #sock.sendall(data)
sendall(sock, conn, data) sendall(sock, conn, data)
i = 0 i = 0
is_http_403 = False is_http_403 = False
buffered = b"" buffered = b''
while True: while True:
chunk = sock.recv(buffer_size) chunk = sock.recv(buffer_size)
if not chunk: if not chunk:
break break
if i == 0 and chunk.find(b"HTTP/1.1 403") == 0: if i == 0 and chunk.find(b'HTTP/1.1 403') == 0:
is_http_403 = True is_http_403 = True
break break
buffered += chunk buffered += chunk
if proxy_check_filtered(buffered, webserver, port, scheme, method, url): if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
sock_close(sock, is_ssl) sock_close(sock, is_ssl)
add_filtered_host(webserver.decode(client_encoding), "127.0.0.1") add_filtered_host(webserver.decode(client_encoding), '127.0.0.1')
raise Exception("Filtered response") raise Exception("Filtered response")
conn.send(chunk) conn.send(chunk)
if len(buffered) > buffer_size * 2: if len(buffered) > buffer_size * 2:
buffered = buffered[-buffer_size * 2 :] buffered = buffered[-buffer_size * 2:]
i += 1 i += 1
# when blocked # when blocked
if is_http_403: if is_http_403:
logger.warning( logger.warning("[*] Blocked the request by remote server: %s" % (webserver.decode(client_encoding)))
"[*] Blocked the request by remote server: %s"
% (webserver.decode(client_encoding))
)
def bypass_callback(response, *args, **kwargs): def bypass_callback(response, *args, **kwargs):
if response.status_code != 200: if response.status_code != 200:
conn.sendall(b'HTTP/1.1 403 Forbidden\r\n\r\n{"status":403}') conn.sendall(b"HTTP/1.1 403 Forbidden\r\n\r\n{\"status\":403}")
return return
# https://stackoverflow.com/questions/20658572/python-requests-print-entire-http-request-raw # https://stackoverflow.com/questions/20658572/python-requests-print-entire-http-request-raw
format_headers = lambda d: "\r\n".join( format_headers = lambda d: '\r\n'.join(f'{k}: {v}' for k, v in d.items())
f"{k}: {v}" for k, v in d.items()
)
first_data = ( first_data = textwrap.dedent('HTTP/1.1 {res.status_code} {res.reason}\r\n{reshdrs}\r\n\r\n').format(
textwrap.dedent( res=response,
"HTTP/1.1 {res.status_code} {res.reason}\r\n{reshdrs}\r\n\r\n" reshdrs=format_headers(response.headers),
) ).encode(client_encoding)
.format(
res=response,
reshdrs=format_headers(response.headers),
)
.encode(client_encoding)
)
conn.send(first_data) conn.send(first_data)
for chunk in response.iter_content(chunk_size=buffer_size): for chunk in response.iter_content(chunk_size=buffer_size):
conn.send(chunk) conn.send(chunk)
if is_ssl and method == b"GET": if is_ssl and method == b'GET':
logger.info("[*] Trying to bypass blocked request...") logger.info("[*] Trying to bypass blocked request...")
remote_url = "%s://%s%s" % ( remote_url = "%s://%s%s" % (
scheme.decode(client_encoding), scheme.decode(client_encoding), webserver.decode(client_encoding), url.decode(client_encoding))
webserver.decode(client_encoding), requests.get(remote_url, stream=True, verify=False, hooks={'response': bypass_callback})
url.decode(client_encoding),
)
requests.get(
remote_url,
stream=True,
verify=False,
hooks={"response": bypass_callback},
)
else: else:
conn.sendall(b'HTTP/1.1 403 Forbidden\r\n\r\n{"status":403}') conn.sendall(b"HTTP/1.1 403 Forbidden\r\n\r\n{\"status\":403}")
sock_close(sock, is_ssl) sock_close(sock, is_ssl)
logger.info( logger.info("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))
"[*] Received %s chunks. (%s bytes per chunk)"
% (str(i), str(buffer_size))
)
# stateful mode # stateful mode
elif server_connection_type == "stateful": elif server_connection_type == "stateful":
proxy_data = { proxy_data = {
"headers": { 'headers': {
"User-Agent": "php-httpproxy/0.1.5 (Client; Python " "User-Agent": "php-httpproxy/0.1.5 (Client; Python " + python_version() + "; abuse@catswords.net)",
+ python_version()
+ "; abuse@catswords.net)",
}, },
"data": { 'data': {
"buffer_size": str(buffer_size), "buffer_size": str(buffer_size),
"client_address": str(addr[0]), "client_address": str(addr[0]),
"client_port": str(listening_port), "client_port": str(listening_port),
@ -444,56 +355,41 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
"remote_address": webserver.decode(client_encoding), "remote_address": webserver.decode(client_encoding),
"remote_port": str(port), "remote_port": str(port),
"scheme": scheme.decode(client_encoding), "scheme": scheme.decode(client_encoding),
"datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"), "datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
}, }
} }
# get client address # get client address
logger.info("[*] resolving the client address...") logger.info("[*] resolving the client address...")
while len(resolved_address_list) == 0: while len(resolved_address_list) == 0:
try: try:
_, query_data = jsonrpc2_encode("get_client_address") _, query_data = jsonrpc2_encode('get_client_address')
query = requests.post( query = requests.post(server_url, headers=proxy_data['headers'], data=query_data, timeout=1,
server_url, auth=auth)
headers=proxy_data["headers"],
data=query_data,
timeout=1,
auth=auth,
)
if query.status_code == 200: if query.status_code == 200:
result = query.json()["result"] result = query.json()['result']
resolved_address_list.append(result["data"]) resolved_address_list.append(result['data'])
logger.info("[*] resolved IP: %s" % (result["data"])) logger.info("[*] resolved IP: %s" % (result['data']))
except requests.exceptions.ReadTimeout as e: except requests.exceptions.ReadTimeout as e:
pass pass
proxy_data["data"]["client_address"] = resolved_address_list[0] proxy_data['data']['client_address'] = resolved_address_list[0]
# build a tunnel # build a tunnel
def relay_connect(id, raw_data, proxy_data): def relay_connect(id, raw_data, proxy_data):
try: try:
# The tunnel connect forever until the client destroy it # The tunnel connect forever until the client destroy it
relay = requests.post( relay = requests.post(server_url, headers=proxy_data['headers'], data=raw_data, stream=True,
server_url, timeout=None, auth=auth)
headers=proxy_data["headers"],
data=raw_data,
stream=True,
timeout=None,
auth=auth,
)
for chunk in relay.iter_content(chunk_size=buffer_size): for chunk in relay.iter_content(chunk_size=buffer_size):
jsondata = json.loads( jsondata = json.loads(chunk.decode(client_encoding, errors='ignore'))
chunk.decode(client_encoding, errors="ignore") if jsondata['jsonrpc'] == "2.0" and ("error" in jsondata):
) e = jsondata['error']
if jsondata["jsonrpc"] == "2.0" and ("error" in jsondata): logger.error("[*] Error received from the relay server: (%s) %s" % (
e = jsondata["error"] str(e['code']), str(e['message'])))
logger.error(
"[*] Error received from the relay server: (%s) %s"
% (str(e["code"]), str(e["message"]))
)
except requests.exceptions.ReadTimeout as e: except requests.exceptions.ReadTimeout as e:
pass pass
id, raw_data = jsonrpc2_encode("relay_connect", proxy_data["data"]) id, raw_data = jsonrpc2_encode('relay_connect', proxy_data['data'])
start_new_thread(relay_connect, (id, raw_data, proxy_data)) start_new_thread(relay_connect, (id, raw_data, proxy_data))
# wait for the relay # wait for the relay
@ -515,7 +411,7 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
# get response # get response
i = 0 i = 0
buffered = b"" buffered = b''
while True: while True:
chunk = sock.recv(buffer_size) chunk = sock.recv(buffer_size)
if not chunk: if not chunk:
@ -523,29 +419,24 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
buffered += chunk buffered += chunk
if proxy_check_filtered(buffered, webserver, port, scheme, method, url): if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
sock_close(sock, is_ssl) sock_close(sock, is_ssl)
add_filtered_host(webserver.decode(client_encoding), "127.0.0.1") add_filtered_host(webserver.decode(client_encoding), '127.0.0.1')
raise Exception("Filtered response") raise Exception("Filtered response")
conn.send(chunk) conn.send(chunk)
if len(buffered) > buffer_size * 2: if len(buffered) > buffer_size * 2:
buffered = buffered[-buffer_size * 2 :] buffered = buffered[-buffer_size * 2:]
i += 1 i += 1
sock_close(sock, is_ssl) sock_close(sock, is_ssl)
logger.info( logger.info("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))
"[*] Received %s chunks. (%s bytes per chunk)"
% (str(i), str(buffer_size))
)
# stateless mode # stateless mode
elif server_connection_type == "stateless": elif server_connection_type == "stateless":
proxy_data = { proxy_data = {
"headers": { 'headers': {
"User-Agent": "php-httpproxy/0.1.5 (Client; Python " "User-Agent": "php-httpproxy/0.1.5 (Client; Python " + python_version() + "; abuse@catswords.net)",
+ python_version()
+ "; abuse@catswords.net)",
}, },
"data": { 'data': {
"buffer_size": str(buffer_size), "buffer_size": str(buffer_size),
"request_data": base64.b64encode(data).decode(client_encoding), "request_data": base64.b64encode(data).decode(client_encoding),
"request_length": str(len(data)), "request_length": str(len(data)),
@ -555,36 +446,27 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
"remote_address": webserver.decode(client_encoding), "remote_address": webserver.decode(client_encoding),
"remote_port": str(port), "remote_port": str(port),
"scheme": scheme.decode(client_encoding), "scheme": scheme.decode(client_encoding),
"datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"), "datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
}, }
} }
_, raw_data = jsonrpc2_encode("relay_request", proxy_data["data"]) _, raw_data = jsonrpc2_encode('relay_request', proxy_data['data'])
logger.info("[*] Sending %s bytes..." % (str(len(raw_data)))) logger.info("[*] Sending %s bytes..." % (str(len(raw_data))))
i = 0 i = 0
relay = requests.post( relay = requests.post(server_url, headers=proxy_data['headers'], data=raw_data, stream=True, auth=auth)
server_url, buffered = b''
headers=proxy_data["headers"],
data=raw_data,
stream=True,
auth=auth,
)
buffered = b""
for chunk in relay.iter_content(chunk_size=buffer_size): for chunk in relay.iter_content(chunk_size=buffer_size):
buffered += chunk buffered += chunk
if proxy_check_filtered(buffered, webserver, port, scheme, method, url): if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
add_filtered_host(webserver.decode(client_encoding), "127.0.0.1") add_filtered_host(webserver.decode(client_encoding), '127.0.0.1')
raise Exception("Filtered response") raise Exception("Filtered response")
conn.send(chunk) conn.send(chunk)
if len(buffered) > buffer_size * 2: if len(buffered) > buffer_size * 2:
buffered = buffered[-buffer_size * 2 :] buffered = buffered[-buffer_size * 2:]
i += 1 i += 1
logger.info( logger.info("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))
"[*] Received %s chunks. (%s bytes per chunk)"
% (str(i), str(buffer_size))
)
# nothing at all # nothing at all
else: else:
@ -599,27 +481,27 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
except Exception as e: except Exception as e:
print(traceback.format_exc()) print(traceback.format_exc())
logger.error("[*] Exception on requesting the data.", exc_info=e) logger.error("[*] Exception on requesting the data.", exc_info=e)
conn.sendall(b'HTTP/1.1 403 Forbidden\r\n\r\n{"status":403}') conn.sendall(b"HTTP/1.1 403 Forbidden\r\n\r\n{\"status\":403}")
conn.close() conn.close()
# journaling a filtered hosts # journaling a filtered hosts
def add_filtered_host(domain, ip_address): def add_filtered_host(domain, ip_address):
hosts_path = "./filtered.hosts" hosts_path = './filtered.hosts'
with open(hosts_path, "r") as file: with open(hosts_path, 'r') as file:
lines = file.readlines() lines = file.readlines()
domain_exists = any(domain in line for line in lines) domain_exists = any(domain in line for line in lines)
if not domain_exists: if not domain_exists:
lines.append(f"{ip_address}\t{domain}\n") lines.append(f"{ip_address}\t{domain}\n")
with open(hosts_path, "w") as file: with open(hosts_path, 'w') as file:
file.writelines(lines) file.writelines(lines)
def start(): # Main Program def start(): #Main Program
try: try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(("", listening_port)) sock.bind(('', listening_port))
sock.listen(max_connection) sock.listen(max_connection)
logger.info("[*] Server started successfully [ %d ]" % listening_port) logger.info("[*] Server started successfully [ %d ]" % listening_port)
except Exception as e: except Exception as e:
@ -628,9 +510,9 @@ def start(): # Main Program
while True: while True:
try: try:
conn, addr = sock.accept() # Accept connection from client browser conn, addr = sock.accept() #Accept connection from client browser
data = conn.recv(buffer_size) # Recieve client data data = conn.recv(buffer_size) #Recieve client data
start_new_thread(conn_string, (conn, data, addr)) # Starting a thread start_new_thread(conn_string, (conn, data, addr)) #Starting a thread
except KeyboardInterrupt: except KeyboardInterrupt:
sock.close() sock.close()
logger.info("[*] Graceful Shutdown") logger.info("[*] Graceful Shutdown")
@ -641,7 +523,7 @@ if __name__ == "__main__":
# Fix Value error # Fix Value error
if use_extensions: if use_extensions:
# load extensions # load extensions
for s in use_extensions.split(","): for s in use_extensions.split(','):
Extension.register(s) Extension.register(s)
else: else:
logger.warning("[*] No extensions registered") logger.warning("[*] No extensions registered")

smtp.py (56 changed lines)

@ -20,22 +20,14 @@ import requests
from decouple import config from decouple import config
from requests.auth import HTTPBasicAuth from requests.auth import HTTPBasicAuth
from base import ( from base import extract_credentials, jsonrpc2_create_id, jsonrpc2_encode, jsonrpc2_result_encode, Logger
extract_credentials,
jsonrpc2_create_id,
jsonrpc2_encode,
jsonrpc2_result_encode,
Logger,
)
logger = Logger(name="smtp") logger = Logger(name="smtp")
try: try:
smtp_host = config("SMTP_HOST", default="127.0.0.1") smtp_host = config('SMTP_HOST', default='127.0.0.1')
smtp_port = config("SMTP_PORT", default=25, cast=int) smtp_port = config('SMTP_PORT', default=25, cast=int)
_username, _password, server_url = extract_credentials( _username, _password, server_url = extract_credentials(config('SERVER_URL', default=''))
config("SERVER_URL", default="")
)
except KeyboardInterrupt: except KeyboardInterrupt:
logger.warning("[*] User has requested an interrupt") logger.warning("[*] User has requested an interrupt")
logger.warning("[*] Application Exiting.....") logger.warning("[*] Application Exiting.....")
@@ -45,7 +37,6 @@ auth = None
if _username: if _username:
auth = HTTPBasicAuth(_username, _password) auth = HTTPBasicAuth(_username, _password)
class CaterpillarSMTPServer(SMTPServer): class CaterpillarSMTPServer(SMTPServer):
def __init__(self, localaddr, remoteaddr): def __init__(self, localaddr, remoteaddr):
self.__class__.smtpd_hostname = "CaterpillarSMTPServer" self.__class__.smtpd_hostname = "CaterpillarSMTPServer"
@@ -53,54 +44,47 @@ class CaterpillarSMTPServer(SMTPServer):
super().__init__(localaddr, remoteaddr) super().__init__(localaddr, remoteaddr)
def process_message(self, peer, mailfrom, rcpttos, data, **kwargs): def process_message(self, peer, mailfrom, rcpttos, data, **kwargs):
message_lines = data.decode("utf-8").split("\n") message_lines = data.decode('utf-8').split('\n')
subject = "" subject = ''
to = "" to = ''
for line in message_lines: for line in message_lines:
pos = line.find(":") pos = line.find(':')
if pos > -1: if pos > -1:
k = line[0:pos] k = line[0:pos]
v = line[pos + 1 :] v = line[pos+1:]
if k == "Subject": if k == 'Subject':
subject = v subject = v
elif k == "To": elif k == 'To':
to = v to = v
# build the request data # build the request data
proxy_data = { proxy_data = {
"headers": { 'headers': {
"User-Agent": "php-httpproxy/0.1.6 (Client; Python " "User-Agent": "php-httpproxy/0.1.6 (Client; Python " + python_version() + "; Caterpillar; abuse@catswords.net)",
+ python_version()
+ "; Caterpillar; abuse@catswords.net)",
}, },
"data": { 'data': {
"to": to, "to": to,
"from": mailfrom, "from": mailfrom,
"subject": subject, "subject": subject,
"message": data.decode("utf-8"), "message": data.decode('utf-8')
}, }
} }
_, raw_data = jsonrpc2_encode("relay_sendmail", proxy_data["data"]) _, raw_data = jsonrpc2_encode('relay_sendmail', proxy_data['data'])
# send HTTP POST request # send HTTP POST request
try: try:
response = requests.post( response = requests.post(server_url, headers=proxy_data['headers'], data=raw_data, auth=auth)
server_url, headers=proxy_data["headers"], data=raw_data, auth=auth
)
if response.status_code == 200: if response.status_code == 200:
type, id, method, rpcdata = jsonrpc2_decode(response.text) type, id, method, rpcdata = jsonrpc2_decode(response.text)
if rpcdata["success"]: if rpcdata['success']:
logger.info("[*] Email sent successfully.") logger.info("[*] Email sent successfully.")
else: else:
raise Exception( raise Exception("(%s) %s" % (str(rpcdata['code']), rpcdata['message']))
"(%s) %s" % (str(rpcdata["code"]), rpcdata["message"])
)
else: else:
raise Exception("Status %s" % (str(response.status_code))) raise Exception("Status %s" % (str(response.status_code)))
except Exception as e: except Exception as e:
logger.error("[*] Failed to send email", exc_info=e) logger.error("[*] Failed to send email", exc_info=e)
# Start SMTP server # Start SMTP server
smtp_server = CaterpillarSMTPServer((smtp_host, smtp_port), None) smtp_server = CaterpillarSMTPServer((smtp_host, smtp_port), None)
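process_message() flattens the incoming SMTP message into a relay_sendmail JSON-RPC 2.0 call and POSTs it to SERVER_URL (note it also relies on a jsonrpc2_decode helper that is not in the shown import list). A minimal sketch of the request body such a call would carry; the encoder and its UUID id scheme below are stand-ins, not base.py's implementation, and the addresses are placeholders:

import json
import uuid

def encode_relay_sendmail(to, mailfrom, subject, message):
    # stand-in for jsonrpc2_encode("relay_sendmail", ...); the random-UUID id is
    # only an example, base.py derives its own request id
    params = {"to": to, "from": mailfrom, "subject": subject, "message": message}
    data = {"jsonrpc": "2.0", "method": "relay_sendmail", "params": params, "id": uuid.uuid4().hex}
    return data["id"], json.dumps(data)

_, raw_data = encode_relay_sendmail(
    "user@example.com", "noreply@example.com", "Hello", "Hi there"   # placeholder values
)
print(raw_data)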

web.py

@@ -19,51 +19,53 @@ from base import Extension, jsonrpc2_error_encode, Logger
# TODO: implement a custom Flask handler later # TODO: implement a custom Flask handler later
logger = Logger(name="web") logger = Logger(name="web")
app = Flask(__name__) app = Flask(__name__)
app.config["UPLOAD_FOLDER"] = "data/" app.config['UPLOAD_FOLDER'] = 'data/'
if not os.path.exists(app.config["UPLOAD_FOLDER"]): if not os.path.exists(app.config['UPLOAD_FOLDER']):
os.makedirs(app.config["UPLOAD_FOLDER"]) os.makedirs(app.config['UPLOAD_FOLDER'])
@app.route("/") @app.route('/')
def upload_form(): def upload_form():
return render_template("upload.html") return render_template('upload.html')
@app.route("/upload", methods=["POST"]) @app.route('/upload', methods=['POST'])
def process_upload(): def process_upload():
# make connection profile from Flask request # make connection profile from Flask request
conn = Connection(request) conn = Connection(request)
# pass to the method # pass to the method
method = request.form["method"] method = request.form['method']
filename = request.files["file"].filename filename = request.files['file'].filename
params = {"filename": filename} params = {
'filename': filename
}
# just do it # just do it
return Extension.dispatch_rpcmethod(method, "call", "", params, conn) return Extension.dispatch_rpcmethod(method, 'call', '', params, conn)
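The /upload route reads the extension method name and the uploaded file from the multipart form, then dispatches them through Extension.dispatch_rpcmethod. A hedged usage sketch with the requests library; the method name "analyze_file" and the 127.0.0.1:5555 address are assumptions, not values taken from this repository:

import requests

# POST a file to the upload gateway; "analyze_file" is a hypothetical extension method
with open("sample.bin", "rb") as f:
    resp = requests.post(
        "http://127.0.0.1:5555/upload",          # assumed local gateway address/port
        data={"method": "analyze_file"},
        files={"file": ("sample.bin", f)},
    )
print(resp.status_code, resp.text)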
@app.route("/jsonrpc2", methods=["POST"]) @app.route('/jsonrpc2', methods=['POST'])
def process_jsonrpc2(): def process_jsonrpc2():
# make connection profile from Flask request # make connection profile from Flask request
conn = Connection(request) conn = Connection(request)
# JSON-RPC 2.0 request # JSON-RPC 2.0 request
jsondata = request.get_json(silent=True) jsondata = request.get_json(silent=True)
if jsondata["jsonrpc"] == "2.0": if jsondata['jsonrpc'] == "2.0":
return Extension.dispatch_rpcmethod( return Extension.dispatch_rpcmethod(jsondata['method'], 'call', jsondata['id'], jsondata['params'], conn)
jsondata["method"], "call", jsondata["id"], jsondata["params"], conn
)
# when error # when error
return jsonrpc2_error_encode({"message": "Not valid JSON-RPC 2.0 request"}) return jsonrpc2_error_encode({
'message': "Not valid JSON-RPC 2.0 request"
})
def jsonrpc2_server(conn, id, method, params): def jsonrpc2_server(conn, id, method, params):
return Extension.dispatch_rpcmethod(method, "call", id, params, conn) return Extension.dispatch_rpcmethod(method, "call", id, params, conn)
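The /jsonrpc2 route accepts only bodies whose jsonrpc field is exactly "2.0" and forwards method, id, and params to the matching extension. A minimal sketch of a conforming request; the method name and gateway address are hypothetical:

import requests

payload = {
    "jsonrpc": "2.0",
    "method": "fakeext.status",     # hypothetical extension method name
    "params": {"verbose": True},
    "id": "1",
}
resp = requests.post("http://127.0.0.1:5555/jsonrpc2", json=payload)  # assumed local gateway
print(resp.text)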
class Connection: class Connection():
def send(self, data): def send(self, data):
self.messages.append(data) self.messages.append(data)
@@ -81,9 +83,9 @@ class Connection:
if __name__ == "__main__": if __name__ == "__main__":
# initialization # initialization
try: try:
listening_port = config("PORT", default=5555, cast=int) listening_port = config('PORT', default=5555, cast=int)
client_encoding = config("CLIENT_ENCODING", default="utf-8") client_encoding = config('CLIENT_ENCODING', default='utf-8')
use_extensions = config("USE_EXTENSIONS", default="") use_extensions = config('USE_EXTENSIONS', default='')
except KeyboardInterrupt: except KeyboardInterrupt:
logger.warning("[*] User has requested an interrupt") logger.warning("[*] User has requested an interrupt")
logger.warning("[*] Application Exiting.....") logger.warning("[*] Application Exiting.....")
@@ -92,14 +94,14 @@ if __name__ == "__main__":
logger.error("[*] Failed to initialize", exc_info=e) logger.error("[*] Failed to initialize", exc_info=e)
# set environment of Extension # set environment of Extension
Extension.set_protocol("http") Extension.set_protocol('http')
# Fix Value error # Fix Value error
if use_extensions: if use_extensions:
# load extensions # load extensions
for s in use_extensions.split(","): for s in use_extensions.split(','):
Extension.register(s) Extension.register(s)
else: else:
logger.warning("[*] No extensions registered") logger.warning("[*] No extensions registered")
app.run(debug=True, host="0.0.0.0", port=listening_port) app.run(debug=True, host='0.0.0.0', port=listening_port)
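The Connection class above imitates the socket interface the extensions expect: send() collects outgoing bytes in a list instead of writing to a TCP socket, presumably so the same dispatch path can serve both the raw proxy socket and this Flask gateway. A minimal sketch of that adapter idea, with a fake extension method standing in for a real dispatched handler:

class FakeConnection:
    # collects outgoing data in memory, like web.py's Connection shim does
    def __init__(self):
        self.messages = []

    def send(self, data):
        self.messages.append(data)

    def reply(self):
        return b"".join(self.messages)

def fake_extension_method(conn, params):
    # stand-in for a dispatched extension method that writes to `conn`
    conn.send(b'{"jsonrpc": "2.0", "result": {"ok": true}, "id": "1"}')

conn = FakeConnection()
fake_extension_method(conn, {"filename": "sample.bin"})
print(conn.reply())   # the bytes that would become the HTTP response body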