diff --git a/.gitignore b/.gitignore index a4124b6..fcf261f 100644 --- a/.gitignore +++ b/.gitignore @@ -4,7 +4,6 @@ logs/ settings.ini .env *.crt -*.key ### Python ### # Byte-compiled / optimized / DLL files diff --git a/base.py b/base.py index 467a90a..a205e6b 100644 --- a/base.py +++ b/base.py @@ -8,7 +8,7 @@ # Euiseo Cha (Wonkwang University) # https://github.com/gnh1201/caterpillar # Created at: 2024-05-20 -# Updated at: 2024-07-11 +# Updated at: 2024-07-09 # import logging @@ -17,25 +17,21 @@ import json import os import re import importlib -import subprocess -import platform from datetime import datetime, timezone from typing import Union, List -client_encoding = "utf-8" +client_encoding = 'utf-8' def extract_credentials(url): - pattern = re.compile( - r"(?P<scheme>\w+://)?(?P<username>[^:/]+):(?P<password>[^@]+)@(?P<url>.+)" - ) + pattern = re.compile(r'(?P<scheme>\w+://)?(?P<username>[^:/]+):(?P<password>[^@]+)@(?P<url>.+)') match = pattern.match(url) if match: - scheme = match.group("scheme") if match.group("scheme") else "https://" - username = match.group("username") - password = match.group("password") - url = match.group("url") + scheme = match.group('scheme') if match.group('scheme') else 'https://' + username = match.group('username') + password = match.group('password') + url = match.group('url') return username, password, scheme + url else: return None, None, url @@ -46,76 +42,35 @@ def jsonrpc2_create_id(data): def jsonrpc2_encode(method, params=None): - data = {"jsonrpc": "2.0", "method": method, "params": params} + data = { + "jsonrpc": "2.0", + "method": method, + "params": params + } id = jsonrpc2_create_id(data) - data["id"] = id + data['id'] = id return (id, json.dumps(data)) -def jsonrpc2_result_encode(result, id=""): - data = {"jsonrpc": "2.0", "result": result, "id": id} +def jsonrpc2_result_encode(result, id=''): + data = { + "jsonrpc": "2.0", + "result": result, + "id": id + } return json.dumps(data) -def jsonrpc2_error_encode(error, id=""): - data = {"jsonrpc": "2.0", "error": error, "id": id} +def jsonrpc2_error_encode(error, id=''): + data = { + "jsonrpc": "2.0", + "error": error, + "id": id + } return json.dumps(data) -def find_openssl_binpath(): - system = platform.system() - - if system == "Windows": - possible_paths = [ - os.path.join( - os.getenv("ProgramFiles", "C:\\Program Files"), - "OpenSSL-Win64", - "bin", - "openssl.exe", - ), - os.path.join( - os.getenv("ProgramFiles", "C:\\Program Files"), - "OpenSSL-Win32", - "bin", - "openssl.exe", - ), - os.path.join( - os.getenv("ProgramFiles(x86)", "C:\\Program Files (x86)"), - "OpenSSL-Win32", - "bin", - "openssl.exe", - ), - os.path.join( - os.getenv("ProgramW6432", "C:\\Program Files"), - "OpenSSL-Win64", - "bin", - "openssl.exe", - ), - os.path.join( - os.getenv("ProgramW6432", "C:\\Program Files"), - "OpenSSL-Win32", - "bin", - "openssl.exe", - ), - ] - for path in possible_paths: - if os.path.exists(path): - return path - else: - try: - result = subprocess.run( - ["which", "openssl"], stdout=subprocess.PIPE, stderr=subprocess.PIPE - ) - path = result.stdout.decode().strip() - if path: - return path - except Exception as e: - pass - - return "openssl" - - -class Extension: +class Extension(): extensions = [] protocols = [] buffer_size = 8192 @@ -130,8 +85,8 @@ class Extension: @classmethod def register(cls, s): - module_name, class_name = s.strip().split(".")[0:2] - module_path = "plugins." + module_name + module_name, class_name = s.strip().split('.')[0:2] + module_path = 'plugins.'
+ module_name try: module = importlib.import_module(module_path) @@ -151,9 +106,7 @@ class Extension: @classmethod def get_rpcmethod(cls, method): for extension in cls.extensions: - is_exported_method = (method == extension.method) or ( - method in extension.exported_methods - ) + is_exported_method = (method == extension.method) or (method in extension.exported_methods) if extension.type == "rpcmethod" and is_exported_method: return extension return None @@ -172,25 +125,24 @@ class Extension: @classmethod def get_connector(cls, connection_type): for extension in cls.extensions: - if ( - extension.type == "connector" - and extension.connection_type == connection_type - ): + if extension.type == "connector" and extension.connection_type == connection_type: return extension return None @classmethod def send_accept(cls, conn, method, success=True): - if "tcp" in cls.protocols: - _, message = jsonrpc2_encode(f"{method}_accept", {"success": success}) + if 'tcp' in cls.protocols: + _, message = jsonrpc2_encode(f"{method}_accept", { + "success": success + }) conn.send(message.encode(client_encoding)) print(f"Accepted request with {cls.protocols[0]} protocol") @classmethod def readall(cls, conn): - if "tcp" in cls.protocols: - data = b"" + if 'tcp' in cls.protocols: + data = b'' while True: try: chunk = conn.recv(cls.buffer_size) @@ -202,13 +154,13 @@ class Extension: return data - elif "http" in cls.protocols: + elif 'http' in cls.protocols: # empty binary when an file not exists - if "file" not in conn.request.files: - return b"" + if 'file' not in conn.request.files: + return b'' # read an uploaded file with binary mode - file = conn.request.files["file"] + file = conn.request.files['file'] return file.read() def __init__(self): @@ -230,33 +182,25 @@ class Extension: class Logger(logging.Logger): def __init__(self, name: str, level: int = logging.NOTSET): super().__init__(name, level) - self.formatter = logging.Formatter( - "[%(asctime)s] %(levelname)s %(module)s: %(message)s" - ) + self.formatter = logging.Formatter('[%(asctime)s] %(levelname)s %(module)s: %(message)s') if not os.path.isdir("logs"): os.mkdir("logs") stream_handler = logging.StreamHandler() - file_handler = logging.FileHandler( - "logs/" + name + "-" + self._generate_timestamp() + ".log" - ) + file_handler = logging.FileHandler('logs/' + name + "-" + self._generate_timestamp() + '.log') self._set_formatters([stream_handler, file_handler]) self._add_handlers([stream_handler, file_handler]) @staticmethod def _generate_timestamp(): - date = datetime.now(tz=timezone.utc).strftime("%Y-%m-%d") + date = datetime.now(tz=timezone.utc).strftime('%Y-%m-%d') return date - def _set_formatters( - self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]] - ): + def _set_formatters(self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]]): for handler in handlers: handler.setFormatter(self.formatter) - def _add_handlers( - self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]] - ): + def _add_handlers(self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]]): for handler in handlers: self.addHandler(handler) diff --git a/download_certs.bat b/download_certs.bat index e6de5c0..5d429c2 100644 --- a/download_certs.bat +++ b/download_certs.bat @@ -2,7 +2,6 @@ bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.crt %CD%\ca.crt bitsadmin /transfer certsjob /download /priority normal 
https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.key %CD%\ca.key bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/cert.key %CD%\cert.key -bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/cert.key %CD%\default.crt REM echo if you want generate a certificate... REM openssl genrsa -out ca.key 2048 diff --git a/download_certs.sh b/download_certs.sh index 7e1c303..7a5e68a 100644 --- a/download_certs.sh +++ b/download_certs.sh @@ -2,7 +2,6 @@ wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.crt wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.key wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/cert.key -wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/default.crt # echo "if you want generate a certificate..." #openssl genrsa -out ca.key 2048 diff --git a/plugins/bio.py b/plugins/bio.py index 3e1b4fd..57e821e 100644 --- a/plugins/bio.py +++ b/plugins/bio.py @@ -15,7 +15,6 @@ from Bio.SeqUtils import gc_fraction from base import Extension - def _analyze_sequence(sequence) -> dict[str, str]: """ Analyze a given DNA sequence to provide various nucleotide transformations and translations. @@ -62,7 +61,7 @@ class PyBio(Extension): self.exported_methods = ["analyze_sequence", "gc_content_calculation"] def dispatch(self, type, id, params, conn): - conn.send(b"Greeting! dispatch") + conn.send(b'Greeting! dispatch') def analyze_sequence(self, type, id, params, conn): """ @@ -89,7 +88,7 @@ class PyBio(Extension): "transcription": "AUGCGUACGUAGCUAGCUAGCGUAGCUAGCUGACU", "translation": "MRT*LASVAS*"} """ - result = _analyze_sequence(params["sequence"]) + result = _analyze_sequence(params['sequence']) return result def gc_content_calculation(self, type, id, params, conn): @@ -104,5 +103,5 @@ class PyBio(Extension): :return: Dictionary containing the GC content as a float. Example: {"gc_content": 0.5142857142857142} """ - result = _gc_content_calculation(params["sequence"]) + result = _gc_content_calculation(params['sequence']) return result diff --git a/plugins/container.py b/plugins/container.py index fcb1e17..c916157 100644 --- a/plugins/container.py +++ b/plugins/container.py @@ -28,15 +28,15 @@ class Container(Extension): def dispatch(self, type, id, params, conn): logger.info("[*] Greeting! dispatch") - conn.send(b"Greeting! dispatch") + conn.send(b'Greeting! 
dispatch') def container_run(self, type, id, params, conn): - devices = params["devices"] - image = params["image"] - devices = params["devices"] - name = params["name"] - environment = params["environment"] - volumes = params["volumes"] + devices = params['devices'] + image = params['image'] + devices = params['devices'] + name = params['name'] + environment = params['environment'] + volumes = params['volumes'] container = self.client.containers.run( image, @@ -44,16 +44,17 @@ class Container(Extension): name=name, volumes=volumes, environment=environment, - detach=True, + detach=True ) container.logs() logger.info("[*] Running...") def container_stop(self, type, id, params, conn): - name = params["name"] + name = params['name'] container = self.client.containers.get(name) container.stop() logger.info("[*] Stopped") + diff --git a/plugins/fediverse.py b/plugins/fediverse.py index 7294177..6ab6017 100644 --- a/plugins/fediverse.py +++ b/plugins/fediverse.py @@ -25,55 +25,48 @@ from base import Extension, Logger logger = Logger(name="fediverse") try: - client_encoding = config("CLIENT_ENCODING", default="utf-8") - truecaptcha_userid = config("TRUECAPTCHA_USERID") # truecaptcha.org - truecaptcha_apikey = config("TRUECAPTCHA_APIKEY") # truecaptcha.org - dictionary_file = config( - "DICTIONARY_FILE", default="words_alpha.txt" - ) # https://github.com/dwyl/english-words - librey_apiurl = config( - "LIBREY_APIURL", default="https://search.catswords.net" - ) # https://github.com/Ahwxorg/librey + client_encoding = config('CLIENT_ENCODING', default='utf-8') + truecaptcha_userid = config('TRUECAPTCHA_USERID') # truecaptcha.org + truecaptcha_apikey = config('TRUECAPTCHA_APIKEY') # truecaptcha.org + dictionary_file = config('DICTIONARY_FILE', default='words_alpha.txt') # https://github.com/dwyl/english-words + librey_apiurl = config('LIBREY_APIURL', default='https://search.catswords.net') # https://github.com/Ahwxorg/librey except Exception as e: logger.error("[*] Invalid configuration", exc_info=e) - class Fediverse(Extension): def __init__(self): - self.type = "filter" # this is a filter - + self.type = "filter" # this is a filter + # Load data to use KnownWords4 strategy # Download data: https://github.com/dwyl/english-words self.known_words = [] - if dictionary_file != "" and os.path.isfile(dictionary_file): + if dictionary_file != '' and os.path.isfile(dictionary_file): with open(dictionary_file, "r") as file: words = file.readlines() - self.known_words = [ - word.strip() for word in words if len(word.strip()) > 3 - ] + self.known_words = [word.strip() for word in words if len(word.strip()) > 3] logger.info("[*] Data loaded to use KnownWords4 strategy") def test(self, filtered, data, webserver, port, scheme, method, url): # prevent cache confusing - if data.find(b"Welcome to nginx!") > -1: + if data.find(b'Welcome to nginx!') > -1: return True # allowed conditions - if method == b"GET" or url.find(b"/api") > -1: + if method == b'GET' or url.find(b'/api') > -1: return False # convert to text data_length = len(data) - text = data.decode(client_encoding, errors="ignore") + text = data.decode(client_encoding, errors='ignore') error_rate = (data_length - len(text)) / data_length - if error_rate > 0.2: # it is a binary data + if error_rate > 0.2: # it is a binary data return False # check ID with K-Anonymity strategy - pattern = r"\b(?:(?<=\/@)|(?<=acct:))([a-zA-Z0-9]{10})\b" + pattern = r'\b(?:(?<=\/@)|(?<=acct:))([a-zA-Z0-9]{10})\b' matches = list(set(re.findall(pattern, text))) if len(matches) > 0: 
- logger.info("[*] Found ID: %s" % (", ".join(matches))) + logger.info("[*] Found ID: %s" % (', '.join(matches))) try: filtered = not all(map(self.pwnedpasswords_test, matches)) except Exception as e: @@ -89,45 +82,40 @@ class Fediverse(Extension): def vowel_ratio_test(s): ratio = self.calculate_vowel_ratio(s) return ratio > 0.2 and ratio < 0.8 - if all(map(vowel_ratio_test, matches)): score += 1 - strategies.append("VowelRatio10") + strategies.append('VowelRatio10') # check ID with Palindrome4 strategy if all(map(self.has_palindrome, matches)): score += 1 - strategies.append("Palindrome4") + strategies.append('Palindrome4') # check ID with KnownWords4 strategy if all(map(self.has_known_word, matches)): score += 2 - strategies.append("KnownWords4") + strategies.append('KnownWords4') # check ID with SearchEngine3 strategy - if librey_apiurl != "" and all(map(self.search_engine_test, matches)): + if librey_apiurl != '' and all(map(self.search_engine_test, matches)): score += 1 - strategies.append("SearchEngine3") + strategies.append('SearchEngine3') # check ID with RepeatedNumbers3 strategy if all(map(self.repeated_numbers_test, matches)): score += 1 - strategies.append("RepeatedNumbers3") + strategies.append('RepeatedNumbers3') # logging score - with open("score.log", "a") as file: - file.write( - "%s\t%s\t%s\r\n" - % ("+".join(matches), str(score), "+".join(strategies)) - ) + with open('score.log', 'a') as file: + file.write("%s\t%s\t%s\r\n" % ('+'.join(matches), str(score), '+'.join(strategies))) # make decision if score > 1: filtered = False # check an attached images (check images with Not-CAPTCHA strategy) - if truecaptcha_userid != "" and not filtered and len(matches) > 0: - + if truecaptcha_userid != '' and not filtered and len(matches) > 0: def webp_to_png_base64(url): try: response = requests.get(url) @@ -135,9 +123,7 @@ class Fediverse(Extension): img_png = img.convert("RGBA") buffered = io.BytesIO() img_png.save(buffered, format="PNG") - encoded_image = base64.b64encode(buffered.getvalue()).decode( - client_encoding - ) + encoded_image = base64.b64encode(buffered.getvalue()).decode(client_encoding) return encoded_image except: return None @@ -157,15 +143,11 @@ class Fediverse(Extension): solved = self.truecaptcha_solve(encoded_image) if solved: logger.info("[*] solved: %s" % (solved)) - filtered = filtered or ( - solved.lower() in ["ctkpaarr", "spam"] - ) + filtered = filtered or (solved.lower() in ['ctkpaarr', 'spam']) else: logger.info("[*] not solved") except Exception as e: - logger.error( - "[*] Not CAPTCHA strategy not working!", exc_info=e - ) + logger.error("[*] Not CAPTCHA strategy not working!", exc_info=e) return filtered @@ -184,52 +166,44 @@ class Fediverse(Extension): l5_sha1 = p_sha1[-5:] # Making GET request using Requests library - response = requests.get(f"https://api.pwnedpasswords.com/range/{f5_sha1}") + response = requests.get(f'https://api.pwnedpasswords.com/range/{f5_sha1}') # Checking if request was successful if response.status_code == 200: # Parsing response text - hashes = response.text.split("\r\n") + hashes = response.text.split('\r\n') # Using list comprehension to find matching hashes - matching_hashes = [ - line.split(":")[0] for line in hashes if line.endswith(l5_sha1) - ] + matching_hashes = [line.split(':')[0] for line in hashes if line.endswith(l5_sha1)] # If there are matching hashes, return True, else return False return bool(matching_hashes) else: - raise Exception( - "api.pwnedpasswords.com response status: %s" - % 
(str(response.status_code)) - ) + raise Exception("api.pwnedpasswords.com response status: %s" % (str(response.status_code))) return False # Strategy: Not-CAPTCHA - use truecaptcha.org def truecaptcha_solve(self, encoded_image): - url = "https://api.apitruecaptcha.org/one/gettext" + url = 'https://api.apitruecaptcha.org/one/gettext' data = { - "userid": truecaptcha_userid, - "apikey": truecaptcha_apikey, - "data": encoded_image, - "mode": "human", + 'userid': truecaptcha_userid, + 'apikey': truecaptcha_apikey, + 'data': encoded_image, + 'mode': 'human' } - response = requests.post(url=url, json=data) + response = requests.post(url = url, json = data) if response.status_code == 200: data = response.json() - if "error_message" in data: - print("[*] Error: %s" % (data["error_message"])) + if 'error_message' in data: + print ("[*] Error: %s" % (data['error_message'])) return None - if "result" in data: - return data["result"] + if 'result' in data: + return data['result'] else: - raise Exception( - "api.apitruecaptcha.org response status: %s" - % (str(response.status_code)) - ) + raise Exception("api.apitruecaptcha.org response status: %s" % (str(response.status_code))) return None @@ -241,10 +215,10 @@ class Fediverse(Extension): return 0.0 # Count the number of vowels ('a', 'e', 'i', 'o', 'u', 'w', 'y') in the string. - vowel_count = sum(1 for char in s if char.lower() in "aeiouwy") + vowel_count = sum(1 for char in s if char.lower() in 'aeiouwy') # Define vowel-ending patterns - vowel_ending_patterns = ["ang", "eng", "ing", "ong", "ung", "ank", "ink", "dge"] + vowel_ending_patterns = ['ang', 'eng', 'ing', 'ong', 'ung', 'ank', 'ink', 'dge'] # Count the occurrences of vowel-ending patterns in the string. vowel_count += sum(s.count(pattern) for pattern in vowel_ending_patterns) @@ -291,8 +265,8 @@ class Fediverse(Extension): data = response.json() - if "results_source" in data: - del data["results_source"] + if 'results_source' in data: + del data['results_source'] num_results = len(data) @@ -300,4 +274,4 @@ class Fediverse(Extension): # Strategy: RepeatedNumbers3 def repeated_numbers_test(self, s): - return bool(re.search(r"\d{3,}", s)) + return bool(re.search(r'\d{3,}', s)) diff --git a/plugins/portscanner.py b/plugins/portscanner.py index b8c260f..5ebd3db 100644 --- a/plugins/portscanner.py +++ b/plugins/portscanner.py @@ -15,22 +15,20 @@ import json from base import Extension - class PortScanner(Extension): def __init__(self): self.type = "rpcmethod" self.method = "scan_ports_by_hosts" self.exported_methods = [] - + def dispatch(self, type, id, params, conn): - hosts = params["hosts"] - binpath = params["binpath"] + hosts = params['hosts'] + binpath = params['binpath'] nm = nmap.PortScanner(nmap_search_path=(binpath,)) - result = nm.scan(hosts=hosts, arguments="-T5 -sV -p0-65535 --max-retries 0") - - return result + result = nm.scan(hosts=hosts, arguments='-T5 -sV -p0-65535 --max-retries 0') + return result; if __name__ == "__main__": main(sys.argv) diff --git a/plugins/wayback.py b/plugins/wayback.py index 36590fb..60e32e6 100644 --- a/plugins/wayback.py +++ b/plugins/wayback.py @@ -18,13 +18,12 @@ from base import Extension, Logger logger = Logger(name="wayback") try: - client_encoding = config("CLIENT_ENCODING") + client_encoding = config('CLIENT_ENCODING') except Exception as e: logger.error("[*] Invalid configuration", exc_info=e) - def get_cached_page_from_google(url): - status_code, text = (0, "") + status_code, text = (0, '') # Google Cache URL google_cache_url = 
"https://webcache.googleusercontent.com/search?q=cache:" + url @@ -34,16 +33,15 @@ def get_cached_page_from_google(url): # Check if the request was successful (status code 200) if response.status_code == 200: - text = response.text # Extract content from response + text = response.text # Extract content from response else: status_code = response.status_code return status_code, text - # API documentation: https://archive.org/help/wayback_api.php def get_cached_page_from_wayback(url): - status_code, text = (0, "") + status_code, text = (0, '') # Wayback Machine API URL wayback_api_url = "http://archive.org/wayback/available?url=" + url @@ -58,7 +56,7 @@ def get_cached_page_from_wayback(url): data = response.json() archived_snapshots = data.get("archived_snapshots", {}) closest_snapshot = archived_snapshots.get("closest", {}) - + # Check if the URL is available in the archive if closest_snapshot: archived_url = closest_snapshot.get("url", "") @@ -66,7 +64,7 @@ def get_cached_page_from_wayback(url): # If URL is available, fetch the content of the archived page if archived_url: archived_page_response = requests.get(archived_url) - status_code = archived_page_response.status_code + status_code = archived_page_response.status_code; if status_code == 200: text = archived_page_response.text else: @@ -80,10 +78,9 @@ def get_cached_page_from_wayback(url): return status_code, text - class Wayback(Extension): def __init__(self): - self.type = "connector" # this is a connctor + self.type = "connector" # this is a connctor self.connection_type = "wayback" def connect(self, conn, data, webserver, port, scheme, method, url): diff --git a/server.py b/server.py index 73826d2..8f90c65 100644 --- a/server.py +++ b/server.py @@ -7,7 +7,7 @@ # Namyheon Go (Catswords Research) # https://github.com/gnh1201/caterpillar # Created at: 2022-10-06 -# Updated at: 2024-07-11 +# Updated at: 2024-07-09 # import argparse @@ -32,34 +32,24 @@ from requests.auth import HTTPBasicAuth from urllib.parse import urlparse from decouple import config -from base import ( - Extension, - extract_credentials, - jsonrpc2_create_id, - jsonrpc2_encode, - jsonrpc2_result_encode, - find_openssl_binpath, - Logger, -) +from base import Extension, extract_credentials, jsonrpc2_create_id, jsonrpc2_encode, jsonrpc2_result_encode, Logger logger = Logger(name="server") # initialization try: - listening_port = config("PORT", default=5555, cast=int) - _username, _password, server_url = extract_credentials( - config("SERVER_URL", default="") - ) - server_connection_type = config("SERVER_CONNECTION_TYPE", default="") - cakey = config("CA_KEY", default="ca.key") - cacert = config("CA_CERT", default="ca.crt") - certkey = config("CERT_KEY", default="cert.key") - certdir = config("CERT_DIR", default="certs/") - openssl_binpath = config("OPENSSL_BINPATH", default=find_openssl_binpath()) - client_encoding = config("CLIENT_ENCODING", default="utf-8") - local_domain = config("LOCAL_DOMAIN", default="") - proxy_pass = config("PROXY_PASS", default="") - use_extensions = config("USE_EXTENSIONS", default="") + listening_port = config('PORT', default=5555, cast=int) + _username, _password, server_url = extract_credentials(config('SERVER_URL', default='')) + server_connection_type = config('SERVER_CONNECTION_TYPE', default='') + cakey = config('CA_KEY', default='ca.key') + cacert = config('CA_CERT', default='ca.crt') + certkey = config('CERT_KEY', default='cert.key') + certdir = config('CERT_DIR', default='certs/') + openssl_binpath = config('OPENSSL_BINPATH', 
default='openssl') + client_encoding = config('CLIENT_ENCODING', default='utf-8') + local_domain = config('LOCAL_DOMAIN', default='') + proxy_pass = config('PROXY_PASS', default='') + use_extensions = config('USE_EXTENSIONS', default='') except KeyboardInterrupt: logger.warning("[*] User has requested an interrupt") logger.warning("[*] Application Exiting.....") @@ -68,12 +58,8 @@ except Exception as e: logger.error("[*] Failed to initialize:", exc_info=e) parser = argparse.ArgumentParser() -parser.add_argument( - "--max_conn", help="Maximum allowed connections", default=255, type=int -) -parser.add_argument( - "--buffer_size", help="Number of samples to be used", default=8192, type=int -) +parser.add_argument('--max_conn', help="Maximum allowed connections", default=255, type=int) +parser.add_argument('--buffer_size', help="Number of samples to be used", default=8192, type=int) args = parser.parse_args() max_connection = args.max_conn @@ -83,7 +69,7 @@ resolved_address_list = [] # set environment of Extension Extension.set_buffer_size(buffer_size) -Extension.set_protocol("tcp") +Extension.set_protocol('tcp') # set basic authentication auth = None @@ -92,36 +78,36 @@ if _username: def parse_first_data(data): - parsed_data = (b"", b"", b"", b"", b"") + parsed_data = (b'', b'', b'', b'', b'') try: - first_line = data.split(b"\n")[0] + first_line = data.split(b'\n')[0] method, url = first_line.split()[0:2] - http_pos = url.find(b"://") # Finding the position of :// - scheme = b"http" # check http/https or other protocol + http_pos = url.find(b'://') #Finding the position of :// + scheme = b'http' # check http/https or other protocol if http_pos == -1: temp = url else: - temp = url[(http_pos + 3) :] + temp = url[(http_pos + 3):] scheme = url[0:http_pos] - port_pos = temp.find(b":") + port_pos = temp.find(b':') - webserver_pos = temp.find(b"/") + webserver_pos = temp.find(b'/') if webserver_pos == -1: webserver_pos = len(temp) - webserver = b"" + webserver = b'' port = -1 if port_pos == -1 or webserver_pos < port_pos: port = 80 webserver = temp[:webserver_pos] else: - port = int((temp[(port_pos + 1) :])[: webserver_pos - port_pos - 1]) + port = int((temp[(port_pos + 1):])[:webserver_pos - port_pos - 1]) webserver = temp[:port_pos] if port == 443: - scheme = b"https" + scheme = b'https' parsed_data = (webserver, port, scheme, method, url) except Exception as e: @@ -133,16 +119,14 @@ def parse_first_data(data): def conn_string(conn, data, addr): # JSON-RPC 2.0 request def process_jsonrpc2(data): - jsondata = json.loads(data.decode(client_encoding, errors="ignore")) - if jsondata["jsonrpc"] == "2.0": - jsonrpc2_server( - conn, jsondata["id"], jsondata["method"], jsondata["params"] - ) + jsondata = json.loads(data.decode(client_encoding, errors='ignore')) + if jsondata['jsonrpc'] == "2.0": + jsonrpc2_server(conn, jsondata['id'], jsondata['method'], jsondata['params']) return True return False # JSON-RPC 2.0 request over Socket (stateful) - if data.find(b"{") == 0 and process_jsonrpc2(data): + if data.find(b'{') == 0 and process_jsonrpc2(data): # will be close by the client return @@ -152,18 +136,18 @@ def conn_string(conn, data, addr): # JSON-RPC 2.0 request over HTTP (stateless) path = urlparse(url.decode(client_encoding)).path if path == "/proxy-cgi/jsonrpc2": - conn.send(b"HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n") - pos = data.find(b"\r\n\r\n") - if pos > -1 and process_jsonrpc2(data[pos + 4 :]): + conn.send(b'HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n') + pos = 
data.find(b'\r\n\r\n') + if pos > -1 and process_jsonrpc2(data[pos + 4:]): conn.close() # will be close by the server return # if it is reverse proxy - if local_domain != "": + if local_domain != '': localserver = local_domain.encode(client_encoding) - if webserver == localserver or data.find(b"\nHost: " + localserver) > -1: + if webserver == localserver or data.find(b'\nHost: ' + localserver) > -1: logger.info("[*] Detected the reverse proxy request: %s" % local_domain) - scheme, _webserver, _port = proxy_pass.encode(client_encoding).split(b":") + scheme, _webserver, _port = proxy_pass.encode(client_encoding).split(b':') webserver = _webserver[2:] port = int(_port.decode(client_encoding)) @@ -173,7 +157,7 @@ def conn_string(conn, data, addr): def jsonrpc2_server(conn, id, method, params): if method == "relay_accept": accepted_relay[id] = conn - connection_speed = params["connection_speed"] + connection_speed = params['connection_speed'] logger.info("[*] connection speed: %s milliseconds" % (str(connection_speed))) while conn.fileno() > -1: time.sleep(1) @@ -182,82 +166,39 @@ def jsonrpc2_server(conn, id, method, params): else: Extension.dispatch_rpcmethod(method, "call", id, params, conn) - # return in conn_string() + #return in conn_string() def proxy_connect(webserver, conn): hostname = webserver.decode(client_encoding) - certpath = "%s/%s.crt" % (certdir.rstrip("/"), hostname) + certpath = "%s/%s.crt" % (certdir.rstrip('/'), hostname) if not os.path.exists(certdir): os.makedirs(certdir) # https://stackoverflow.com/questions/24055036/handle-https-request-in-proxy-server-by-c-sharp-connect-tunnel - conn.send(b"HTTP/1.1 200 Connection Established\r\n\r\n") + conn.send(b'HTTP/1.1 200 Connection Established\r\n\r\n') # https://github.com/inaz2/proxy2/blob/master/proxy2.py try: if not os.path.isfile(certpath): epoch = "%d" % (time.time() * 1000) - p1 = Popen( - [ - openssl_binpath, - "req", - "-new", - "-key", - certkey, - "-subj", - "/CN=%s" % hostname, - ], - stdout=PIPE, - ) + p1 = Popen([openssl_binpath, "req", "-new", "-key", certkey, "-subj", "/CN=%s" % hostname], stdout=PIPE) p2 = Popen( - [ - openssl_binpath, - "x509", - "-req", - "-days", - "3650", - "-CA", - cacert, - "-CAkey", - cakey, - "-set_serial", - epoch, - "-out", - certpath, - ], - stdin=p1.stdout, - stderr=PIPE, - ) + [openssl_binpath, "x509", "-req", "-days", "3650", "-CA", cacert, "-CAkey", cakey, "-set_serial", epoch, + "-out", certpath], stdin=p1.stdout, stderr=PIPE) p2.communicate() - except FileNotFoundError as e: - logger.error( - "[*] OpenSSL distribution not found on this system. Skipping certificate issuance.", - exc_info=e, - ) - certpath = "default.crt" except Exception as e: - logger.error("[*] Skipping certificate issuance.", exc_info=e) - certpath = "default.crt" + logger.error("[*] Skipped generating the certificate.", exc_info=e) # https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server # https://docs.python.org/3/library/ssl.html context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER) - context.check_hostname = False - context.verify_mode = ssl.CERT_NONE context.load_cert_chain(certpath, certkey) - try: - # https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server - conn = context.wrap_socket(conn, server_side=True) - data = conn.recv(buffer_size) - except ssl.SSLError as e: - logger.error( - "[*] SSL negotiation failed. 
Check that the CA certificate is installed.", - exc_info=e, - ) - return (conn, b"") + # https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server + conn = context.wrap_socket(conn, server_side=True) + data = conn.recv(buffer_size) return (conn, data) @@ -278,19 +219,17 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data): logger.info("[*] Started the request. %s" % (str(addr[0]))) # SSL negotiation - is_ssl = scheme in [b"https", b"tls", b"ssl"] - if is_ssl and method == b"CONNECT": + is_ssl = scheme in [b'https', b'tls', b'ssl'] + if is_ssl and method == b'CONNECT': while True: try: conn, data = proxy_connect(webserver, conn) break # success - # except OSError as e: + #except OSError as e: # print ("[*] Retrying SSL negotiation... (%s:%s) %s" % (webserver.decode(client_encoding), str(port), str(e))) except Exception as e: raise Exception( - "SSL negotiation failed. (%s:%s) %s" - % (webserver.decode(client_encoding), str(port), str(e)) - ) + "SSL negotiation failed. (%s:%s) %s" % (webserver.decode(client_encoding), str(port), str(e))) # override data if is_ssl: @@ -298,9 +237,9 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data): # https://stackoverflow.com/questions/44343739/python-sockets-ssl-eof-occurred-in-violation-of-protocol def sock_close(sock, is_ssl=False): - # if is_ssl: + #if is_ssl: # sock = sock.unwrap() - # sock.shutdown(socket.SHUT_RDWR) + #sock.shutdown(socket.SHUT_RDWR) sock.close() # Wait to see if there is more data to transmit @@ -314,7 +253,7 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data): return # send following chunks - buffered = b"" + buffered = b'' conn.settimeout(1) while True: try: @@ -322,14 +261,12 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data): if not chunk: break buffered += chunk - if proxy_check_filtered( - buffered, webserver, port, scheme, method, url - ): + if proxy_check_filtered(buffered, webserver, port, scheme, method, url): sock_close(sock, is_ssl) raise Exception("Filtered request") sock.send(chunk) if len(buffered) > buffer_size * 2: - buffered = buffered[-buffer_size * 2 :] + buffered = buffered[-buffer_size * 2:] except: break @@ -342,101 +279,75 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data): context.check_hostname = False context.verify_mode = ssl.CERT_NONE - sock = context.wrap_socket( - sock, server_hostname=webserver.decode(client_encoding) - ) + sock = context.wrap_socket(sock, server_hostname=webserver.decode(client_encoding)) sock.connect((webserver, port)) - # sock.sendall(data) + #sock.sendall(data) sendall(sock, conn, data) else: sock.connect((webserver, port)) - # sock.sendall(data) + #sock.sendall(data) sendall(sock, conn, data) i = 0 is_http_403 = False - buffered = b"" + buffered = b'' while True: chunk = sock.recv(buffer_size) if not chunk: break - if i == 0 and chunk.find(b"HTTP/1.1 403") == 0: + if i == 0 and chunk.find(b'HTTP/1.1 403') == 0: is_http_403 = True break buffered += chunk if proxy_check_filtered(buffered, webserver, port, scheme, method, url): sock_close(sock, is_ssl) - add_filtered_host(webserver.decode(client_encoding), "127.0.0.1") + add_filtered_host(webserver.decode(client_encoding), '127.0.0.1') raise Exception("Filtered response") conn.send(chunk) if len(buffered) > buffer_size * 2: - buffered = buffered[-buffer_size * 2 :] + buffered = buffered[-buffer_size * 2:] i += 1 # when blocked if is_http_403: - logger.warning( - "[*] Blocked the request by remote 
server: %s" - % (webserver.decode(client_encoding)) - ) + logger.warning("[*] Blocked the request by remote server: %s" % (webserver.decode(client_encoding))) def bypass_callback(response, *args, **kwargs): if response.status_code != 200: - conn.sendall(b'HTTP/1.1 403 Forbidden\r\n\r\n{"status":403}') + conn.sendall(b"HTTP/1.1 403 Forbidden\r\n\r\n{\"status\":403}") return # https://stackoverflow.com/questions/20658572/python-requests-print-entire-http-request-raw - format_headers = lambda d: "\r\n".join( - f"{k}: {v}" for k, v in d.items() - ) + format_headers = lambda d: '\r\n'.join(f'{k}: {v}' for k, v in d.items()) - first_data = ( - textwrap.dedent( - "HTTP/1.1 {res.status_code} {res.reason}\r\n{reshdrs}\r\n\r\n" - ) - .format( - res=response, - reshdrs=format_headers(response.headers), - ) - .encode(client_encoding) - ) + first_data = textwrap.dedent('HTTP/1.1 {res.status_code} {res.reason}\r\n{reshdrs}\r\n\r\n').format( + res=response, + reshdrs=format_headers(response.headers), + ).encode(client_encoding) conn.send(first_data) for chunk in response.iter_content(chunk_size=buffer_size): conn.send(chunk) - if is_ssl and method == b"GET": + if is_ssl and method == b'GET': logger.info("[*] Trying to bypass blocked request...") remote_url = "%s://%s%s" % ( - scheme.decode(client_encoding), - webserver.decode(client_encoding), - url.decode(client_encoding), - ) - requests.get( - remote_url, - stream=True, - verify=False, - hooks={"response": bypass_callback}, - ) + scheme.decode(client_encoding), webserver.decode(client_encoding), url.decode(client_encoding)) + requests.get(remote_url, stream=True, verify=False, hooks={'response': bypass_callback}) else: - conn.sendall(b'HTTP/1.1 403 Forbidden\r\n\r\n{"status":403}') + conn.sendall(b"HTTP/1.1 403 Forbidden\r\n\r\n{\"status\":403}") sock_close(sock, is_ssl) - logger.info( - "[*] Received %s chunks. (%s bytes per chunk)" - % (str(i), str(buffer_size)) - ) + logger.info("[*] Received %s chunks. 
(%s bytes per chunk)" % (str(i), str(buffer_size))) # stateful mode elif server_connection_type == "stateful": proxy_data = { - "headers": { - "User-Agent": "php-httpproxy/0.1.5 (Client; Python " - + python_version() - + "; abuse@catswords.net)", + 'headers': { + "User-Agent": "php-httpproxy/0.1.5 (Client; Python " + python_version() + "; abuse@catswords.net)", }, - "data": { + 'data': { "buffer_size": str(buffer_size), "client_address": str(addr[0]), "client_port": str(listening_port), @@ -444,56 +355,41 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data): "remote_address": webserver.decode(client_encoding), "remote_port": str(port), "scheme": scheme.decode(client_encoding), - "datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"), - }, + "datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f") + } } # get client address logger.info("[*] resolving the client address...") while len(resolved_address_list) == 0: try: - _, query_data = jsonrpc2_encode("get_client_address") - query = requests.post( - server_url, - headers=proxy_data["headers"], - data=query_data, - timeout=1, - auth=auth, - ) + _, query_data = jsonrpc2_encode('get_client_address') + query = requests.post(server_url, headers=proxy_data['headers'], data=query_data, timeout=1, + auth=auth) if query.status_code == 200: - result = query.json()["result"] - resolved_address_list.append(result["data"]) - logger.info("[*] resolved IP: %s" % (result["data"])) + result = query.json()['result'] + resolved_address_list.append(result['data']) + logger.info("[*] resolved IP: %s" % (result['data'])) except requests.exceptions.ReadTimeout as e: pass - proxy_data["data"]["client_address"] = resolved_address_list[0] + proxy_data['data']['client_address'] = resolved_address_list[0] # build a tunnel def relay_connect(id, raw_data, proxy_data): try: # The tunnel connect forever until the client destroy it - relay = requests.post( - server_url, - headers=proxy_data["headers"], - data=raw_data, - stream=True, - timeout=None, - auth=auth, - ) + relay = requests.post(server_url, headers=proxy_data['headers'], data=raw_data, stream=True, + timeout=None, auth=auth) for chunk in relay.iter_content(chunk_size=buffer_size): - jsondata = json.loads( - chunk.decode(client_encoding, errors="ignore") - ) - if jsondata["jsonrpc"] == "2.0" and ("error" in jsondata): - e = jsondata["error"] - logger.error( - "[*] Error received from the relay server: (%s) %s" - % (str(e["code"]), str(e["message"])) - ) + jsondata = json.loads(chunk.decode(client_encoding, errors='ignore')) + if jsondata['jsonrpc'] == "2.0" and ("error" in jsondata): + e = jsondata['error'] + logger.error("[*] Error received from the relay server: (%s) %s" % ( + str(e['code']), str(e['message']))) except requests.exceptions.ReadTimeout as e: pass - id, raw_data = jsonrpc2_encode("relay_connect", proxy_data["data"]) + id, raw_data = jsonrpc2_encode('relay_connect', proxy_data['data']) start_new_thread(relay_connect, (id, raw_data, proxy_data)) # wait for the relay @@ -515,7 +411,7 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data): # get response i = 0 - buffered = b"" + buffered = b'' while True: chunk = sock.recv(buffer_size) if not chunk: @@ -523,29 +419,24 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data): buffered += chunk if proxy_check_filtered(buffered, webserver, port, scheme, method, url): sock_close(sock, is_ssl) - add_filtered_host(webserver.decode(client_encoding), "127.0.0.1") + 
add_filtered_host(webserver.decode(client_encoding), '127.0.0.1') raise Exception("Filtered response") conn.send(chunk) if len(buffered) > buffer_size * 2: - buffered = buffered[-buffer_size * 2 :] + buffered = buffered[-buffer_size * 2:] i += 1 sock_close(sock, is_ssl) - logger.info( - "[*] Received %s chunks. (%s bytes per chunk)" - % (str(i), str(buffer_size)) - ) + logger.info("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size))) # stateless mode elif server_connection_type == "stateless": proxy_data = { - "headers": { - "User-Agent": "php-httpproxy/0.1.5 (Client; Python " - + python_version() - + "; abuse@catswords.net)", + 'headers': { + "User-Agent": "php-httpproxy/0.1.5 (Client; Python " + python_version() + "; abuse@catswords.net)", }, - "data": { + 'data': { "buffer_size": str(buffer_size), "request_data": base64.b64encode(data).decode(client_encoding), "request_length": str(len(data)), @@ -555,36 +446,27 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data): "remote_address": webserver.decode(client_encoding), "remote_port": str(port), "scheme": scheme.decode(client_encoding), - "datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"), - }, + "datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f") + } } - _, raw_data = jsonrpc2_encode("relay_request", proxy_data["data"]) + _, raw_data = jsonrpc2_encode('relay_request', proxy_data['data']) logger.info("[*] Sending %s bytes..." % (str(len(raw_data)))) i = 0 - relay = requests.post( - server_url, - headers=proxy_data["headers"], - data=raw_data, - stream=True, - auth=auth, - ) - buffered = b"" + relay = requests.post(server_url, headers=proxy_data['headers'], data=raw_data, stream=True, auth=auth) + buffered = b'' for chunk in relay.iter_content(chunk_size=buffer_size): buffered += chunk if proxy_check_filtered(buffered, webserver, port, scheme, method, url): - add_filtered_host(webserver.decode(client_encoding), "127.0.0.1") + add_filtered_host(webserver.decode(client_encoding), '127.0.0.1') raise Exception("Filtered response") conn.send(chunk) if len(buffered) > buffer_size * 2: - buffered = buffered[-buffer_size * 2 :] + buffered = buffered[-buffer_size * 2:] i += 1 - logger.info( - "[*] Received %s chunks. (%s bytes per chunk)" - % (str(i), str(buffer_size)) - ) + logger.info("[*] Received %s chunks. 
(%s bytes per chunk)" % (str(i), str(buffer_size))) # nothing at all else: @@ -599,27 +481,27 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data): except Exception as e: print(traceback.format_exc()) logger.error("[*] Exception on requesting the data.", exc_info=e) - conn.sendall(b'HTTP/1.1 403 Forbidden\r\n\r\n{"status":403}') + conn.sendall(b"HTTP/1.1 403 Forbidden\r\n\r\n{\"status\":403}") conn.close() # journaling a filtered hosts def add_filtered_host(domain, ip_address): - hosts_path = "./filtered.hosts" - with open(hosts_path, "r") as file: + hosts_path = './filtered.hosts' + with open(hosts_path, 'r') as file: lines = file.readlines() domain_exists = any(domain in line for line in lines) if not domain_exists: lines.append(f"{ip_address}\t{domain}\n") - with open(hosts_path, "w") as file: + with open(hosts_path, 'w') as file: file.writelines(lines) -def start(): # Main Program +def start(): #Main Program try: sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM) - sock.bind(("", listening_port)) + sock.bind(('', listening_port)) sock.listen(max_connection) logger.info("[*] Server started successfully [ %d ]" % listening_port) except Exception as e: @@ -628,9 +510,9 @@ def start(): # Main Program while True: try: - conn, addr = sock.accept() # Accept connection from client browser - data = conn.recv(buffer_size) # Recieve client data - start_new_thread(conn_string, (conn, data, addr)) # Starting a thread + conn, addr = sock.accept() #Accept connection from client browser + data = conn.recv(buffer_size) #Recieve client data + start_new_thread(conn_string, (conn, data, addr)) #Starting a thread except KeyboardInterrupt: sock.close() logger.info("[*] Graceful Shutdown") @@ -641,7 +523,7 @@ if __name__ == "__main__": # Fix Value error if use_extensions: # load extensions - for s in use_extensions.split(","): + for s in use_extensions.split(','): Extension.register(s) else: logger.warning("[*] No extensions registered") diff --git a/smtp.py b/smtp.py index e64e70c..bc1c054 100644 --- a/smtp.py +++ b/smtp.py @@ -20,22 +20,14 @@ import requests from decouple import config from requests.auth import HTTPBasicAuth -from base import ( - extract_credentials, - jsonrpc2_create_id, - jsonrpc2_encode, - jsonrpc2_result_encode, - Logger, -) +from base import extract_credentials, jsonrpc2_create_id, jsonrpc2_encode, jsonrpc2_result_encode, Logger logger = Logger(name="smtp") try: - smtp_host = config("SMTP_HOST", default="127.0.0.1") - smtp_port = config("SMTP_PORT", default=25, cast=int) - _username, _password, server_url = extract_credentials( - config("SERVER_URL", default="") - ) + smtp_host = config('SMTP_HOST', default='127.0.0.1') + smtp_port = config('SMTP_PORT', default=25, cast=int) + _username, _password, server_url = extract_credentials(config('SERVER_URL', default='')) except KeyboardInterrupt: logger.warning("[*] User has requested an interrupt") logger.warning("[*] Application Exiting.....") @@ -45,7 +37,6 @@ auth = None if _username: auth = HTTPBasicAuth(_username, _password) - class CaterpillarSMTPServer(SMTPServer): def __init__(self, localaddr, remoteaddr): self.__class__.smtpd_hostname = "CaterpillarSMTPServer" @@ -53,54 +44,47 @@ class CaterpillarSMTPServer(SMTPServer): super().__init__(localaddr, remoteaddr) def process_message(self, peer, mailfrom, rcpttos, data, **kwargs): - message_lines = data.decode("utf-8").split("\n") - subject = "" - to = "" + message_lines = data.decode('utf-8').split('\n') + subject = '' + to = '' for line in message_lines: - 
pos = line.find(":") + pos = line.find(':') if pos > -1: k = line[0:pos] - v = line[pos + 1 :] - if k == "Subject": + v = line[pos+1:] + if k == 'Subject': subject = v - elif k == "To": + elif k == 'To': to = v # build a data proxy_data = { - "headers": { - "User-Agent": "php-httpproxy/0.1.6 (Client; Python " - + python_version() - + "; Caterpillar; abuse@catswords.net)", + 'headers': { + "User-Agent": "php-httpproxy/0.1.6 (Client; Python " + python_version() + "; Caterpillar; abuse@catswords.net)", }, - "data": { + 'data': { "to": to, "from": mailfrom, "subject": subject, - "message": data.decode("utf-8"), - }, + "message": data.decode('utf-8') + } } - _, raw_data = jsonrpc2_encode("relay_sendmail", proxy_data["data"]) + _, raw_data = jsonrpc2_encode('relay_sendmail', proxy_data['data']) # send HTTP POST request try: - response = requests.post( - server_url, headers=proxy_data["headers"], data=raw_data, auth=auth - ) + response = requests.post(server_url, headers=proxy_data['headers'], data=raw_data, auth=auth) if response.status_code == 200: type, id, method, rpcdata = jsonrpc2_decode(response.text) - if rpcdata["success"]: + if rpcdata['success']: logger.info("[*] Email sent successfully.") else: - raise Exception( - "(%s) %s" % (str(rpcdata["code"]), rpcdata["message"]) - ) + raise Exception("(%s) %s" % (str(rpcdata['code']), rpcdata['message'])) else: raise Exception("Status %s" % (str(response.status_code))) except Exception as e: logger.error("[*] Failed to send email", exc_info=e) - # Start SMTP server smtp_server = CaterpillarSMTPServer((smtp_host, smtp_port), None) diff --git a/web.py b/web.py index a639a3f..46fd801 100644 --- a/web.py +++ b/web.py @@ -19,51 +19,53 @@ from base import Extension, jsonrpc2_error_encode, Logger # TODO: 나중에 Flask 커스텀 핸들러 구현 해야 함 logger = Logger(name="web") app = Flask(__name__) -app.config["UPLOAD_FOLDER"] = "data/" -if not os.path.exists(app.config["UPLOAD_FOLDER"]): - os.makedirs(app.config["UPLOAD_FOLDER"]) +app.config['UPLOAD_FOLDER'] = 'data/' +if not os.path.exists(app.config['UPLOAD_FOLDER']): + os.makedirs(app.config['UPLOAD_FOLDER']) -@app.route("/") +@app.route('/') def upload_form(): - return render_template("upload.html") + return render_template('upload.html') -@app.route("/upload", methods=["POST"]) +@app.route('/upload', methods=['POST']) def process_upload(): # make connection profile from Flask request conn = Connection(request) # pass to the method - method = request.form["method"] - filename = request.files["file"].filename - params = {"filename": filename} + method = request.form['method'] + filename = request.files['file'].filename + params = { + 'filename': filename + } # just do it - return Extension.dispatch_rpcmethod(method, "call", "", params, conn) + return Extension.dispatch_rpcmethod(method, 'call', '', params, conn) -@app.route("/jsonrpc2", methods=["POST"]) +@app.route('/jsonrpc2', methods=['POST']) def process_jsonrpc2(): # make connection profile from Flask request conn = Connection(request) # JSON-RPC 2.0 request jsondata = request.get_json(silent=True) - if jsondata["jsonrpc"] == "2.0": - return Extension.dispatch_rpcmethod( - jsondata["method"], "call", jsondata["id"], jsondata["params"], conn - ) + if jsondata['jsonrpc'] == "2.0": + return Extension.dispatch_rpcmethod(jsondata['method'], 'call', jsondata['id'], jsondata['params'], conn) # when error - return jsonrpc2_error_encode({"message": "Not vaild JSON-RPC 2.0 request"}) + return jsonrpc2_error_encode({ + 'message': "Not vaild JSON-RPC 2.0 request" + }) def 
jsonrpc2_server(conn, id, method, params): return Extension.dispatch_rpcmethod(method, "call", id, params, conn) -class Connection: +class Connection(): def send(self, data): self.messages.append(data) @@ -81,9 +83,9 @@ class Connection: if __name__ == "__main__": # initialization try: - listening_port = config("PORT", default=5555, cast=int) - client_encoding = config("CLIENT_ENCODING", default="utf-8") - use_extensions = config("USE_EXTENSIONS", default="") + listening_port = config('PORT', default=5555, cast=int) + client_encoding = config('CLIENT_ENCODING', default='utf-8') + use_extensions = config('USE_EXTENSIONS', default='') except KeyboardInterrupt: logger.warning("[*] User has requested an interrupt") logger.warning("[*] Application Exiting.....") @@ -92,14 +94,14 @@ if __name__ == "__main__": logger.error("[*] Failed to initialize", exc_info=e) # set environment of Extension - Extension.set_protocol("http") + Extension.set_protocol('http') # Fix Value error if use_extensions: # load extensions - for s in use_extensions.split(","): + for s in use_extensions.split(','): Extension.register(s) else: logger.warning("[*] No extensions registered") - app.run(debug=True, host="0.0.0.0", port=listening_port) + app.run(debug=True, host='0.0.0.0', port=listening_port)