feat: implemented a custom logger and fixed some invalid code implementations

Also fixed some misspellings. Note that this change is very likely to cause merge conflicts.
Author: Euiseo Cha
Date:   2024-07-09 17:01:25 +09:00
Parent: 352fc3229f
Commit: cd7350655b
GPG Key ID: 39F74FF9CEA87CC9 (no known key found for this signature in the database)
8 changed files with 113 additions and 62 deletions

Changed file: .gitignore (vendored)

@@ -2,4 +2,5 @@ certs/
 savedfiles/
 settings.ini
 .env
 *.pyc
+logs/**

Changed file: base.py

@@ -5,16 +5,22 @@
 #
 # Caterpillar Proxy - The simple web debugging proxy (formerly, php-httpproxy)
 # Namyheon Go (Catswords Research) <gnh1201@gmail.com>
+# Euiseo Cha (Wonkwang University) <zeroday0619_dev@outlook.com>
 # https://github.com/gnh1201/caterpillar
 # Created at: 2024-05-20
 # Updated at: 2024-07-06
 #
+import logging
 import hashlib
 import json
+import os
 import re
 import importlib
+from datetime import datetime, timezone
+from typing import Union, List

 client_encoding = 'utf-8'

 def extract_credentials(url):

@@ -162,3 +168,30 @@ class Extension():
     def connect(self, conn, data, webserver, port, scheme, method, url):
         raise NotImplementedError
+
+class Logger(logging.Logger):
+    def __init__(self, name: str, level: int = logging.NOTSET):
+        super().__init__(name, level)
+        self.formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
+
+        if not os.path.isdir("logs"):
+            os.mkdir("logs")
+
+        stream_handler = logging.StreamHandler()
+        file_handler = logging.FileHandler('logs/' + name + "-" + self._generate_timestamp() + '.log')
+
+        self._set_formatters([stream_handler, file_handler])
+        self._add_handlers([stream_handler, file_handler])
+
+    @staticmethod
+    def _generate_timestamp():
+        date = datetime.now(tz=timezone.utc).strftime('%Y-%m-%d')
+        return date
+
+    def _set_formatters(self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]]):
+        for handler in handlers:
+            handler.setFormatter(self.formatter)
+
+    def _add_handlers(self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]]):
+        for handler in handlers:
+            self.addHandler(handler)
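
For reference, a minimal usage sketch (not part of the diff) of how the modules touched by this commit consume the new Logger; the module name "example" and the messages below are illustrative only, while the constructor behavior and the exc_info pattern follow the class above and the changes in the rest of the commit:

from base import Logger

# Creates a logger that writes to the console and to logs/example-YYYY-MM-DD.log
logger = Logger(name="example")

logger.info("[*] Extension loaded")  # replaces the former print() calls
try:
    raise RuntimeError("simulated failure")
except Exception as e:
    # exc_info=e attaches the traceback, matching the pattern used throughout this commit
    logger.error("[*] Something went wrong", exc_info=e)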

Changed file: (file name not shown)

@@ -12,7 +12,10 @@
 import docker

-from base import Extension
+from base import Extension, Logger
+
+logger = Logger("Container")

 class Container(Extension):
     def __init__(self):

@@ -24,7 +27,7 @@ class Container(Extension):
         self.client = docker.from_env()

     def dispatch(self, type, id, params, conn):
-        print ("[*] Greeting! dispatch")
+        logger.info("[*] Greeting! dispatch")
         conn.send(b'Greeting! dispatch')

     def container_run(self, type, id, params, conn):

@@ -35,7 +38,7 @@ class Container(Extension):
         environment = params['environment']
         volumes = params['volumes']

-        container = client.containers.run(
+        container = self.client.containers.run(
             image,
             devices=devices,
             name=name,

@@ -45,13 +48,13 @@ class Container(Extension):
         )
         container.logs()

-        print ("[*] Running...")
+        logger.info("[*] Running...")

     def container_stop(self, type, id, params, conn):
         name = params['name']

-        container = client.containers.get(name)
+        container = self.client.containers.get(name)
         container.stop()

-        print ("[*] Stopped")
+        logger.info("[*] Stopped")

Changed file: (file name not shown)

@@ -10,7 +10,8 @@
 # Created in: 2022-10-06
 # Updated in: 2024-07-06
 #
+import base64
+import hashlib
 import io
 import re
 import requests

@@ -19,7 +20,9 @@ import os.path
 from decouple import config
 from PIL import Image

-from base import Extension
+from base import Extension, Logger
+
+logger = Logger(name="fediverse")

 try:
     client_encoding = config('CLIENT_ENCODING', default='utf-8')

@@ -28,7 +31,7 @@ try:
     dictionary_file = config('DICTIONARY_FILE', default='words_alpha.txt') # https://github.com/dwyl/english-words
     librey_apiurl = config('LIBREY_APIURL', default='https://search.catswords.net') # https://github.com/Ahwxorg/librey
 except Exception as e:
-    print ("[*] Invaild configration: %s" % (str(e)))
+    logger.error("[*] Invalid configuration", exc_info=e)

 class Fediverse(Extension):
     def __init__(self):

@@ -41,7 +44,7 @@ class Fediverse(Extension):
         with open(dictionary_file, "r") as file:
             words = file.readlines()
         self.known_words = [word.strip() for word in words if len(word.strip()) > 3]
-        print ("[*] Data loaded to use KnownWords4 strategy")
+        logger.info("[*] Data loaded to use KnownWords4 strategy")

     def test(self, filtered, data, webserver, port, scheme, method, url):
         # prevent cache confusing

@@ -63,11 +66,11 @@ class Fediverse(Extension):
         pattern = r'\b(?:(?<=\/@)|(?<=acct:))([a-zA-Z0-9]{10})\b'
         matches = list(set(re.findall(pattern, text)))
         if len(matches) > 0:
-            print ("[*] Found ID: %s" % (', '.join(matches)))
+            logger.info("[*] Found ID: %s" % (', '.join(matches)))
             try:
                 filtered = not all(map(self.pwnedpasswords_test, matches))
             except Exception as e:
-                print ("[*] K-Anonymity strategy not working! %s" % (str(e)))
+                logger.error("[*] K-Anonymity strategy not working!", exc_info=e)
                 filtered = True

         # feedback

@@ -131,20 +134,20 @@ class Fediverse(Extension):
                 if filtered:
                     break

-                print ("[*] downloading... %s" % (url))
+                logger.info("[*] downloading... %s" % (url))
                 encoded_image = webp_to_png_base64(url)
-                print ("[*] downloaded.")
+                logger.info("[*] downloaded.")
                 if encoded_image:
-                    print ("[*] solving...")
+                    logger.info("[*] solving...")
                     try:
-                        solved = truecaptcha_solve(encoded_image)
+                        solved = self.truecaptcha_solve(encoded_image)
                         if solved:
-                            print ("[*] solved: %s" % (solved))
+                            logger.info("[*] solved: %s" % (solved))
                             filtered = filtered or (solved.lower() in ['ctkpaarr', 'spam'])
                         else:
-                            print ("[*] not solved")
+                            logger.info("[*] not solved")
                     except Exception as e:
-                        print ("[*] Not CAPTCHA strategy not working! %s" % (str(e)))
+                        logger.error("[*] Not CAPTCHA strategy not working!", exc_info=e)

         return filtered

Changed file: (file name not shown)

@@ -13,12 +13,14 @@
 import requests
 from decouple import config

-from base import Extension
+from base import Extension, Logger
+
+logger = Logger(name="wayback")

 try:
     client_encoding = config('CLIENT_ENCODING')
 except Exception as e:
-    print ("[*] Invaild configration: %s" % (str(e)))
+    logger.error("[*] Invalid configuration", exc_info=e)

 def get_cached_page_from_google(url):
     status_code, text = (0, '')

Changed file: (file name not shown)

@@ -32,9 +32,11 @@ from requests.auth import HTTPBasicAuth
 from urllib.parse import urlparse
 from decouple import config

-from base import Extension, extract_credentials, jsonrpc2_create_id, jsonrpc2_encode, jsonrpc2_result_encode
+from base import Extension, extract_credentials, jsonrpc2_create_id, jsonrpc2_encode, jsonrpc2_result_encode, Logger
+
+logger = Logger(name="server")

-# initalization
+# initialization
 try:
     listening_port = config('PORT', default=5555, cast=int)
     _username, _password, server_url = extract_credentials(config('SERVER_URL', default=''))

@@ -48,11 +50,11 @@ try:
     local_domain = config('LOCAL_DOMAIN', default='')
     proxy_pass = config('PROXY_PASS', default='')
 except KeyboardInterrupt:
-    print("\n[*] User has requested an interrupt")
-    print("[*] Application Exiting.....")
+    logger.warning("[*] User has requested an interrupt")
+    logger.warning("[*] Application Exiting.....")
     sys.exit()
 except Exception as e:
-    print("[*] Failed to initialize:", str(e))
+    logger.error("[*] Failed to initialize:", exc_info=e)

 parser = argparse.ArgumentParser()
 parser.add_argument('--max_conn', help="Maximum allowed connections", default=255, type=int)

@@ -107,7 +109,7 @@ def parse_first_data(data):
         parsed_data = (webserver, port, scheme, method, url)
     except Exception as e:
-        print("[*] Exception on parsing the header. Cause: %s" % (str(e)))
+        logger.error("[*] Exception on parsing the header", exc_info=e)

     return parsed_data

@@ -141,7 +143,7 @@ def conn_string(conn, data, addr):
     if local_domain != '':
         localserver = local_domain.encode(client_encoding)
         if webserver == localserver or data.find(b'\nHost: ' + localserver) > -1:
-            print ("[*] Detected the reverse proxy request: %s" % (local_domain))
+            logger.info("[*] Detected the reverse proxy request: %s" % local_domain)
             scheme, _webserver, _port = proxy_pass.encode(client_encoding).split(b':')
             webserver = _webserver[2:]
             port = int(_port.decode(client_encoding))

@@ -152,11 +154,11 @@ def jsonrpc2_server(conn, id, method, params):
     if method == "relay_accept":
         accepted_relay[id] = conn
         connection_speed = params['connection_speed']
-        print ("[*] connection speed: %s miliseconds" % (str(connection_speed)))
+        logger.info("[*] connection speed: %s milliseconds" % (str(connection_speed)))
         while conn.fileno() > -1:
             time.sleep(1)
         del accepted_relay[id]
-        print ("[*] relay destroyed: %s" % (id))
+        logger.info("[*] relay destroyed: %s" % id)
     else:
         Extension.dispatch_rpcmethod(method, "call", id, params, conn)

@@ -177,7 +179,7 @@ def proxy_connect(webserver, conn):
         p2 = Popen([openssl_binpath, "x509", "-req", "-days", "3650", "-CA", cacert, "-CAkey", cakey, "-set_serial", epoch, "-out", certpath], stdin=p1.stdout, stderr=PIPE)
         p2.communicate()
     except Exception as e:
-        print("[*] Skipped generating the certificate. Cause: %s" % (str(e)))
+        logger.error("[*] Skipped generating the certificate.", exc_info=e)

     # https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server
     # https://docs.python.org/3/library/ssl.html

@@ -194,7 +196,7 @@ def proxy_check_filtered(data, webserver, port, scheme, method, url):
     filtered = False

     filters = Extension.get_filters()
-    print ("[*] Checking data with %s filters..." % (str(len(filters))))
+    logger.info("[*] Checking data with %s filters..." % (str(len(filters))))
     for f in filters:
         filtered = f.test(filtered, data, webserver, port, scheme, method, url)

@@ -202,7 +204,7 @@ def proxy_check_filtered(data, webserver, port, scheme, method, url):
 def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
     try:
-        print("[*] Started the request. %s" % (str(addr[0])))
+        logger.info("[*] Started the request. %s" % (str(addr[0])))

         # SSL negotiation
         is_ssl = scheme in [b'https', b'tls', b'ssl']

@@ -295,7 +297,7 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
             # when blocked
             if is_http_403:
-                print ("[*] Blocked the request by remote server: %s" % (webserver.decode(client_encoding)))
+                logger.warning("[*] Blocked the request by remote server: %s" % (webserver.decode(client_encoding)))

                 def bypass_callback(response, *args, **kwargs):
                     if response.status_code != 200:

@@ -315,7 +317,7 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
                         conn.send(chunk)

                 if is_ssl and method == b'GET':
-                    print ("[*] Trying to bypass blocked request...")
+                    logger.info("[*] Trying to bypass blocked request...")
                     remote_url = "%s://%s%s" % (scheme.decode(client_encoding), webserver.decode(client_encoding), url.decode(client_encoding))
                     requests.get(remote_url, stream=True, verify=False, hooks={'response': bypass_callback})
                 else:

@@ -323,7 +325,7 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
             sock_close(sock, is_ssl)

-            print("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))
+            logger.info("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))

         # stateful mode
         elif server_connection_type == "stateful":

@@ -344,7 +346,7 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
             }

             # get client address
-            print ("[*] resolving the client address...")
+            logger.info("[*] resolving the client address...")
             while len(resolved_address_list) == 0:
                 try:
                     _, query_data = jsonrpc2_encode('get_client_address')

@@ -352,7 +354,7 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
                     if query.status_code == 200:
                         result = query.json()['result']
                         resolved_address_list.append(result['data'])
-                        print ("[*] resolved IP: %s" % (result['data']))
+                        logger.info("[*] resolved IP: %s" % (result['data']))
                 except requests.exceptions.ReadTimeout as e:
                     pass
             proxy_data['data']['client_address'] = resolved_address_list[0]

@@ -366,14 +368,14 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
                         jsondata = json.loads(chunk.decode(client_encoding, errors='ignore'))
                         if jsondata['jsonrpc'] == "2.0" and ("error" in jsondata):
                             e = jsondata['error']
-                            print ("[*] Error received from the relay server: (%s) %s" % (str(e['code']), str(e['message'])))
+                            logger.error("[*] Error received from the relay server: (%s) %s" % (str(e['code']), str(e['message'])))
                 except requests.exceptions.ReadTimeout as e:
                     pass

             id, raw_data = jsonrpc2_encode('relay_connect', proxy_data['data'])
             start_new_thread(relay_connect, (id, raw_data, proxy_data))

             # wait for the relay
-            print ("[*] waiting for the relay... %s" % (id))
+            logger.info("[*] waiting for the relay... %s" % id)
             max_reties = 30
             t = 0
             while t < max_reties and not id in accepted_relay:

@@ -381,11 +383,11 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
                 t += 1
             if t < max_reties:
                 sock = accepted_relay[id]
-                print ("[*] connected the relay. %s" % (id))
+                logger.info("[*] connected the relay. %s" % id)
                 sendall(sock, conn, data)
             else:
                 resolved_address_list.remove(resolved_address_list[0])
-                print ("[*] the relay is gone. %s" % (id))
+                logger.info("[*] the relay is gone. %s" % id)
                 sock_close(sock, is_ssl)
                 return

@@ -408,7 +410,7 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
             sock_close(sock, is_ssl)

-            print("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))
+            logger.info("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))

         # stateless mode
         elif server_connection_type == "stateless":

@@ -431,7 +433,7 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
             }

             _, raw_data = jsonrpc2_encode('relay_request', proxy_data['data'])
-            print("[*] Sending %s bytes..." % (str(len(raw_data))))
+            logger.info("[*] Sending %s bytes..." % (str(len(raw_data))))

             i = 0
             relay = requests.post(server_url, headers=proxy_data['headers'], data=raw_data, stream=True, auth=auth)

@@ -446,7 +448,7 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
                 buffered = buffered[-buffer_size*2:]
                 i += 1

-            print("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))
+            logger.info("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))

         # nothing at all
         else:

@@ -456,11 +458,11 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
         else:
             raise Exception("Unsupported connection type")

-        print("[*] Request and received. Done. %s" % (str(addr[0])))
+        logger.info("[*] Request and received. Done. %s" % (str(addr[0])))
         conn.close()
     except Exception as e:
         print(traceback.format_exc())
-        print("[*] Exception on requesting the data. Cause: %s" % (str(e)))
+        logger.error("[*] Exception on requesting the data.", exc_info=e)
         conn.sendall(b"HTTP/1.1 403 Forbidden\r\n\r\n{\"status\":403}")
         conn.close()

@@ -481,9 +483,9 @@ def start(): #Main Program
         sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
         sock.bind(('', listening_port))
         sock.listen(max_connection)
-        print("[*] Server started successfully [ %d ]" %(listening_port))
+        logger.info("[*] Server started successfully [ %d ]" % listening_port)
     except Exception as e:
-        print("[*] Unable to Initialize Socket:", str(e))
+        logger.error("[*] Unable to Initialize Socket", exc_info=e)
         sys.exit(2)

     while True:

@@ -493,7 +495,7 @@ def start(): #Main Program
             start_new_thread(conn_string, (conn, data, addr)) #Starting a thread
         except KeyboardInterrupt:
             sock.close()
-            print("\n[*] Graceful Shutdown")
+            logger.info("[*] Graceful Shutdown")
             sys.exit(1)

 if __name__== "__main__":

Changed file: smtp.py

@@ -12,21 +12,25 @@
 import asyncore
 from smtpd import SMTPServer
 import re
+import sys
 import json
 import requests
 from decouple import config
 from requests.auth import HTTPBasicAuth

-from base import extract_credentials, jsonrpc2_create_id, jsonrpc2_encode, jsonrpc2_result_encode
+from base import extract_credentials, jsonrpc2_create_id, jsonrpc2_encode, jsonrpc2_result_encode, Logger
+
+logger = Logger(name="smtp")

 try:
     smtp_host = config('SMTP_HOST', default='127.0.0.1')
     smtp_port = config('SMTP_PORT', default=25, cast=int)
     _username, _password, server_url = extract_credentials(config('SERVER_URL', default=''))
 except KeyboardInterrupt:
-    print("\n[*] User has requested an interrupt")
-    print("[*] Application Exiting.....")
+    logger.warning("[*] User has requested an interrupt")
+    logger.warning("[*] Application Exiting.....")
     sys.exit()

 auth = None

@@ -73,13 +77,13 @@ class CaterpillarSMTPServer(SMTPServer):
             if response.status_code == 200:
                 type, id, method, rpcdata = jsonrpc2_decode(response.text)
                 if rpcdata['success']:
-                    print("[*] Email sent successfully.")
+                    logger.info("[*] Email sent successfully.")
                 else:
                     raise Exception("(%s) %s" % (str(rpcdata['code']), rpcdata['message']))
             else:
                 raise Exception("Status %s" % (str(response.status_code)))
         except Exception as e:
-            print("[*] Failed to send email:", str(e))
+            logger.error("[*] Failed to send email", exc_info=e)

 # Start SMTP server
 smtp_server = CaterpillarSMTPServer((smtp_host, smtp_port), None)

Changed file: web.py

@@ -19,11 +19,14 @@ import importlib
 import hashlib
 from decouple import config

-from base import Extension, jsonrpc2_create_id, jsonrpc2_result_encode, jsonrpc2_error_encode
+from base import Extension, jsonrpc2_create_id, jsonrpc2_result_encode, jsonrpc2_error_encode, Logger
+
+# TODO: a custom Flask handler should be implemented later
+logger = Logger(name="web")

 app = Flask(__name__)
 app.config['UPLOAD_FOLDER'] = 'data/'
 if not os.path.exists(app.config['UPLOAD_FOLDER']):
     os.makedirs(app.config['UPLOAD_FOLDER'])

@@ -69,10 +72,10 @@ class Connection():
         self.messages.append(data)

     def recv(self, size):
-        print ("Not allowed method")
+        logger.info("Not allowed method")

     def close(self):
-        print ("Not allowed method")
+        logger.info("Not allowed method")

     def __init__(self, req):
         self.messages = []

@@ -84,11 +87,11 @@ if __name__ == "__main__":
         listening_port = config('PORT', default=5555, cast=int)
         client_encoding = config('CLIENT_ENCODING', default='utf-8')
     except KeyboardInterrupt:
-        print("\n[*] User has requested an interrupt")
-        print("[*] Application Exiting.....")
+        logger.warning("[*] User has requested an interrupt")
+        logger.warning("[*] Application Exiting.....")
         sys.exit()
     except Exception as e:
-        print("[*] Failed to initialize:", str(e))
+        logger.error("[*] Failed to initialize", exc_info=e)

     # set environment of Extension
     Extension.set_protocol('http')