# Caterpillar - The simple and parasitic web proxy with spam filter
# Namyheon Go (Catswords Research) <gnh1201@gmail.com>
# https://github.com/gnh1201/caterpillar
# Created at: 2022-10-06
# Updated at: 2024-02-20

import argparse
import socket
import sys
import os
from _thread import *
import base64
import json
import ssl
import time
import re
import hashlib
import resource
#import traceback
import io
from subprocess import Popen, PIPE
from datetime import datetime
from platform import python_version
from PIL import Image

import requests
from decouple import config
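# The third-party imports above are assumed to come from these PyPI packages
# (an assumption noted for reference, not pinned by this file):
#   pip install requests python-decouple Pillow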

try:
    listening_port = config('PORT', cast=int)
    server_url = config('SERVER_URL')
    cakey = config('CA_KEY')
    cacert = config('CA_CERT')
    certkey = config('CERT_KEY')
    certdir = config('CERT_DIR')
    openssl_binpath = config('OPENSSL_BINPATH')
    client_encoding = config('CLIENT_ENCODING')
    local_domain = config('LOCAL_DOMAIN')
    proxy_pass = config('PROXY_PASS')
    mastodon_server = config('MASTODON_SERVER')   # catswords.social
    mastodon_user_token = config('MASTODON_USER_TOKEN')   # catswords.social
    truecaptcha_userid = config('TRUECAPTCHA_USERID')   # truecaptcha.org
    truecaptcha_apikey = config('TRUECAPTCHA_APIKEY')   # truecaptcha.org
except KeyboardInterrupt:
    print("\n[*] User has requested an interrupt")
    print("[*] Application Exiting.....")
    sys.exit()
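# The settings above are read from a .env file via python-decouple. A minimal illustrative
# .env sketch follows; the values are placeholders, not defaults shipped with the project:
#   PORT=5555
#   SERVER_URL=localhost
#   CA_KEY=ca.key
#   CA_CERT=ca.crt
#   CERT_KEY=cert.key
#   CERT_DIR=certs/
#   OPENSSL_BINPATH=openssl
#   CLIENT_ENCODING=utf-8
#   LOCAL_DOMAIN=
#   PROXY_PASS=
#   MASTODON_SERVER=
#   MASTODON_USER_TOKEN=
#   TRUECAPTCHA_USERID=
#   TRUECAPTCHA_APIKEY=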

parser = argparse.ArgumentParser()

parser.add_argument('--max_conn', help="Maximum allowed connections", default=255, type=int)
parser.add_argument('--buffer_size', help="Buffer size in bytes for each recv() call", default=8192, type=int)

args = parser.parse_args()
max_connection = args.max_conn
buffer_size = args.buffer_size

# https://stackoverflow.com/questions/25475906/set-ulimit-c-from-outside-shell
resource.setrlimit(
    resource.RLIMIT_CORE,
    (resource.RLIM_INFINITY, resource.RLIM_INFINITY))
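# Example invocation (assuming this module is saved as caterpillar.py next to the .env file):
#   python3 caterpillar.py --max_conn 255 --buffer_size 8192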


def start():   # Main Program
    try:
        sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        sock.bind(('', listening_port))
        sock.listen(max_connection)
        print("[*] Server started successfully [ %d ]" % (listening_port))
    except Exception as e:
        print("[*] Unable to Initialize Socket")
        print(e)
        sys.exit(2)

    while True:
        try:
            conn, addr = sock.accept()   # Accept connection from client browser
            data = conn.recv(buffer_size)   # Receive client data
            start_new_thread(conn_string, (conn, data, addr))   # Start a thread
        except KeyboardInterrupt:
            sock.close()
            print("\n[*] Graceful Shutdown")
            sys.exit(1)


def conn_string(conn, data, addr):
    try:
        first_line = data.split(b'\n')[0]

        method, url = first_line.split()[0:2]

        http_pos = url.find(b'://')   # Finding the position of ://
        scheme = b'http'   # check http/https or other protocol
        if http_pos == -1:
            temp = url
        else:
            temp = url[(http_pos + 3):]
            scheme = url[0:http_pos]

        port_pos = temp.find(b':')

        webserver_pos = temp.find(b'/')
        if webserver_pos == -1:
            webserver_pos = len(temp)
        webserver = b''
        port = -1
        if port_pos == -1 or webserver_pos < port_pos:
            port = 80
            webserver = temp[:webserver_pos]
        else:
            port = int((temp[(port_pos + 1):])[:webserver_pos - port_pos - 1])
            webserver = temp[:port_pos]
            if port == 443:
                scheme = b'https'
    except Exception as e:
        conn.close()
        print("[*] Exception on parsing the header of %s. Cause: %s" % (str(addr[0]), str(e)))
        return

    # if it is a reverse proxy request
    if local_domain != '':
        localserver = local_domain.encode(client_encoding)
        if webserver == localserver or data.find(b'\nHost: ' + localserver) > -1:
            print("[*] ** Detected the reverse proxy request: %s" % (local_domain))
            scheme, _webserver, _port = proxy_pass.encode(client_encoding).split(b':')
            webserver = _webserver[2:]
            port = int(_port.decode(client_encoding))

    proxy_server(webserver, port, scheme, method, url, conn, addr, data)
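# Illustrative walk-through of the request-line parsing above (hypothetical requests):
#   "CONNECT example.org:443 HTTP/1.1"        -> webserver=b'example.org', port=443, scheme=b'https'
#   "GET http://example.org/index HTTP/1.1"   -> webserver=b'example.org', port=80,  scheme=b'http'
# When LOCAL_DOMAIN matches the target or Host header, the destination is rewritten from
# PROXY_PASS, which is expected in the form scheme://host:port (e.g. http://127.0.0.1:3000).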


def proxy_connect(webserver, conn):
    hostname = webserver.decode(client_encoding)
    certpath = "%s/%s.crt" % (certdir.rstrip('/'), hostname)

    # https://stackoverflow.com/questions/24055036/handle-https-request-in-proxy-server-by-c-sharp-connect-tunnel
    conn.send(b'HTTP/1.1 200 Connection Established\r\n\r\n')

    # https://github.com/inaz2/proxy2/blob/master/proxy2.py
    try:
        if not os.path.isfile(certpath):
            epoch = "%d" % (time.time() * 1000)
            p1 = Popen([openssl_binpath, "req", "-new", "-key", certkey, "-subj", "/CN=%s" % hostname], stdout=PIPE)
            p2 = Popen([openssl_binpath, "x509", "-req", "-days", "3650", "-CA", cacert, "-CAkey", cakey, "-set_serial", epoch, "-out", certpath], stdin=p1.stdout, stderr=PIPE)
            p2.communicate()
    except Exception as e:
        print("[*] Skipped generating the certificate. Cause: %s" % (str(e)))

    # https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server
    # https://docs.python.org/3/library/ssl.html
    context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
    context.load_cert_chain(certpath, certkey)

    conn = context.wrap_socket(conn, server_side=True)
    data = conn.recv(buffer_size)

    return (conn, data)
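# The on-demand certificate generation above is roughly equivalent to this shell pipeline
# (illustrative only; the paths come from the CA_KEY, CA_CERT, CERT_KEY and CERT_DIR settings):
#   openssl req -new -key $CERT_KEY -subj "/CN=<hostname>" \
#     | openssl x509 -req -days 3650 -CA $CA_CERT -CAkey $CA_KEY -set_serial <epoch_ms> -out $CERT_DIR/<hostname>.crt
# It assumes the CA key/certificate and the shared leaf key were created beforehand.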


def proxy_check_filtered(data, webserver, port, scheme, method, url):
    filtered = False

    # prevent cache confusion
    if data.find(b'<title>Welcome to nginx!</title>') > -1:
        return True

    # ctkpaarr
    if data.find(b'ctkpaarr') > -1:
        return True

    # allowed conditions
    if method == b'GET' or url.find(b'/api') > -1:
        return False

    # convert to text
    data_length = len(data)
    text = data.decode(client_encoding, errors='ignore')
    error_rate = (data_length - len(text)) / data_length
    if error_rate > 0.9:   # it is binary data
        return False

    # check IDs with the K-Anonymity strategy
    pattern = r'\b(?:(?<=\/@)|(?<=acct:))([a-zA-Z0-9]{10})\b'
    matches = list(set(re.findall(pattern, text)))
    if len(matches) > 0:
        print("[*] Found ID: %s" % (', '.join(matches)))
        try:
            filtered = not all(map(pwnedpasswords_test, matches))
        except Exception as e:
            print("[*] K-Anonymity strategy not working! %s" % (str(e)))
            filtered = True

    # check IDs with the VowelRatio10 strategy
    if filtered and len(matches) > 0:
        def vowel_ratio_test(s):
            ratio = calculate_vowel_ratio(s)
            return ratio > 0.2 and ratio < 0.7
        filtered = not all(map(vowel_ratio_test, matches))

    # check IDs with the Palindrome5 strategy
    if filtered and len(matches) > 0:
        filtered = not all(map(has_palindrome, matches))

    # check attached images (Not-CAPTCHA strategy)
    if not filtered and len(matches) > 0 and truecaptcha_userid != '':
        def webp_to_png_base64(url):
            try:
                response = requests.get(url)
                img = Image.open(io.BytesIO(response.content))
                img_png = img.convert("RGBA")
                buffered = io.BytesIO()
                img_png.save(buffered, format="PNG")
                encoded_image = base64.b64encode(buffered.getvalue()).decode(client_encoding)
                return encoded_image
            except:
                return None

        urls = re.findall(r'https://[^\s"]+\.webp', text)
        if len(urls) > 0:
            for url in urls:
                if filtered:
                    break

                print("[*] downloading... %s" % (url))
                encoded_image = webp_to_png_base64(url)
                print("[*] downloaded.")
                if encoded_image:
                    print("[*] solving...")
                    try:
                        solved = truecaptcha_solve(encoded_image)
                        if solved:
                            print("[*] solved: %s" % (solved))
                            filtered = solved.lower() in ['ctkpaarr', 'spam']
                        else:
                            print("[*] not solved")
                    except Exception as e:
                        print("[*] Not-CAPTCHA strategy not working! %s" % (str(e)))

    # take action
    if filtered:
        print("[*] Filtered from %s:%s" % (webserver.decode(client_encoding), str(port)))

        try:
            savedir = './savedfiles'
            if not os.path.exists(savedir):
                os.makedirs(savedir)
            current_time = datetime.now().strftime("%Y%m%d%H%M%S")
            file_path = os.path.join(savedir, ("%s_%s.bin" % (current_time, webserver.decode(client_encoding))))
            with open(file_path, 'wb') as file:
                file.write(data)
            print("[*] Saved the file: %s" % (file_path))
        except Exception as e:
            print("[*] Failed to save the file: %s" % (str(e)))

    return filtered
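# Overview of the filter pipeline implemented above:
#   1. hard matches (the default nginx page, the 'ctkpaarr' marker) are rejected outright;
#   2. GET and /api requests are always allowed, and mostly-binary payloads are skipped;
#   3. extracted 10-character account IDs are screened with K-Anonymity (pwnedpasswords),
#      then the VowelRatio10 and Palindrome5 checks can clear false positives;
#   4. attached .webp images can be text-checked via truecaptcha (Not-CAPTCHA strategy);
#   5. filtered payloads are saved under ./savedfiles for later review.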


def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
    try:
        print("[*] Started the request. %s" % (str(addr[0])))

        # SSL negotiation
        if scheme in [b'https', b'tls', b'ssl'] and method == b'CONNECT':
            while True:
                try:
                    conn, data = proxy_connect(webserver, conn)
                    break   # success
                #except OSError as e:
                #    print("[*] Retrying SSL negotiation... (%s:%s) %s" % (webserver.decode(client_encoding), str(port), str(e)))
                except Exception as e:
                    raise Exception("SSL negotiation failed. (%s:%s) %s" % (webserver.decode(client_encoding), str(port), str(e)))

        # Wait to see if there is more data to transmit
        def sendall(sock, conn, data):
            # send the first chunk
            if proxy_check_filtered(data, webserver, port, scheme, method, url):
                sock.close()
                raise Exception("Filtered request")
            sock.send(data)
            if len(data) < buffer_size:
                return

            # send the following chunks
            buffered = b''
            conn.settimeout(1)
            while True:
                try:
                    chunk = conn.recv(buffer_size)
                    if not chunk:
                        break
                    buffered += chunk
                    if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
                        sock.close()
                        raise Exception("Filtered request")
                    sock.send(chunk)
                    if len(buffered) > buffer_size * 2:
                        buffered = buffered[-buffer_size * 2:]
                except:
                    break

        # do response
        if server_url == "localhost":
            sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)

            if scheme in [b'https', b'tls', b'ssl']:
                context = ssl.create_default_context()
                context.check_hostname = False
                context.verify_mode = ssl.CERT_NONE

                sock = context.wrap_socket(sock, server_hostname=webserver.decode(client_encoding))
                sock.connect((webserver, port))
                #sock.sendall(data)
                sendall(sock, conn, data)
            else:
                sock.connect((webserver, port))
                #sock.sendall(data)
                sendall(sock, conn, data)

            i = 0
            buffered = b''
            while True:
                chunk = sock.recv(buffer_size)
                if not chunk:
                    break
                buffered += chunk
                if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
                    sock.close()
                    add_filtered_host(webserver.decode(client_encoding), '127.0.0.1')
                    raise Exception("Filtered response")
                conn.send(chunk)
                if len(buffered) > buffer_size * 2:
                    buffered = buffered[-buffer_size * 2:]
                i += 1

            print("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))

        else:
            proxy_data = {
                'headers': {
                    "User-Agent": "php-httpproxy/0.1.4 (Client; Python " + python_version() + "; abuse@catswords.net)",
                },
                'data': {
                    "data": base64.b64encode(data).decode(client_encoding),
                    "client": str(addr[0]),
                    "server": webserver.decode(client_encoding),
                    "port": str(port),
                    "scheme": scheme.decode(client_encoding),
                    "url": url.decode(client_encoding),
                    "length": str(len(data)),
                    "chunksize": str(buffer_size),
                    "datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
                }
            }
            raw_data = json.dumps(proxy_data['data'])

            print("[*] Sending %s bytes..." % (str(len(raw_data))))

            i = 0
            relay = requests.post(server_url, headers=proxy_data['headers'], data=raw_data, stream=True)
            buffered = b''
            for chunk in relay.iter_content(chunk_size=buffer_size):
                buffered += chunk
                if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
                    add_filtered_host(webserver.decode(client_encoding), '127.0.0.1')
                    raise Exception("Filtered response")
                conn.send(chunk)
                if len(buffered) > buffer_size * 2:
                    buffered = buffered[-buffer_size * 2:]
                i += 1

            print("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))

        print("[*] Request and received. Done. %s" % (str(addr[0])))
        conn.close()
    except Exception as e:
        #print(traceback.format_exc())
        print("[*] Exception on requesting the data. Cause: %s" % (str(e)))
        conn.sendall(b"HTTP/1.1 403 Forbidden\n\n{\"status\":403}")
        conn.close()
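# Two relay modes are implemented above: with SERVER_URL=localhost the proxy connects to the
# destination itself, while any other SERVER_URL is treated as a php-httpproxy-compatible
# endpoint that receives the request as a base64/JSON envelope over HTTP POST.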


# journal filtered hosts
def add_filtered_host(domain, ip_address):
    hosts_path = './filtered.hosts'

    # tolerate a missing journal on the first run
    lines = []
    if os.path.exists(hosts_path):
        with open(hosts_path, 'r') as file:
            lines = file.readlines()

    domain_exists = any(domain in line for line in lines)
    if not domain_exists:
        lines.append(f"{ip_address}\t{domain}\n")
        with open(hosts_path, 'w') as file:
            file.writelines(lines)
        if mastodon_user_token != '':   # notify catswords.social
            post_status_to_mastodon(f"[{mastodon_server} user]\r\n\r\n{domain} is a domain with suspicious spam activity.\r\n\r\n#catswords")
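# The journal uses a hosts-file-like layout, one "<ip><TAB><domain>" entry per line, e.g.:
#   127.0.0.1	badexample.org
# (illustrative entry; the callers above always record 127.0.0.1 as the address)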


# notify a mastodon server
def post_status_to_mastodon(text, media_ids=None, poll_options=None, poll_expires_in=None, scheduled_at=None, idempotency_key=None):
    url = f"https://{mastodon_server}/api/v1/statuses"
    headers = {
        "Authorization": f"Bearer {mastodon_user_token}",
        "Content-Type": "application/x-www-form-urlencoded",
    }
    form_data = {
        "status": text,
        "media_ids[]": media_ids,
        "poll[options][]": poll_options,
        "poll[expires_in]": poll_expires_in,
        "scheduled_at": scheduled_at,
    }
    if idempotency_key:
        headers["Idempotency-Key"] = idempotency_key

    response = requests.post(url, headers=headers, data=form_data)
    return response.json()
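# Example call (hypothetical status text; requires MASTODON_SERVER and MASTODON_USER_TOKEN):
#   post_status_to_mastodon("badexample.org is a domain with suspicious spam activity.")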


# Strategy: K-Anonymity test - use api.pwnedpasswords.com
def pwnedpasswords_test(s):
    # SHA1 of the string (the range API returns uppercase hex)
    p_sha1 = hashlib.sha1(s.encode()).hexdigest().upper()

    # First 5 chars of SHA1 for k-anonymity API use
    f5_sha1 = p_sha1[:5]

    # Remaining 35 chars of SHA1 to match against the API output
    r35_sha1 = p_sha1[5:]

    # Making GET request using Requests library
    response = requests.get(f'https://api.pwnedpasswords.com/range/{f5_sha1}')

    # Checking if request was successful
    if response.status_code == 200:
        # Parsing response text; each line has the form "HASH_SUFFIX:COUNT"
        hashes = response.text.split('\r\n')

        # Using list comprehension to find matching hash suffixes
        matching_hashes = [line.split(':')[0] for line in hashes if line.split(':')[0] == r35_sha1]

        # If there are matching hashes, return True, else return False
        return bool(matching_hashes)
    else:
        raise Exception("api.pwnedpasswords.com response status: %s" % (str(response.status_code)))

    return False
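# Behavior sketch under the reading above: an extracted ID counts as "known/human-like" when its
# SHA-1 appears in the Have I Been Pwned corpus, e.g. pwnedpasswords_test("password") is True,
# while a random-looking ID such as "Xk7Qz9Rt2M" will most likely return False and fall through
# to the VowelRatio10 and Palindrome5 checks.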


# Strategy: Not-CAPTCHA - use truecaptcha.org
def truecaptcha_solve(encoded_image):
    url = 'https://api.apitruecaptcha.org/one/gettext'
    data = {
        'userid': truecaptcha_userid,
        'apikey': truecaptcha_apikey,
        'data': encoded_image,
        'mode': 'human'
    }
    response = requests.post(url=url, json=data)

    if response.status_code == 200:
        data = response.json()

        if 'error_message' in data:
            print("[*] Error: %s" % (data['error_message']))
            return None
        if 'result' in data:
            return data['result']
    else:
        raise Exception("api.apitruecaptcha.org response status: %s" % (str(response.status_code)))

    return None
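# Usage sketch: `encoded_image` is a base64-encoded PNG (see webp_to_png_base64 above); the call
# returns the recognized text, or None when the API reports an error or nothing is recognized.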


# Strategy: VowelRatio10
def calculate_vowel_ratio(s):
    # Calculate the length of the string.
    length = len(s)
    if length == 0:
        return 0.0

    # Count the number of vowels ('a', 'e', 'i', 'o', 'u') in the string.
    vowel_count = sum(1 for char in s if char.lower() in 'aeiou')

    # Calculate the ratio of vowels to the total length of the string.
    vowel_ratio = vowel_count / length

    return vowel_ratio
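# Worked examples (illustrative strings): "caterpillar" has 4 vowels out of 11 characters,
# so its ratio is about 0.36 and passes the 0.2 < ratio < 0.7 test used above, while a
# consonant-only string such as "xkcdqwrtzp" scores 0.0 and fails it.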


# Strategy: Palindrome5
def has_palindrome(input_string):
    def is_palindrome(s):
        return s == s[::-1]

    n = len(input_string)
    for i in range(n):
        for j in range(i + 5, n + 1):   # Find substrings of at least 5 characters
            substring = input_string[i:j]
            if is_palindrome(substring):
                return True
    return False
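# Worked examples (illustrative strings): has_palindrome("abxyzzyxcd") is True because it
# contains the 6-character palindrome "xyzzyx"; has_palindrome("abcdefghij") is False since
# no substring of length 5 or more reads the same backwards.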


if __name__ == "__main__":
    start()