Mirror of https://github.com/gnh1201/caterpillar.git (synced 2025-06-18 02:59:07 +00:00)

Compare commits: 276 commits
.env.example (new file, +13 lines)

@@ -0,0 +1,13 @@
+[settings]
+PORT=5555
+SERVER_URL=localhost
+SERVER_CONNECTION_TYPE=proxy
+CA_KEY=ca.key
+CA_CERT=ca.crt
+CERT_KEY=cert.key
+CERT_DIR=certs/
+#OPENSSL_BINPATH=openssl
+CLIENT_ENCODING=utf-8
+USE_EXTENSIONS=wayback.Wayback,bio.PyBio,alwaysonline.AlwaysOnline
+ES_HOST=http://127.0.0.1:9200
+ES_INDEX=alwaysonline
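The example settings use a single [settings] section of key=value pairs. As a rough, illustrative sketch only (not part of this changeset), a PHP worker deployed next to such a file could read it with the built-in INI parser; the filename and fallback values below are assumptions.

```php
<?php
// Illustrative sketch: read the example settings with PHP's INI parser.
// Note: PHP's parser expects ";" for comments, so a line such as
// "#OPENSSL_BINPATH=openssl" may need to be written as ";OPENSSL_BINPATH=openssl".
$ini = parse_ini_file(".env.example", true);
$settings = is_array($ini) ? ($ini["settings"] ?? array()) : array();

$port = (int) ($settings["PORT"] ?? 5555);            // proxy listening port
$serverUrl = $settings["SERVER_URL"] ?? "localhost";  // "localhost" = no remote worker
$extensions = array_filter(explode(",", $settings["USE_EXTENSIONS"] ?? ""));

printf("port=%d server=%s extensions=%s\n", $port, $serverUrl, implode(" ", $extensions));
```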
.github/FUNDING.yml (vendored; 8 lines → 2 lines)

@@ -1,8 +1,2 @@
-# These are supported funding model platforms
 github: gnh1201
-open_collective: welsonjs
-liberapay: catswords
-custom: ['https://www.buymeacoffee.com/catswords', 'https://toss.me/catswords']
-patreon: catswords # Replace with a single Patreon username
-ko_fi: catswords
+custom: ['https://gnh1201.link']
.github/workflows/llm-code-review.yml (new file, vendored, +23 lines)

@@ -0,0 +1,23 @@
+name: AI Code Review
+
+on:
+  pull_request:
+    types: [opened, synchronize, reopened]
+  issues:
+    types: [opened, reopened]
+
+jobs:
+  repofix:
+    runs-on: ubuntu-latest
+    steps:
+      - name: Run RepoFixAI
+        uses: Manav916/llm-code-review@main
+        with:
+          groq_api_key: ${{ secrets.GROQ_API_KEY }}
+          groq_model: 'gemma-2-9b-it'
+          github_token: ${{ secrets.GITHUB_TOKEN }}
+          # exclude_extensions: 'txt'
+          repo_owner: ${{ github.repository_owner }}
+          repo_name: ${{ github.event.repository.name }}
+          event_number: ${{ github.event.number || github.event.issue.number }} # when listening for both pull requests and issues
+          event_name: ${{ github.event_name }}
.gitignore (vendored; 4 lines → 179 lines)

@@ -1,4 +1,179 @@
 certs/
 savedfiles/
+logs/
 settings.ini
 .env
+*.crt
+*.key
+
+### Python ###
+# Byte-compiled / optimized / DLL files
+__pycache__/
+*.py[cod]
+*$py.class
+
+# C extensions
+*.so
+
+# Distribution / packaging
+.Python
+build/
+develop-eggs/
+dist/
+downloads/
+eggs/
+.eggs/
+lib/
+lib64/
+parts/
+sdist/
+var/
+wheels/
+share/python-wheels/
+*.egg-info/
+.installed.cfg
+*.egg
+MANIFEST
+
+# PyInstaller
+# Usually these files are written by a python script from a template
+# before PyInstaller builds the exe, so as to inject date/other infos into it.
+*.manifest
+*.spec
+
+# Installer logs
+pip-log.txt
+pip-delete-this-directory.txt
+
+# Unit test / coverage reports
+htmlcov/
+.tox/
+.nox/
+.coverage
+.coverage.*
+.cache
+nosetests.xml
+coverage.xml
+*.cover
+*.py,cover
+.hypothesis/
+.pytest_cache/
+cover/
+
+# Translations
+*.mo
+*.pot
+
+# Django stuff:
+*.log
+local_settings.py
+db.sqlite3
+db.sqlite3-journal
+
+# Flask stuff:
+instance/
+.webassets-cache
+
+# Scrapy stuff:
+.scrapy
+
+# Sphinx documentation
+docs/_build/
+
+# PyBuilder
+.pybuilder/
+target/
+
+# Jupyter Notebook
+.ipynb_checkpoints
+
+# IPython
+profile_default/
+ipython_config.py
+
+# pyenv
+# For a library or package, you might want to ignore these files since the code is
+# intended to run in multiple environments; otherwise, check them in:
+# .python-version
+
+# pipenv
+# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
+# However, in case of collaboration, if having platform-specific dependencies or dependencies
+# having no cross-platform support, pipenv may install dependencies that don't work, or not
+# install all needed dependencies.
+#Pipfile.lock
+
+# poetry
+# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
+# This is especially recommended for binary packages to ensure reproducibility, and is more
+# commonly ignored for libraries.
+# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
+#poetry.lock
+
+# pdm
+# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
+#pdm.lock
+# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
+# in version control.
+# https://pdm.fming.dev/#use-with-ide
+.pdm.toml
+
+# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
+__pypackages__/
+
+# Celery stuff
+celerybeat-schedule
+celerybeat.pid
+
+# SageMath parsed files
+*.sage.py
+
+# Environments
+.env
+.venv
+env/
+venv/
+ENV/
+env.bak/
+venv.bak/
+
+# Spyder project settings
+.spyderproject
+.spyproject
+
+# Rope project settings
+.ropeproject
+
+# mkdocs documentation
+/site
+
+# mypy
+.mypy_cache/
+.dmypy.json
+dmypy.json
+
+# Pyre type checker
+.pyre/
+
+# pytype static type analyzer
+.pytype/
+
+# Cython debug symbols
+cython_debug/
+
+# PyCharm
+# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
+# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
+# and can be added to the global gitignore or merged into this file. For a more nuclear
+# option (not recommended) you can uncomment the following to ignore the entire idea folder.
+#.idea/
+
+### Python Patch ###
+# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
+poetry.toml
+
+# ruff
+.ruff_cache/
+
+# LSP config files
+pyrightconfig.json
.gitmodules (new file, vendored, +3 lines)

@@ -0,0 +1,3 @@
+[submodule "plugins"]
+    path = plugins
+    url = https://github.com/gnh1201/caterpillar-plugins
README.md (77 lines changed)

@@ -1,23 +1,43 @@
-# gnh1201/caterpillar
-Caterpillar Proxy - The simple web debugging proxy (formerly, php-httpproxy)
-
-
+# Caterpillar Proxy (Songchoongi Project)
+[![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Fgnh1201%2Fcaterpillar.svg?type=shield)](https://app.fossa.com/projects/git%2Bgithub.com%2Fgnh1201%2Fcaterpillar?ref=badge_shield)
+[![DOI](https://zenodo.org/badge/DOI/10.5281/zenodo.13346533.svg)](https://doi.org/10.5281/zenodo.13346533)
+[](#)
+[](https://www.slideshare.net/slideshow/2024-caterpillar-project-in-2024-korea-oss-contest/273031732)
+[](https://discord.gg/9VVTHpfsVW)
+[](https://github.com/gnh1201/welsonjs/discussions/167)
+
+Caterpillar Proxy (Songchoongi Project) - The simple web debugging proxy (formerly, php-httpproxy)
+
+
+You can connect all physical and logical channels with communication capabilities to the web!
+
+Imagine various means such as time machines, satellites, quantum technology, sound, light, the Philosopher's Stone, or Excalibur, just like in science fiction movies! Caterpillar Proxy supports the implementation of extensions for Connectors, Filters, and RPC methods to bring your imagination to life.
+
+:rocket: [Open the Caterpillar Proxy Web Console](https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/console.html)
+
 ## Use cases
-* [Build a network tunnel using Python and the LAMP(PHP) stack.](https://qiita.com/gnh1201/items/40f9350ca6d308def6d4)
-* [K-Anonymity for Spam Filtering: Case with Mastodon, and Misskey](https://qiita.com/gnh1201/items/09f4081f84610db3a9d3)
-* [File Upload Attack Test with Caterpillar Proxy](https://youtu.be/sPZOCgYtLRw)
+* [Build a network tunnel using Python and the LAMP(PHP) stack (qiita.com)](https://qiita.com/gnh1201/items/40f9350ca6d308def6d4)
+* [K-Anonymity for Spam Filtering: Case with Mastodon, and Misskey (qiita.com)](https://qiita.com/gnh1201/items/09f4081f84610db3a9d3)
+* [File Upload Vulnerability Attack Test (Caterpillar Proxy) (youtu.be)](https://youtu.be/sPZOCgYtLRw)
+* [Real-time processing of emergency disaster sensor data (e.g., fire detection).](https://catswords.social/@catswords_oss/114016647285923011)
 
 ## How it works
 
 ### Basic structure
 ```
-You <-> Proxy client (Python) <-> Parasitized proxy server (Optional, PHP) <-> On the Web
+* You <-> Proxy client (Python) <-> Parasitized proxy server (Optional, PHP/LAMP) <-> On the Web
+* You <-> Proxy client (Python) <-> Connector extensions (Optional, Python) <-> On the Web
 ```
 
 For example, build a simple web debugging proxy on the shared servers.
 
 ### Stateful mode
-This project supports two modes of connection. The default is stateless. You can use the stateful mode to avoid being constrained by transfer capacity limits. See the [Stateful mode (github.com/gnh1201/caterpillar wiki)](https://github.com/gnh1201/caterpillar/wiki/Stateful-mode).
+This project supports two modes of connection. The default is stateless. You can use the stateful mode to avoid being constrained by transfer capacity limits. See the [Stateful mode (catswords-oss.rdbl.io)](https://catswords-oss.rdbl.io/1155378128/5211324242).
+
+### Connector extensions
+This project supports the implementation of Connector extensions. The provided basic examples include implementations of web archives (caches) and serial communication as Connector extensions. Go to the [caterpillar-plugins repository (github.com)](https://github.com/gnh1201/caterpillar-plugins)
 
 ## (Optional) Before to use
 If you have a server that ***will be parasitized*** and you want to proxy it, you should upload the `index.php` file to a shared server. The index.php file is located in the `assets/php` directory within this repository.

@@ -27,17 +47,21 @@ If you have a server that ***will be parasitized*** and you want to proxy it, yo
 
 ```
 [settings]
+CONNECTION_TIMEOUT=1
 PORT=5555
-SERVER_URL=http://example.org
-SERVER_CONNECTION_TYPE=stateless
+SERVER_URL=localhost
+SERVER_CONNECTION_TYPE=
 CA_KEY=ca.key
 CA_CERT=ca.crt
 CERT_KEY=cert.key
 CERT_DIR=certs/
 OPENSSL_BINPATH=openssl
 CLIENT_ENCODING=utf-8
+USE_EXTENSIONS=wayback.Wayback,bio.PyBio
 ```
 
+***Note***: If using Caterpillar Proxy (Python) alone, set SERVER_URL=localhost. Otherwise, use the endpoint URL of the Worker script (PHP or Java), e.g., SERVER_URL=http://example.org.
+
 - (Optional) Create a certificate for SSL decryption
 
 ```bash

@@ -57,14 +81,35 @@ sudo update-ca-certificates
 4. (Optional) With [Cloudflare](https://cloudflare.com), we can expect to accelerate the 4x speed and reduce the network stuck.
 
 ## Extensions
-* [Web Console Available](https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/console.html)
-* Fediverse (e.g., Mastodon): See the [Fediverse (github.com/gnh1201/caterpillar wiki)](https://github.com/gnh1201/caterpillar/wiki/Fediverse).
-* Wayback (Private browsing with Google or Wayback cache): See the [Wayback (github.com/gnh1201/caterpillar wiki)](https://github.com/gnh1201/caterpillar/wiki/Wayback).
+* [Web Console](https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/console.html)
+* Fediverse (e.g., Mastodon): See the [Fediverse (catswords-oss.rdbl.io)](https://catswords-oss.rdbl.io/1155378128/3821602484).
+* Wayback (Private browsing with Google or Wayback cache): See the [Wayback (catswords-oss.rdbl.io)](https://catswords-oss.rdbl.io/1155378128/6994492654)
 
 ## Thanks to
-* Pan Art by [@yeohangdang@i.peacht.art](#):
+* Pan Art by [@yeohangdang@i.peacht.art](#): [Image File](assets/img/logo.png)
 * [GitHub Sponsors](https://github.com/sponsors/gnh1201)
 
+## Contributors
+<a href="https://github.com/gnh1201/caterpillar/graphs/contributors">
+  <img src="https://contrib.rocks/image?repo=gnh1201/caterpillar" alt="Contributors" />
+</a>
+
+## Our roadmap
+
+
 ## Report abuse
-* ActivityPub [@gnh1201@catswords.social](https://catswords.social/@gnh1201)
-* abuse@catswords.net
+- abuse@catswords.net
+- [GitHub Security Advisories (gnh1201/caterpillar)](https://github.com/gnh1201/caterpillar/security)
+
+## Join the community
+- ActivityPub [@catswords_oss@catswords.social](https://catswords.social/@catswords_oss)
+- XMPP [catswords@conference.omemo.id](xmpp:catswords@conference.omemo.id?join)
+- [Join Catswords OSS on Microsoft Teams (teams.live.com)](https://teams.live.com/l/community/FEACHncAhq8ldnojAI)
+- [Join Catswords OSS #caterpillar on Discord (discord.gg)](https://discord.gg/9VVTHpfsVW)
+
+## Special channels
+- [A paid consultation channel (m.expert.naver.com)](https://m.expert.naver.com/mobile/expert/product/detail?storeId=100051156&productId=100144540) is available for Korean (한국어) region.
+- [Join the private operations channel (forms.gle)](https://forms.gle/ZKAAaGTiGamksHoo8) is available for all regions.
+
+## License
+[](https://app.fossa.com/projects/git%2Bgithub.com%2Fgnh1201%2Fcaterpillar?ref=badge_large)
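The README describes a Python client talking to a "parasitized" PHP worker over JSON-RPC 2.0. As a hedged illustration of that wire format (the endpoint URL and request id below are made up; relay_get_phpversion is one of the methods handled by the worker shown later in this diff), a request could be posted like this:

```php
<?php
// Hypothetical example: POST a JSON-RPC 2.0 call to a deployed index.php worker.
// The endpoint URL is an assumption, not taken from the repository.
$payload = json_encode([
    "jsonrpc" => "2.0",
    "method"  => "relay_get_phpversion",
    "params"  => [],
    "id"      => "example-1",
]);

$ch = curl_init("http://example.org/index.php");
curl_setopt($ch, CURLOPT_POST, true);
curl_setopt($ch, CURLOPT_POSTFIELDS, $payload);
curl_setopt($ch, CURLOPT_HTTPHEADER, [
    "Content-Type: application/json",
    // The worker inspects the user agent for this prefix before treating
    // the request as coming from a Caterpillar client.
    "X-User-Agent: php-httpproxy/0.1.6.10 (Client; example)",
]);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
$response = curl_exec($ch);
curl_close($ch);
echo $response;
```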
assets/img/cover.png — new binary file (binary file not shown; 2.3 MiB)
A modified binary image (name not shown in this extract; 143 KiB before and after)
assets/img/roadmap.png — new binary file (binary file not shown; 500 KiB)
Deleted file (182 lines removed; a Node.js/Express JSON-RPC relay worker — the file name is not shown in this extract). Removed contents:

@@ -1,182 +0,0 @@
// https://github.com/gnh1201/caterpillar

const express = require('express');
const bodyParser = require('body-parser');
const net = require('net');
const tls = require('tls');

const DEFAULT_SOCKET_TIMEOUT = 1000; // milliseconds
const STATEFUL_SOCKET_TIMEOUT = 30000; // milliseconds

const app = express();
const port = 3000; // listening port number

app.use(bodyParser.json());

function jsonrpc2_encode(method, params, id = '') {
    const data = {
        jsonrpc: '2.0',
        method: method,
        params: params,
        id: id
    };
    return JSON.stringify(data);
}

function jsonrpc2_error_encode(error, id = '') {
    const data = {
        jsonrpc: '2.0',
        error: error,
        id: id
    };
    return JSON.stringify(data);
}

function read_from_remote_server(remote_address, remote_port, scheme, data = null, conn = null, buffer_size = 8192, id = '') {
    const sock = scheme === "https" || scheme === "ssl" || scheme === "tls"
        ? tls.connect(remote_port, remote_address)
        : net.connect(remote_port, remote_address);

    sock.on('error', error => {
        const err = {
            status: 502,
            code: error.code,
            message: error.message
        };

        if (!conn) {
            console.log(jsonrpc2_error_encode(err, id));
        } else {
            let buf = `HTTP/1.1 502 Bad Gateway\r\n\r\n`;
            buf += jsonrpc2_error_encode(err, id);
            conn.write(buf);
        }
    });

    sock.on('connect', () => {
        if (!conn) {
            sock.write(data);

            sock.on('data', buf => {
                console.log(buf.toString());
            });
        } else {
            conn.on('data', buf => {
                sock.write(buf);
            });

            sock.on('data', buf => {
                conn.write(buf);
            });
        }
    });

    sock.on('end', () => {
        sock.end();
    });
}

function relay_request(params, id = '') {
    const { buffer_size, request_data, request_length, client_address, client_port, client_encoding, remote_address, remote_port, scheme, datetime } = params;

    const request_header = parse_headers(Buffer.from(request_data, 'base64').toString());

    switch (request_header['@method'][0]) {
        case 'CONNECT':
            const err = {
                status: 405,
                code: -1,
                message: "Method Not Allowed"
            };
            console.log(jsonrpc2_error_encode(err, id));
            break;

        default:
            read_from_remote_server(remote_address, remote_port, scheme, Buffer.from(request_data, 'base64'), null, buffer_size, id);
    }
}

function relay_connect(params, id = '') {
    const { buffer_size, client_address, client_port, client_encoding, remote_address, remote_port, scheme, datetime } = params;

    const starttime = Date.now();
    const sock = net.connect(client_port, client_address);

    sock.on('error', error => {
        const err = {
            status: 502,
            code: error.code,
            message: error.message,
            _params: params
        };
        console.log(jsonrpc2_error_encode(err, id));
    });

    sock.on('connect', () => {
        const stoptime = Date.now();
        const connection_speed = Math.floor((stoptime - starttime));
        const data = jsonrpc2_encode("relay_accept", {
            success: true,
            connection_speed: connection_speed
        }, id);
        sock.write(data + '\r\n\r\n');

        read_from_remote_server(remote_address, remote_port, scheme, null, sock, buffer_size, id);
    });
}

function parse_headers(str) {
    const headers = {};

    const lines = str.split(/\r?\n/);

    const first_line = lines.shift();
    headers['@method'] = first_line.split(' ');

    lines.forEach(line => {
        const match = line.match(/^([^:]+):(.*)$/);
        if (match) {
            headers[match[1]] = match[2].trim();
        }
    });

    return headers;
}

function get_client_address(req, res) {
    const client_address = req.ip;
    const response = {
        client_address: client_address
    };
    res.json(response);
}

app.post('/', (req, res) => {
    const context = req.body;
    if (context.jsonrpc === '2.0') {
        const method = context.method;
        switch (method) {
            case 'relay_request':
                relay_request(context.params, context.id);
                break;

            case 'relay_connect':
                relay_connect(context.params, context.id);
                break;

            case 'get_client_address':
                get_client_address(req, res);
                break;

            default:
                res.status(400).send('Invalid method');
                break;
        }
    } else {
        res.status(400).send('Invalid JSON-RPC version');
    }
});

app.listen(port, () => {
    console.log(`Server is running on port ${port}`);
});
Deleted file (204 lines removed; a Perl JSON-RPC relay worker — the file name is not shown in this extract). Removed contents:

@@ -1,204 +0,0 @@
# https://github.com/gnh1201/caterpillar

use JSON;
use IO::Socket::INET;
use IO::Socket::SSL;
use Time::HiRes qw(time);

use constant DEFAULT_SOCKET_TIMEOUT => 1;
use constant STATEFUL_SOCKET_TIMEOUT => 30;

sub jsonrpc2_encode {
    my ($method, $params, $id) = @_;
    my $data = {
        jsonrpc => "2.0",
        method => $method,
        params => $params,
        id => $id
    };
    return encode_json($data);
}

sub jsonrpc2_result_encode {
    my ($result, $id) = @_;
    my $data = {
        jsonrpc => "2.0",
        result => $result,
        id => $id
    };
    return encode_json($data);
}

sub jsonrpc2_error_encode {
    my ($error, $id) = @_;
    my $data = {
        jsonrpc => "2.0",
        error => $error,
        id => $id
    };
    return encode_json($data);
}

sub parse_headers {
    my ($str) = @_;
    my %headers;

    my @lines = split(/\r?\n/, $str);

    my $first_line = shift(@lines);
    $headers{'@method'} = [split(' ', $first_line)];

    foreach my $line (@lines) {
        if ($line =~ /^([^:]+):(.*)$/) {
            $headers{$1} = trim($2);
        }
    }

    return \%headers;
}

sub read_from_remote_server {
    my ($remote_address, $remote_port, $scheme, $data, $conn, $buffer_size, $id) = @_;
    my $sock;
    if ($scheme ~~ ["https", "ssl", "tls"]) {
        $sock = IO::Socket::SSL->new(
            PeerAddr => $remote_address,
            PeerPort => $remote_port,
            SSL_verify_mode => 0, # You may adjust SSL options as needed
            Timeout => DEFAULT_SOCKET_TIMEOUT
        );
    } else {
        $sock = IO::Socket::INET->new(
            PeerAddr => $remote_address,
            PeerPort => $remote_port,
            Proto => 'tcp',
            Timeout => DEFAULT_SOCKET_TIMEOUT
        );
    }

    if (!$sock) {
        my $error = {
            status => 502,
            code => $!,
            message => $@
        };

        if (!$conn) {
            print jsonrpc2_error_encode($error, $id);
        } else {
            my $buf = sprintf("HTTP/1.1 502 Bad Gateway\r\n\r\n");
            $buf .= jsonrpc2_error_encode($error, $id);
            print $conn $buf;
        }
    } else {
        if (!$conn) {
            # send data
            print $sock $data;

            # receive data
            my $buf;
            while (!eof($sock) && defined($buf = <$sock>)) {
                print $buf;
            }
        } else {
            # send data
            my $buf;
            while (!eof($conn) && defined($buf = <$conn>)) {
                print $sock $buf;
            }

            # receive data
            $buf = "";
            while (!eof($sock) && defined($buf = <$sock>)) {
                print $conn $buf;
            }
        }

        close($sock);
    }
}

sub relay_request {
    my ($params, $id) = @_;
    my $buffer_size = $params->{'buffer_size'};
    my $request_data = decode_base64($params->{'request_data'});
    my $request_header = parse_headers($request_data);
    my $request_length = int($params->{'request_length'});
    my $client_address = $params->{'client_address'};
    my $client_port = int($params->{'client_port'});
    my $client_encoding = $params->{'client_encoding'};
    my $remote_address = $params->{'remote_address'};
    my $remote_port = int($params->{'remote_port'});
    my $scheme = $params->{'scheme'};
    my $datetime = $params->{'datetime'};

    given ($request_header->{'@method'}[0]) {
        when ("CONNECT") {
            my $error = {
                status => 405,
                code => -1,
                message => "Method Not Allowed"
            };
            print jsonrpc2_error_encode($error, $id);
        }
        default {
            read_from_remote_server($remote_address, $remote_port, $scheme, $request_data, undef, $buffer_size, $id);
        }
    }
}

sub relay_connect {
    my ($params, $id) = @_;
    my $buffer_size = $params->{'buffer_size'};
    my $client_address = $params->{'client_address'};
    my $client_port = int($params->{'client_port'});
    my $client_encoding = $params->{'client_encoding'};
    my $remote_address = $params->{'remote_address'};
    my $remote_port = int($params->{'remote_port'});
    my $scheme = $params->{'scheme'};
    my $datetime = $params->{'datetime'};

    my $starttime = time();
    my $conn = IO::Socket::INET->new(
        PeerAddr => $client_address,
        PeerPort => $client_port,
        Proto => 'tcp',
        Timeout => STATEFUL_SOCKET_TIMEOUT
    );
    if (!$conn) {
        my $error = {
            status => 502,
            code => $!,
            message => $@
        };
        print jsonrpc2_error_encode($error, $id);
    } else {
        my $stoptime = time();
        my $connection_speed = int(($stoptime - $starttime) * 1000);
        my $data = jsonrpc2_encode("relay_accept", {
            success => 1,
            connection_speed => $connection_speed
        }, $id);
        print $conn $data . "\r\n\r\n";

        read_from_remote_server($remote_address, $remote_port, $scheme, undef, $conn, $buffer_size, $id);
        close($conn);
    }
}

# Parse a context
my $json_input = do { local $/; <STDIN> };
my $context = decode_json($json_input);

# Check if it's JSON-RPC 2 (stateless)
if ($context->{'jsonrpc'} eq "2.0") {
    my $method = $context->{'method'};
    given ($method) {
        when ("relay_request") {
            relay_request($context->{'params'}, $context->{'id'}); # stateless mode
        }
        when ("relay_connect") {
            relay_connect($context->{'params'}, $context->{'id'}); # stateful mode
        }
    }
}
assets/php/class.tfa.php (new file, +61 lines). Full contents:

@@ -0,0 +1,61 @@
<?php
// https://github.com/dimamedia/PHP-Simple-TOTP-and-PubKey

class tfa {

    // RFC4648 Base32 alphabet
    private $alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567";

    function getOtp($key) {

        /* Base32 decoder */

        // Remove spaces from the given public key and converting to an array
        $key = str_split(str_replace(" ","",$key));

        $n = 0;
        $j = 0;
        $binary_key = "";

        // Decode public key's each character to base32 and save into binary chunks
        foreach($key as $char) {
            $n = $n << 5;
            $n = $n + stripos($this->alphabet, $char);
            $j += 5;

            if($j >= 8) {
                $j -= 8;
                $binary_key .= chr(($n & (0xFF << $j)) >> $j);
            }
        }
        /* End of Base32 decoder */

        // current unix time 30sec period as binary
        $binary_timestamp = pack('N*', 0) . pack('N*', floor(microtime(true)/30));
        // generate keyed hash
        $hash = hash_hmac('sha1', $binary_timestamp, $binary_key, true);

        // generate otp from hash
        $offset = ord($hash[19]) & 0xf;
        $otp = (
            ((ord($hash[$offset+0]) & 0x7f) << 24 ) |
            ((ord($hash[$offset+1]) & 0xff) << 16 ) |
            ((ord($hash[$offset+2]) & 0xff) << 8 ) |
            (ord($hash[$offset+3]) & 0xff)
        ) % pow(10, 6);

        return $otp;
    }

    function getPubKey() {
        $alphabet = str_split($this->alphabet);
        $key = '';
        // generate 16 chars public key from Base32 alphabet
        for ($i = 0; $i < 16; $i++) $key .= $alphabet[mt_rand(0,31)];
        // split into 4x4 chunks for easy reading
        return implode(" ", str_split($key, 4));
    }

}

?>
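A short usage sketch for the class above (not part of the commit; the submitted code is a made-up value): generate a Base32 public key once, then compare a user's 6-digit code against getOtp() for the current 30-second window.

```php
<?php
require_once "class.tfa.php";

$tfa = new tfa();

// One-time setup: generate and persist a Base32 public key for the user.
$publicKey = $tfa->getPubKey();   // e.g. "ABCD EFGH IJKL MNOP"

// On each login: compute the expected TOTP for the current 30-second window.
$expected = $tfa->getOtp($publicKey);

// Compare against the code the user typed (zero-padded to 6 digits).
$submitted = "123456"; // hypothetical user input
$isValid = hash_equals(str_pad((string) $expected, 6, "0", STR_PAD_LEFT), $submitted);
var_dump($isValid);
```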
assets/php/coupang.class.php (new file, +70 lines). Full contents:

@@ -0,0 +1,70 @@
<?php
// coupang.class.php
// Coupang Product Search API integration class
// Namhyeon Go <gnh1201@gmail.com>
// https://github.com/gnh1201/welsonjs
//
date_default_timezone_set("GMT+0");

class CoupangProductSearch {
    private $accessKey = "";
    private $secretKey = "";
    private $baseUrl = "https://api-gateway.coupang.com";

    private function generateSignature($method, $path, $query = "") {
        $datetime = (new \DateTime("now", new \DateTimeZone("GMT")))->format("ymd\THis\Z");
        $message = $datetime . $method . $path . $query;

        $signature = hash_hmac('sha256', $message, $this->secretKey);
        return [
            'authorization' => "CEA algorithm=HmacSHA256, access-key={$this->accessKey}, signed-date={$datetime}, signature={$signature}",
            'datetime' => $datetime
        ];
    }

    public function searchProducts($keyword, $limit = 10, $subId = null, $imageSize = null, $srpLinkOnly = false) {
        $path = "/v2/providers/affiliate_open_api/apis/openapi/products/search";
        $queryParams = http_build_query([
            'keyword' => $keyword,
            'limit' => $limit,
            'subId' => $subId,
            'imageSize' => $imageSize,
            'srpLinkOnly' => $srpLinkOnly
        ]);
        $fullPath = $path . '?' . $queryParams;
        $url = $this->baseUrl . $fullPath;

        $signatureData = $this->generateSignature("GET", $path, $queryParams);
        $authorization = $signatureData['authorization'];
        $datetime = $signatureData['datetime'];

        $headers = [
            "Content-Type: application/json;charset=UTF-8",
            "Authorization: $authorization"
        ];

        $curl = curl_init();
        curl_setopt($curl, CURLOPT_URL, $url);
        curl_setopt($curl, CURLOPT_CUSTOMREQUEST, "GET");
        curl_setopt($curl, CURLOPT_HTTPHEADER, $headers);
        curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);

        $response = curl_exec($curl);
        $httpCode = curl_getinfo($curl, CURLINFO_HTTP_CODE);

        curl_close($curl);

        if ($httpCode === 200) {
            return json_decode($response, true);
        } else {
            try {
                return json_decode($response, true);
            } catch (Exception $e) {
                return [
                    "status" => $httpCode,
                    "message" => $e->getMessage()
                ];
            }
        }
    }
}
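A minimal usage sketch for the class above (illustrative only; the committed file ships with empty $accessKey/$secretKey, so real calls need credentials, and the keyword here is made up):

```php
<?php
require_once "coupang.class.php";

// Assumes $accessKey and $secretKey have been filled in inside the class.
$search = new CoupangProductSearch();
$result = $search->searchProducts("usb hub", 5);

if (is_array($result)) {
    // Inspect whatever the API returned; the exact response shape depends on the Coupang API.
    print_r($result);
} else {
    echo "Request failed or returned a non-JSON body.\n";
}
```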
assets/php/index.php (modified; the PHP worker)

@@ -1,37 +1,72 @@
 <?php
 /* index.php
- * Caterpillar Worker on PHP
+ * Caterpillar Proxy Worker on PHP runtime
  *
  * Caterpillar Proxy - The simple web debugging proxy (formerly, php-httpproxy)
  * Namhyeon Go (Catswords Research) <abuse@catswords.net>
  * https://github.com/gnh1201/caterpillar
  * Created at: 2022-10-06
- * Updated at: 2024-06-25
+ * Updated at: 2025-03-11
  */
+define("PERF_START_TIME", microtime(true));
-define("PHP_HTTPPROXY_VERSION", "0.1.5.23");
+define("PHP_HTTPPROXY_VERSION", "0.1.6.10");
 define("DEFAULT_SOCKET_TIMEOUT", 1);
 define("STATEFUL_SOCKET_TIMEOUT", 30);
 define("MAX_EXECUTION_TIME", 0);
-define("DEFAULT_USER_AGENT", $_SERVER['HTTP_USER_AGENT'] . '</p><hr><p>php-httpproxy/' . PHP_HTTPPROXY_VERSION . ' (Server; PHP ' . phpversion() . '; Caterpillar; abuse@catswords.net)');
+define("ALLOW_INVOKE_INSECURE_METHOD", false);
+define("ALLOW_LOAD_INSECURE_SCRIPT", true);
+define("DEFAULT_USER_AGENT", 'php-httpproxy/' . PHP_HTTPPROXY_VERSION . ' (Server; PHP ' . phpversion() . '; Caterpillar Proxy)');
+define("RELAY_ALLOW_METHODS", ""); // e.g., GET,POST
+define("RELAY_PROXY_PASS", ""); // e.g., https://example.org
+define("RELAY_IMAGE_FILE_EXTENSIONS", ".png,.gif,.jpg");
+define("RELAY_STATIC_FILE_EXTENSIONS", ".js,.css");
+define("RELAY_ENABLE_JS_REDIRECT", false);
+
+error_reporting(E_ALL);
+ini_set("display_errors", 0);
+ini_set("default_socket_timeout", DEFAULT_SOCKET_TIMEOUT); // must be. because of `feof()` works
+ini_set("max_execution_time", MAX_EXECUTION_TIME);
 
 header('Access-Control-Allow-Origin: *');
 header('Access-Control-Allow-Methods: *');
 header("Access-Control-Allow-Headers: *");
 
-if (strpos($_SERVER['HTTP_USER_AGENT'], "php-httpproxy/") !== 0 && strpos($_SERVER['HTTP_X_USER_AGENT'], "php-httpproxy/") !== 0) {
-    exit('<!DOCTYPE html><html><head><title>It works!</title><meta charset="utf-8"></head><body><h1>It works!</h1><p><a href="https://github.com/gnh1201/caterpillar">Download the client</a></p><p>' . DEFAULT_USER_AGENT . '</p></body></html>');
-}
+function get_current_execution_time() {
+    $end_time = microtime(true);
+    return $end_time - PERF_START_TIME;
+}
 
-ini_set("default_socket_timeout", DEFAULT_SOCKET_TIMEOUT); // must be. because of `feof()` works
-ini_set("max_execution_time", MAX_EXECUTION_TIME);
+function array_get($key, $arr, $default = null) {
+    return array_key_exists($key, $arr) ? $arr[$key] : $default;
+}
+
+function server_env_get($key) {
+    return array_get($key, $_SERVER, "");
+}
+
+function verify_integrity($data, $integrity) {
+    if (strpos($integrity, 'sha384-') !== 0) {
+        return false;
+    }
+
+    $encoded_hash = substr($integrity, 7);
+    $decoded_hash = base64_decode($encoded_hash);
+    $calculated_hash = hash('sha384', $data, true);
+
+    return hash_equals($calculated_hash, $decoded_hash);
+}
+
+function cast_to_array($data) {
+    return is_array($data) ? $data : array($data);
+}
 
 function jsonrpc2_encode($method, $params, $id = '') {
     $data = array(
         "jsonrpc" => "2.0",
         "method" => $method,
         "params" => $params,
-        "id" => $id
+        "id" => $id,
+        "_execution_time" => get_current_execution_time()
     );
     return json_encode($data);
 }
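The new verify_integrity() helper accepts a subresource-integrity style string: "sha384-" followed by the Base64 encoding of the raw SHA-384 digest of the data. A sketch of producing a value it would accept (the script body is hypothetical, not from the repository):

```php
<?php
// Produce an integrity string that verify_integrity() above would accept.
$script = 'return phpversion();';   // hypothetical script body
$integrity = "sha384-" . base64_encode(hash("sha384", $script, true));

// Sanity check against the helper from this changeset.
var_dump(verify_integrity($script, $integrity)); // bool(true)
```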
@@ -40,7 +75,8 @@ function jsonrpc2_result_encode($result, $id = '') {
     $data = array(
         "jsonrpc" => "2.0",
         "result" => $result,
-        "id" => $id
+        "id" => $id,
+        "_execution_time" => get_current_execution_time()
     );
     return json_encode($data);
 }

@@ -49,7 +85,8 @@ function jsonrpc2_error_encode($error, $id = '') {
     $data = array(
         "jsonrpc" => "2.0",
         "error" => $error,
-        "id" => $id
+        "id" => $id,
+        "_execution_time" => get_current_execution_time()
     );
     return json_encode($data);
 }
@@ -71,7 +108,7 @@ function fatal_handler() {
     $errstr = $error["message"];
 
     header("HTTP/1.1 200 OK");
-    exit(jsonrpc2_error_encode(array(
+    exit("\r\n\r\n" . jsonrpc2_error_encode(array(
         "status" => 503,
         "code" => $errno,
         "message"=> "Error occurred in file '$errfile' at line $errline: $errstr"

@@ -80,6 +117,27 @@ function fatal_handler() {
 }
 register_shutdown_function("fatal_handler");
 
+function load_script($data) {
+    $loaded_script = false;
+
+    if (!ALLOW_LOAD_INSECURE_SCRIPT) {
+        return $loaded_script;
+    }
+
+    $fh = tmpfile();
+    if ($fh !== false) {
+        if (!(strpos($data, "<?") !== false)) {
+            $data = "<?php\r\n\r\n" . $data . "\r\n\r\n?>";
+        }
+        fwrite($fh, $data);
+        $path = stream_get_meta_data($fh)['uri'];
+        $loaded_script = include($path);
+        fclose($fh);
+    }
+
+    return $loaded_script;
+}
+
 // https://stackoverflow.com/questions/16934409/curl-as-proxy-deal-with-https-connect-method
 // https://stackoverflow.com/questions/12433958/how-to-parse-response-headers-in-php
 function parse_headers($str) { // Parses HTTP headers into an array
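load_script() writes the supplied text to a temporary file and include()s it, adding the <?php tag when it is missing, and returns whatever the included script returns. A hedged sketch of calling it directly (the snippet is made up; ALLOW_LOAD_INSECURE_SCRIPT must be true, as defined earlier in this file):

```php
<?php
// Illustrative only: execute a small snippet through load_script().
$snippet = 'return strtoupper("caterpillar");'; // hypothetical script body, no "<?php" tag needed
$value = load_script($snippet);
var_dump($value); // string(11) "CATERPILLAR"
```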
@@ -218,12 +276,12 @@ function relay_connect($params, $id = '') {
 }
 
 function relay_mysql_connect($params) {
-    $hostname = $params['hostname'];
-    $username = $params['username'];
-    $password = $params['password'];
-    $database = array_key_exists('database', $params) ? $params['database'] : null;
-    $port = array_key_exists('port', $params) ? intval($params['port']) : 3306;
-    $charset = array_key_exists('charset', $params) ? $params['charset'] : "utf8";
+    $hostname = array_get("hostname", $params, "localhost");
+    $username = array_get("username", $params, "root");
+    $password = array_get("password", $params, "");
+    $database = array_get("database", $params, null);
+    $port = intval(array_get("port", $params, 3306));
+    $charset = array_get("charset", $params, "utf8");
 
     try {
         $mysqli = new mysqli($hostname, $username, $password, $database, $port);
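With array_get() in place, every connection parameter has a default, so callers can omit most keys. A sketch of a JSON-RPC payload a client might send for this function (the method name follows the worker's relay_* dispatch convention, which is not shown in this excerpt; all values are hypothetical):

```php
<?php
// Hypothetical JSON-RPC 2.0 payload for relay_mysql_connect; omitted keys fall back
// to the defaults above (localhost, root, empty password, port 3306, utf8).
echo json_encode([
    "jsonrpc" => "2.0",
    "method"  => "relay_mysql_connect",
    "params"  => [
        "hostname" => "127.0.0.1",
        "username" => "proxyuser",
        "password" => "secret",
        "database" => "example_db"
        // "port" and "charset" omitted on purpose
    ],
    "id"      => "example-2"
], JSON_PRETTY_PRINT), "\n";
```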
@@ -289,12 +347,20 @@ function relay_mysql_query($params, $mysqli) {
         case "show":
         case "select":
             $success = true;
-            $result['data'] = mysqli_fetch_all($query_result, MYSQLI_ASSOC);
+            if (function_exists("mysqli_fetch_all")) {
+                $result['data'] = mysqli_fetch_all($query_result, MYSQLI_ASSOC);
+            } else {
+                $data = array();
+                while ($row = $query_result->fetch_assoc()) {
+                    $data[] = $row;
+                }
+                $result['data'] = $data;
+            }
             break;
 
         case "insert":
             $success = (bool) $query_result;
-            $result['last_id'] = @$mysqli->insert_id();
+            $result['last_id'] = @$mysqli->insert_id;
             break;
 
         default:
@@ -357,6 +423,24 @@ function relay_get_phpversion() {
     );
 }
 
+function relay_get_env_hash() {
+    $params = array(
+        "php_version" => phpversion(),
+        "php_os" => PHP_OS,
+        "php_sapi" => PHP_SAPI,
+        "loaded_extensions" => get_loaded_extensions(),
+        "ini_settings" => ini_get_all(null, false)
+    );
+    $serialized_params = serialize($params);
+
+    return array(
+        "data" => array(
+            sha1($serialized_params),
+            md5($serialized_params)
+        )
+    );
+}
+
 function relay_get_loaded_extensions() {
     return array(
         "data" => get_loaded_extensions()
@@ -389,15 +473,85 @@ function relay_dns_get_record($params) {
 
 function relay_fetch_url($params) {
     $url = $params['url'];
+    $method = array_get("method", $params, "GET");
+    $headers = array_get("headers", $params, array());
+    $data = array_get("data", $params, '');
+
+    // from local source
+    $local_prefix = "file:";
+    $pos = strpos($url, $local_prefix);
+    if ($pos !== false && $pos === 0) {
+        $path = realpath(substr($url, strlen($local_prefix)));
+        $basedir = realpath(__DIR__);
+
+        if ($path && strpos($path, $basedir) === 0) {
+            if (file_exists($path)) {
+                $response = file_get_contents($path);
+                return array(
+                    "success" => true,
+                    "result" => array(
+                        "status" => 200,
+                        "data" => $response
+                    )
+                );
+            } else {
+                return array(
+                    "success" => false,
+                    "error" => array(
+                        "status" => 404,
+                        "code" => -1,
+                        "message" => "Not found"
+                    )
+                );
+            }
+        } else {
+            return array(
+                "success" => false,
+                "error" => array(
+                    "status" => 403,
+                    "code" => -1,
+                    "message" => "Access denied"
+                )
+            );
+        }
+    }
+
+    // from remote source
+    $_headers = array();
+    if (is_array($headers) && count($headers) > 0) {
+        foreach ($headers as $header_line) {
+            $pos = strpos($header_line, ':');
+            if ($pos !== false) {
+                $header_key = trim(substr($header_line, 0, $pos));
+                $header_value = trim(substr($header_line, $pos + 1));
+                $_header_line = sprintf("%s: %s", $header_key, $header_value);
+                array_push($_headers, $_header_line);
+            }
+        }
+    }
+
     try {
         $ch = curl_init();
         curl_setopt($ch, CURLOPT_URL, $url);
         curl_setopt($ch, CURLOPT_USERAGENT, DEFAULT_USER_AGENT);
         curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
-        curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 1);
+        curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 30);
         curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
+        curl_setopt($ch, CURLOPT_DNS_USE_GLOBAL_CACHE, false);
+        curl_setopt($ch, CURLOPT_DNS_CACHE_TIMEOUT, 30);
+
+        // check the request headers
+        if (count($_headers) > 0) {
+            curl_setopt($ch, CURLOPT_HTTPHEADER, $_headers);
+        }
+
+        // check it is POST request
+        if ($method == "POST") {
+            curl_setopt($ch, CURLOPT_POSTFIELDS, cast_to_array($data));
+            curl_setopt($ch, CURLOPT_POST, true);
+        }
+
+        // make cURL instance
         $response = curl_exec($ch);
         $error_code = curl_errno($ch);
         if ($error_code) {
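relay_fetch_url() now accepts method, headers and data in addition to url, and treats file: URLs as local reads restricted to the worker's own directory. A hedged sketch of a direct call exercising the new options (URL, headers and payload are examples, not from the repository):

```php
<?php
// Hypothetical call into the updated relay_fetch_url().
$result = relay_fetch_url([
    "url"     => "https://example.org/api/echo",
    "method"  => "POST",
    "headers" => ["Accept: application/json", "X-Example: 1"],
    "data"    => ["message" => "hello"]   // cast_to_array() keeps this as a POST field array
]);

if (!empty($result["success"])) {
    echo $result["result"]["data"];
} else {
    print_r($result);   // error payload; exact shape follows the branches above
}
```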
@@ -451,10 +605,58 @@ function relay_get_geolocation() {
 }
 }
 
 
 function relay_invoke_method($params) {
     $callback = $params['callback'];
-    $args = (is_array($params['args']) ? $params['args'] : array());
+    $requires = cast_to_array($params['requires']);
+    $args = cast_to_array($params['args']);
+
+    if (!ALLOW_INVOKE_INSECURE_METHOD) {
+        $allow_callbacks = array("phpinfo", "idn_to_ascii", "idn_to_utf8", "load_script");
+        if (!in_array($callback, $allow_callbacks)) {
+            return array(
+                "success" => false,
+                "error" => array(
+                    "status" => 403,
+                    "code" => -1,
+                    "message" => $callback . " is not allowed"
+                )
+            );
+        }
+    }
+
+    foreach($requires as $require_ctx) {
+        $resource_url = "";
+        $resource_integrity = "";
+
+        if (is_string($require_ctx)) {
+            $resource_url = $require_ctx;
+        } else if (is_array($require_ctx)) {
+            $resource_url = array_get("url", $require_ctx, "");
+            $resource_integrity = array_get("integrity", $require_ctx, "");
+        }
+
+        if (empty($resource_url))
+            continue;
+
+        try {
+            $result = relay_fetch_url(array(
+                "url" => $resource_url
+            ));
+
+            if ($result['success'] && $result['result']['status'] == 200) {
+                $response = $result['result']['data'];
+                if (!empty($resource_integrity)) {
+                    if (verify_integrity($response, $resource_integrity)) {
+                        load_script($response);
+                    }
+                } else {
+                    load_script($response);
+                }
+            }
+        } catch (Exception $e) {
+            //echo $e->message; // ignore an exception
+        }
+    }
+
     try {
         $data = call_user_func_array($callback, $args);
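Unless ALLOW_INVOKE_INSECURE_METHOD is enabled, only the whitelisted callbacks (phpinfo, idn_to_ascii, idn_to_utf8, load_script) may be invoked, and each entry in requires is fetched and loaded before the call. A sketch of params for one allowed callback (the hostname is just an example; idn_to_ascii needs PHP's intl extension):

```php
<?php
// Hypothetical params for relay_invoke_method() with a whitelisted callback
// and no "requires" resources.
$params = [
    "callback" => "idn_to_ascii",
    "requires" => [],
    "args"     => ["münchen.example"]   // example IDN hostname
];
$response = relay_invoke_method($params);
print_r($response);
```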
@ -481,21 +683,104 @@ function relay_invoke_method($params) {
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
function get_client_address() {
|
function relay_web_search($params) {
|
||||||
$client_address = '';
|
$page = $params['page'];
|
||||||
if (!empty($_SERVER['HTTP_CLIENT_IP'])) {
|
$search_params = array(
|
||||||
$client_address = $_SERVER['HTTP_CLIENT_IP'];
|
"q" => $params['keyword'],
|
||||||
} elseif (!empty($_SERVER['HTTP_X_FORWARDED_FOR'])) {
|
"p" => ($page > 0 ? $page - 1 : 0),
|
||||||
$client_address = $_SERVER['HTTP_X_FORWARDED_FOR'];
|
"t" => "0" // text only
|
||||||
|
);
|
||||||
|
$result = relay_fetch_url(array(
|
||||||
|
"url" => "https://farside.link/librex/api.php?" . http_build_query($search_params)
|
||||||
|
));
|
||||||
|
if ($result['success']) {
|
||||||
|
return array(
|
||||||
|
"success" => true,
|
||||||
|
"result" => array(
|
||||||
|
"status" => 200,
|
||||||
|
"data" => json_decode($result['result']['data'], true)
|
||||||
|
)
|
||||||
|
);
|
||||||
} else {
|
} else {
|
||||||
$client_address = $_SERVER['REMOTE_ADDR'];
|
return $result;
|
||||||
}
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
function get_client_address() {
|
||||||
|
$client_address = "";
|
||||||
|
|
||||||
|
$client_address_candidates = array_filter(array_map("server_env_get", array(
|
||||||
|
"HTTP_CLIENT_IP",
|
||||||
|
"HTTP_X_FORWARDED_FOR",
|
||||||
|
"HTTP_X_FORWARDED",
|
||||||
|
"HTTP_X_CLUSTER_CLIENT_IP",
|
||||||
|
"HTTP_FORWARDED_FOR",
|
||||||
|
"HTTP_FORWARDED",
|
||||||
|
"REMOTE_ADDR"
|
||||||
|
)));
|
||||||
|
if (count($client_address_candidates) > 0) {
|
||||||
|
$client_address = $client_address_candidates[0];
|
||||||
|
}
|
||||||
|
|
||||||
return array(
|
return array(
|
||||||
"data" => $client_address,
|
"data" => $client_address_candidates,
|
||||||
"client_address" => $client_address // compatible under version 0.1.5.18
|
"client_address" => $client_address // compatible under version 0.1.5.18
|
||||||
);
|
);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
function get_user_agent() {
|
||||||
|
$user_agents = array_filter(array_map("server_env_get", array(
|
||||||
|
"HTTP_X_USER_AGENT",
|
||||||
|
"HTTP_USER_AGENT"
|
||||||
|
)));
|
||||||
|
return implode(", ", $user_agents);
|
||||||
|
}
|
||||||
|
|
||||||
|
// check the user agent
$is_httpproxy = (strpos(get_user_agent(), "php-httpproxy/") === 0);
if (!$is_httpproxy) {
    $relay_allow_methods = explode(',', strtoupper(RELAY_ALLOW_METHODS));
    $relay_image_file_extensions = explode(',', strtolower(RELAY_IMAGE_FILE_EXTENSIONS));
    $relay_static_file_extensions = explode(',', strtolower(RELAY_STATIC_FILE_EXTENSIONS));

    if (in_array($_SERVER['REQUEST_METHOD'], $relay_allow_methods)) {
        $proxy_url = RELAY_PROXY_PASS . $_SERVER['REQUEST_URI'];

        // prevent an image file requests
        foreach ($relay_image_file_extensions as $file_extension) {
            if (strpos($proxy_url, $file_extension) !== false) {
                header("Location: https://http.cat/images/200.jpg");
                exit("");
            }
        }

        // prevent an static file requests
        foreach ($relay_static_file_extensions as $file_extension) {
            if (strpos($proxy_url, $file_extension) !== false) {
                exit("");
            }
        }

        $result = relay_fetch_url(array(
            "url" => $proxy_url
        ));
        if ($result['success']) {
            $response = str_replace(RELAY_PROXY_PASS, sprintf("%s://%s", $_SERVER['REQUEST_SCHEME'], $_SERVER['HTTP_HOST']), $result['result']['data']);
            if (RELAY_ENABLE_JS_REDIRECT) {
                if (strpos(strtolower(trim(substr($response, 0, 16))), "<!doctype html") === 0) {
                    $response .= "<script>setTimeout(function() { var a = document.createElement('a'); a.href = '" . $proxy_url . "'; document.body.appendChild(a); a.click(); }, 3000);</script>";
                }
            }
            exit($response);
        } else {
            http_response_code(500);
            exit($proxy_url . " is down.");
        }
    } else {
        exit('<!DOCTYPE html><html><head><title>It works!</title><meta charset="utf-8"></head><body><h1>It works!</h1><p><a href="https://github.com/gnh1201/caterpillar">Download the client</a></p><p>' . $_SERVER['HTTP_USER_AGENT'] . '</p><hr><p>' . DEFAULT_USER_AGENT . '</p></body></html>');
    }
}
// parse a context
$context = json_decode(file_get_contents('php://input'), true);

@@ -543,6 +828,10 @@ if ($context['jsonrpc'] == "2.0") {
        echo jsonrpc2_result_encode(relay_get_phpversion(), $context['id']);
        break;

    case "relay_get_env_hash":
        echo jsonrpc2_result_encode(relay_get_env_hash(), $context['id']);
        break;

    case "relay_get_loaded_extensions":
        echo jsonrpc2_result_encode(relay_get_loaded_extensions(), $context['id']);
        break;

@@ -583,6 +872,15 @@ if ($context['jsonrpc'] == "2.0") {
        }
        break;

    case "relay_web_search":
        $result = relay_web_search($context['params']);
        if ($result['success']) {
            echo jsonrpc2_result_encode($result['result'], $context['id']);
        } else {
            echo jsonrpc2_error_encode($result['error'], $context['id']);
        }
        break;

    case "get_client_address":
        echo jsonrpc2_result_encode(get_client_address(), $context['id']);
        break;
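Note: like the other relay_* handlers, the new relay_web_search method is reached through the JSON-RPC 2.0 dispatcher above, and only when the reported User-Agent starts with "php-httpproxy/" (any other request falls into the reverse-proxy branch and exits there). A rough client-side sketch, with a placeholder worker URL and an illustrative User-Agent string (both are assumptions, not part of this diff):

<?php
// Hypothetical client sketch: POST a JSON-RPC 2.0 request to the PHP worker.
// "keyword" and "page" match the parameters read by relay_web_search() above.
$request = json_encode(array(
    "jsonrpc" => "2.0",
    "method" => "relay_web_search",
    "params" => array("keyword" => "caterpillar proxy", "page" => 1),
    "id" => 1
));

$ch = curl_init("https://worker.example.org/index.php");             // placeholder URL
curl_setopt($ch, CURLOPT_POST, true);
curl_setopt($ch, CURLOPT_POSTFIELDS, $request);
curl_setopt($ch, CURLOPT_HTTPHEADER, array("Content-Type: application/json"));
curl_setopt($ch, CURLOPT_USERAGENT, "php-httpproxy/0.1.6 (Client)"); // must start with "php-httpproxy/"
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
echo curl_exec($ch);   // e.g. {"jsonrpc":"2.0","result":{"status":200,"data":[...]},"id":1}
curl_close($ch);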
418  assets/php/punycode.class.php  Normal file
@@ -0,0 +1,418 @@
|
||||||
|
<?php
|
||||||
|
/**
|
||||||
|
* The MIT License (MIT)
|
||||||
|
*
|
||||||
|
* Copyright (c) 2013 mk-j, zedwood.com
|
||||||
|
*
|
||||||
|
* Permission is hereby granted, free of charge, to any person obtaining a copy
|
||||||
|
* of this software and associated documentation files (the "Software"), to deal
|
||||||
|
* in the Software without restriction, including without limitation the rights
|
||||||
|
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
||||||
|
* copies of the Software, and to permit persons to whom the Software is
|
||||||
|
* furnished to do so, subject to the following conditions:
|
||||||
|
*
|
||||||
|
* The above copyright notice and this permission notice shall be included in all
|
||||||
|
* copies or substantial portions of the Software.
|
||||||
|
*
|
||||||
|
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||||
|
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
||||||
|
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
||||||
|
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
||||||
|
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
||||||
|
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
||||||
|
* SOFTWARE.
|
||||||
|
*/
|
||||||
|
|
||||||
|
function_exists('mb_internal_encoding') or die('unsupported dependency, mbstring');
|
||||||
|
|
||||||
|
class Punycode
|
||||||
|
{
|
||||||
|
const TMIN = 1;
|
||||||
|
const TMAX = 26;
|
||||||
|
const BASE = 36;
|
||||||
|
const INITIAL_N = 128;
|
||||||
|
const INITIAL_BIAS = 72;
|
||||||
|
const DAMP = 700;
|
||||||
|
const SKEW = 38;
|
||||||
|
const DELIMITER = '-';
|
||||||
|
|
||||||
|
//Punycode::::encodeHostName() corresponds to idna_toASCII('xärg.örg');
|
||||||
|
public static function encodeHostName($hostname)
|
||||||
|
{
|
||||||
|
if (!self::is_valid_utf8($hostname))
|
||||||
|
{
|
||||||
|
return $hostname;//invalid
|
||||||
|
}
|
||||||
|
|
||||||
|
if (function_exists('idn_to_ascii') && 0)
|
||||||
|
{
|
||||||
|
return idn_to_ascii($hostname);//php 5.3+
|
||||||
|
}
|
||||||
|
|
||||||
|
$old_encoding = mb_internal_encoding();
|
||||||
|
mb_internal_encoding("UTF-8");
|
||||||
|
|
||||||
|
$pieces = explode(".", self::mb_strtolower($hostname) );
|
||||||
|
$punycode_pieces = array();
|
||||||
|
foreach($pieces as $piece)
|
||||||
|
{
|
||||||
|
if (preg_match("/[\x{80}-\x{FFFF}]/u", $piece))//is multi byte utf8
|
||||||
|
{
|
||||||
|
$punycode_pieces[] = "xn--".self::encode($piece);
|
||||||
|
}
|
||||||
|
else if (preg_match('/^[a-z\d][a-z\d-]{0,62}$/i', $piece) && !preg_match('/-$/', $piece) )//is valid ascii hostname
|
||||||
|
{
|
||||||
|
$punycode_pieces[] = $piece;
|
||||||
|
}
|
||||||
|
else
|
||||||
|
{
|
||||||
|
mb_internal_encoding($old_encoding);
|
||||||
|
return $hostname;//invalid domain
|
||||||
|
}
|
||||||
|
}
|
||||||
|
mb_internal_encoding($old_encoding);
|
||||||
|
return implode(".", $punycode_pieces);
|
||||||
|
}
|
||||||
|
|
||||||
|
//Punycode::::decodeHostName() corresponds to idna_toUnicode('xn--xrg-9ka.xn--rg-eka');
|
||||||
|
public static function decodeHostName($encoded_hostname)
|
||||||
|
{
|
||||||
|
if (!preg_match('/[a-z\d.-]{1,255}/', $encoded_hostname))
|
||||||
|
{
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (function_exists('idn_to_utf8') && 0)
|
||||||
|
{
|
||||||
|
return idn_to_utf8($encoded_hostname);
|
||||||
|
}
|
||||||
|
|
||||||
|
$old_encoding = mb_internal_encoding();
|
||||||
|
mb_internal_encoding("UTF-8");
|
||||||
|
|
||||||
|
$pieces = explode(".", strtolower($encoded_hostname));
|
||||||
|
foreach($pieces as $piece)
|
||||||
|
{
|
||||||
|
if (!preg_match('/^[a-z\d][a-z\d-]{0,62}$/i', $piece) || preg_match('/-$/', $piece) )
|
||||||
|
{
|
||||||
|
mb_internal_encoding($old_encoding);
|
||||||
|
return $encoded_hostname;//invalid
|
||||||
|
}
|
||||||
|
$punycode_pieces[] = strpos($piece, "xn--")===0 ? self::decode(substr($piece,4)) : $piece;
|
||||||
|
}
|
||||||
|
mb_internal_encoding($old_encoding);
|
||||||
|
return implode(".", $punycode_pieces);
|
||||||
|
}
|
||||||
|
|
||||||
|
protected static function encode($input)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
$n = self::INITIAL_N;
|
||||||
|
$delta = 0;
|
||||||
|
$bias = self::INITIAL_BIAS;
|
||||||
|
$output='';
|
||||||
|
$input_length = self::mb_strlen($input);
|
||||||
|
|
||||||
|
$b=0;
|
||||||
|
for($i=0; $i<$input_length; $i++)
|
||||||
|
{
|
||||||
|
$chr = self::mb_substr($input,$i,1);
|
||||||
|
$c = self::uniord( $chr );//autoloaded class
|
||||||
|
if ($c < self::INITIAL_N)
|
||||||
|
{
|
||||||
|
$output.= $chr;
|
||||||
|
$b++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if ($b==$input_length)//no international chars to convert to punycode here
|
||||||
|
{
|
||||||
|
throw new Exception("PunycodeException.BAD_INPUT");
|
||||||
|
}
|
||||||
|
else if ($b>0)
|
||||||
|
{
|
||||||
|
$output.= self::DELIMITER;
|
||||||
|
}
|
||||||
|
|
||||||
|
$h = $b;
|
||||||
|
while($h < $input_length)
|
||||||
|
{
|
||||||
|
$m = PHP_INT_MAX;
|
||||||
|
|
||||||
|
// Find the minimum code point >= n
|
||||||
|
for($i=0; $i<$input_length; $i++)
|
||||||
|
{
|
||||||
|
$chr = self::mb_substr($input,$i,1);
|
||||||
|
$c = self::uniord( $chr );
|
||||||
|
if ($c >= $n && $c < $m)
|
||||||
|
{
|
||||||
|
$m = $c;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
|
if (($m - $n) > (PHP_INT_MAX - $delta) / ($h+1))
|
||||||
|
{
|
||||||
|
throw new Exception("PunycodeException.OVERFLOW");
|
||||||
|
}
|
||||||
|
$delta = $delta + ($m - $n) * ($h + 1);
|
||||||
|
$n = $m;
|
||||||
|
|
||||||
|
|
||||||
|
for($j=0; $j<$input_length; $j++)
|
||||||
|
{
|
||||||
|
$chr = self::mb_substr($input,$j,1);
|
||||||
|
$c = self::uniord( $chr );
|
||||||
|
if ($c < $n)
|
||||||
|
{
|
||||||
|
$delta++;
|
||||||
|
if (0==$delta)
|
||||||
|
{
|
||||||
|
throw new Exception("PunycodeException.OVERFLOW");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if ($c == $n)
|
||||||
|
{
|
||||||
|
$q = $delta;
|
||||||
|
for($k= self::BASE;; $k+=self::BASE)
|
||||||
|
{
|
||||||
|
$t=0;
|
||||||
|
if ($k <= $bias)
|
||||||
|
{
|
||||||
|
$t= self::TMIN;
|
||||||
|
} else if ($k >= $bias + self::TMAX) {
|
||||||
|
$t= self::TMAX;
|
||||||
|
} else {
|
||||||
|
$t = $k - $bias;
|
||||||
|
}
|
||||||
|
if ($q < $t)
|
||||||
|
{
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
$output.= chr( self::digit2codepoint($t + ($q - $t) % (self::BASE - $t)) );
|
||||||
|
$q = floor( ($q-$t) / (self::BASE - $t) );//integer division
|
||||||
|
}
|
||||||
|
$output.= chr( self::digit2codepoint($q) );
|
||||||
|
$bias = self::adapt($delta, $h+1, $h==$b);
|
||||||
|
$delta=0;
|
||||||
|
$h++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
$delta++;
|
||||||
|
$n++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch (Exception $e)
|
||||||
|
{
|
||||||
|
error_log("[PUNYCODE] error ".$e->getMessage());
|
||||||
|
return $input;
|
||||||
|
}
|
||||||
|
return $output;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected static function decode($input)
|
||||||
|
{
|
||||||
|
try
|
||||||
|
{
|
||||||
|
$n = self::INITIAL_N;
|
||||||
|
$i = 0;
|
||||||
|
$bias = self::INITIAL_BIAS;
|
||||||
|
$output = '';
|
||||||
|
|
||||||
|
$d = self::rstrpos($input, self::DELIMITER);
|
||||||
|
if ($d>0) {
|
||||||
|
for($j=0; $j<$d; $j++) {
|
||||||
|
$chr = self::mb_substr($input,$j,1);
|
||||||
|
$c = self::uniord( $chr );
|
||||||
|
if ($c>=self::INITIAL_N) {
|
||||||
|
throw new Exception("PunycodeException.BAD_INPUT");
|
||||||
|
}
|
||||||
|
$output.=$chr;
|
||||||
|
}
|
||||||
|
$d++;
|
||||||
|
} else {
|
||||||
|
$d = 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
$input_length = self::mb_strlen($input);
|
||||||
|
while ($d < $input_length) {
|
||||||
|
$oldi = $i;
|
||||||
|
$w = 1;
|
||||||
|
|
||||||
|
for($k= self::BASE;; $k += self::BASE) {
|
||||||
|
if ($d == $input_length) {
|
||||||
|
throw new Exception("PunycodeException.BAD_INPUT");
|
||||||
|
}
|
||||||
|
$chr = self::mb_substr($input,$d++,1);
|
||||||
|
$c = self::uniord( $chr );
|
||||||
|
$digit = self::codepoint2digit($c);
|
||||||
|
if ($digit > (PHP_INT_MAX - $i) / $w) {
|
||||||
|
throw new Exception("PunycodeException.OVERFLOW");
|
||||||
|
}
|
||||||
|
|
||||||
|
$i = $i + $digit * $w;
|
||||||
|
|
||||||
|
$t=0;
|
||||||
|
if ($k <= $bias) {
|
||||||
|
$t = self::TMIN;
|
||||||
|
} else if ($k >= $bias + self::TMAX) {
|
||||||
|
$t = self::TMAX;
|
||||||
|
} else {
|
||||||
|
$t = $k - $bias;
|
||||||
|
}
|
||||||
|
if ($digit < $t) {
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
$w = $w * (self::BASE - $t);
|
||||||
|
}
|
||||||
|
$output_length = self::mb_strlen($output);
|
||||||
|
|
||||||
|
$bias = self::adapt($i - $oldi, $output_length + 1, $oldi == 0);
|
||||||
|
|
||||||
|
if ($i / ($output_length + 1) > PHP_INT_MAX - $n) {
|
||||||
|
throw new Exception("PunycodeException.OVERFLOW");
|
||||||
|
}
|
||||||
|
$n = floor($n + $i / ($output_length + 1));
|
||||||
|
$i = $i % ($output_length + 1);
|
||||||
|
$output = self::mb_strinsert($output, self::utf8($n), $i);
|
||||||
|
$i++;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
catch(Exception $e)
|
||||||
|
{
|
||||||
|
error_log("[PUNYCODE] error ".$e->getMessage());
|
||||||
|
return $input;
|
||||||
|
}
|
||||||
|
return $output;
|
||||||
|
}
|
||||||
|
|
||||||
|
//adapt patched from:
|
||||||
|
//https://github.com/takezoh/php-PunycodeEncoder/blob/master/punycode.php
|
||||||
|
protected static function adapt($delta, $numpoints, $firsttime)
|
||||||
|
{
|
||||||
|
$delta = (int)($firsttime ? $delta / self::DAMP : $delta / 2);
|
||||||
|
$delta += (int)($delta / $numpoints);
|
||||||
|
$k = 0;
|
||||||
|
while ($delta > (((self::BASE - self::TMIN) * self::TMAX) / 2)) {
|
||||||
|
$delta = (int)($delta / (self::BASE - self::TMIN));
|
||||||
|
$k += self::BASE;
|
||||||
|
}
|
||||||
|
return $k + (int)((self::BASE - self::TMIN + 1) * $delta / ($delta + self::SKEW));
|
||||||
|
}
|
||||||
|
|
||||||
|
protected static function digit2codepoint($d)
|
||||||
|
{
|
||||||
|
if ($d < 26) {
|
||||||
|
// 0..25 : 'a'..'z'
|
||||||
|
return $d + ord('a');
|
||||||
|
} else if ($d < 36) {
|
||||||
|
// 26..35 : '0'..'9';
|
||||||
|
return $d - 26 + ord('0');
|
||||||
|
} else {
|
||||||
|
throw new Exception("PunycodeException.BAD_INPUT");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
protected static function codepoint2digit($c)
|
||||||
|
{
|
||||||
|
if ($c - ord('0') < 10) {
|
||||||
|
// '0'..'9' : 26..35
|
||||||
|
return $c - ord('0') + 26;
|
||||||
|
} else if ($c - ord('a') < 26) {
|
||||||
|
// 'a'..'z' : 0..25
|
||||||
|
return $c - ord('a');
|
||||||
|
} else {
|
||||||
|
throw new Exception("PunycodeException.BAD_INPUT");
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
protected static function rstrpos($haystack, $needle)
|
||||||
|
{
|
||||||
|
$pos = strpos (strrev($haystack), $needle);
|
||||||
|
if ($pos === false)
|
||||||
|
return false;
|
||||||
|
return strlen ($haystack)-1 - $pos;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected static function mb_strinsert($haystack, $needle, $position)
|
||||||
|
{
|
||||||
|
$old_encoding = mb_internal_encoding();
|
||||||
|
mb_internal_encoding("UTF-8");
|
||||||
|
$r = mb_substr($haystack,0,$position).$needle.mb_substr($haystack,$position);
|
||||||
|
mb_internal_encoding($old_encoding);
|
||||||
|
return $r;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected static function mb_substr($str,$start,$length)
|
||||||
|
{
|
||||||
|
$old_encoding = mb_internal_encoding();
|
||||||
|
mb_internal_encoding("UTF-8");
|
||||||
|
$r = mb_substr($str,$start,$length);
|
||||||
|
mb_internal_encoding($old_encoding);
|
||||||
|
return $r;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected static function mb_strlen($str)
|
||||||
|
{
|
||||||
|
$old_encoding = mb_internal_encoding();
|
||||||
|
mb_internal_encoding("UTF-8");
|
||||||
|
$r = mb_strlen($str);
|
||||||
|
mb_internal_encoding($old_encoding);
|
||||||
|
return $r;
|
||||||
|
}
|
||||||
|
|
||||||
|
protected static function mb_strtolower($str)
|
||||||
|
{
|
||||||
|
$old_encoding = mb_internal_encoding();
|
||||||
|
mb_internal_encoding("UTF-8");
|
||||||
|
$r = mb_strtolower($str);
|
||||||
|
mb_internal_encoding($old_encoding);
|
||||||
|
return $r;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static function uniord($c)//cousin of ord() but for unicode
|
||||||
|
{
|
||||||
|
$ord0 = ord($c[0]); if ($ord0>=0 && $ord0<=127) return $ord0;
|
||||||
|
$ord1 = ord($c[1]); if ($ord0>=192 && $ord0<=223) return ($ord0-192)*64 + ($ord1-128);
|
||||||
|
if ($ord0==0xed && ($ord1 & 0xa0) == 0xa0) return false; //code points, 0xd800 to 0xdfff
|
||||||
|
$ord2 = ord($c[2]); if ($ord0>=224 && $ord0<=239) return ($ord0-224)*4096 + ($ord1-128)*64 + ($ord2-128);
|
||||||
|
$ord3 = ord($c[3]); if ($ord0>=240 && $ord0<=247) return ($ord0-240)*262144 + ($ord1-128)*4096 + ($ord2-128)*64 + ($ord3-128);
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
|
||||||
|
public static function utf8($num)//cousin of ascii() but for utf8
|
||||||
|
{
|
||||||
|
if($num<=0x7F) return chr($num);
|
||||||
|
if($num<=0x7FF) return chr(($num>>6)+192).chr(($num&63)+128);
|
||||||
|
if(0xd800<=$num && $num<=0xdfff) return '';//invalid block of utf8
|
||||||
|
if($num<=0xFFFF) return chr(($num>>12)+224).chr((($num>>6)&63)+128).chr(($num&63)+128);
|
||||||
|
if($num<=0x10FFFF) return chr(($num>>18)+240).chr((($num>>12)&63)+128).chr((($num>>6)&63)+128).chr(($num&63)+128);
|
||||||
|
return '';
|
||||||
|
}
|
||||||
|
|
||||||
|
public static function is_valid_utf8($string)
|
||||||
|
{
|
||||||
|
for ($i=0, $ix=strlen($string); $i < $ix; $i++)
|
||||||
|
{
|
||||||
|
$c = ord($string[$i]);
|
||||||
|
if ($c==0x09 || $c==0x0a || $c==0x0d || (0x20 <= $c && $c < 0x7e) ) $n = 0; # 0bbbbbbb
|
||||||
|
else if (($c & 0xE0) == 0xC0) $n=1; # 110bbbbb
|
||||||
|
else if ($c==0xed && (ord($string[$i+1]) & 0xa0)==0xa0) return false; //code points, 0xd800 to 0xdfff
|
||||||
|
else if (($c & 0xF0) == 0xE0) $n=2; # 1110bbbb
|
||||||
|
else if (($c & 0xF8) == 0xF0) $n=3; # 11110bbb
|
||||||
|
//else if (($c & 0xFC) == 0xF8) $n=4; # 111110bb //byte 5, unnecessary in 4 byte UTF-8
|
||||||
|
//else if (($c & 0xFE) == 0xFC) $n=5; # 1111110b //byte 6, unnecessary in 4 byte UTF-8
|
||||||
|
else return false;
|
||||||
|
for ($j=0; $j<$n; $j++) { // n bytes matching 10bbbbbb follow ?
|
||||||
|
if ((++$i == $ix) || ((ord($string[$i]) & 0xC0) != 0x80))
|
||||||
|
return false;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
return true;
|
||||||
|
}
|
||||||
|
|
||||||
|
}
|
||||||
|
|
||||||
|
|
|
@ -1,192 +0,0 @@
|
||||||
# https://github.com/gnh1201/caterpillar
|
|
||||||
|
|
||||||
require 'socket'
|
|
||||||
require 'json'
|
|
||||||
require 'openssl'
|
|
||||||
require 'base64'
|
|
||||||
require 'timeout'
|
|
||||||
|
|
||||||
DEFAULT_SOCKET_TIMEOUT = 1
|
|
||||||
STATEFUL_SOCKET_TIMEOUT = 30
|
|
||||||
|
|
||||||
def jsonrpc2_encode(method, params, id = '')
|
|
||||||
{
|
|
||||||
"jsonrpc" => "2.0",
|
|
||||||
"method" => method,
|
|
||||||
"params" => params,
|
|
||||||
"id" => id
|
|
||||||
}.to_json
|
|
||||||
end
|
|
||||||
|
|
||||||
def jsonrpc2_result_encode(result, id = '')
|
|
||||||
{
|
|
||||||
"jsonrpc" => "2.0",
|
|
||||||
"result" => result,
|
|
||||||
"id" => id
|
|
||||||
}.to_json
|
|
||||||
end
|
|
||||||
|
|
||||||
def jsonrpc2_error_encode(error, id = '')
|
|
||||||
{
|
|
||||||
"jsonrpc" => "2.0",
|
|
||||||
"error" => error,
|
|
||||||
"id" => id
|
|
||||||
}.to_json
|
|
||||||
end
|
|
||||||
|
|
||||||
def parse_headers(str)
|
|
||||||
headers = {}
|
|
||||||
lines = str.split(/\r?\n/)
|
|
||||||
first_line = lines.shift.split(' ')
|
|
||||||
headers['@method'] = first_line
|
|
||||||
lines.each do |line|
|
|
||||||
if match = line.match(/^([^:]+):(.*)$/)
|
|
||||||
headers[match[1]] = match[2].strip
|
|
||||||
end
|
|
||||||
end
|
|
||||||
headers
|
|
||||||
end
|
|
||||||
|
|
||||||
def read_from_remote_server(remote_address, remote_port, scheme, data = nil, conn = nil, buffer_size = 8192, id = '')
|
|
||||||
if ["https", "ssl", "tls"].include?(scheme)
|
|
||||||
ssl_context = OpenSSL::SSL::SSLContext.new
|
|
||||||
sock = OpenSSL::SSL::SSLSocket.new(TCPSocket.open(remote_address, remote_port), ssl_context)
|
|
||||||
sock.connect
|
|
||||||
else
|
|
||||||
sock = TCPSocket.open(remote_address, remote_port)
|
|
||||||
end
|
|
||||||
|
|
||||||
if sock.nil?
|
|
||||||
error = {
|
|
||||||
"status" => 502,
|
|
||||||
"code" => error_code,
|
|
||||||
"message" => error_message
|
|
||||||
}
|
|
||||||
|
|
||||||
if conn.nil?
|
|
||||||
puts jsonrpc2_error_encode(error, id)
|
|
||||||
else
|
|
||||||
buf = "HTTP/1.1 502 Bad Gateway\r\n\r\n"
|
|
||||||
buf += jsonrpc2_error_encode(error, id)
|
|
||||||
conn.write(buf)
|
|
||||||
end
|
|
||||||
else
|
|
||||||
if conn.nil?
|
|
||||||
sock.write(data) unless data.nil?
|
|
||||||
|
|
||||||
buf = nil
|
|
||||||
while buf != false && !sock.eof?
|
|
||||||
buf = sock.gets(buffer_size)
|
|
||||||
puts buf
|
|
||||||
end
|
|
||||||
else
|
|
||||||
buf = nil
|
|
||||||
while buf != false && !conn.eof?
|
|
||||||
buf = conn.gets(buffer_size)
|
|
||||||
sock.write(buf)
|
|
||||||
end
|
|
||||||
|
|
||||||
buf = nil
|
|
||||||
while buf != false && !sock.eof?
|
|
||||||
buf = sock.gets(buffer_size)
|
|
||||||
conn.write(buf)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
sock.close
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def relay_request(params, id = '')
|
|
||||||
buffer_size = params['buffer_size']
|
|
||||||
request_data = Base64.decode64(params['request_data'])
|
|
||||||
request_header = parse_headers(request_data)
|
|
||||||
request_length = params['request_length'].to_i
|
|
||||||
client_address = params['client_address']
|
|
||||||
client_port = params['client_port'].to_i
|
|
||||||
client_encoding = params['client_encoding']
|
|
||||||
remote_address = params['remote_address']
|
|
||||||
remote_port = params['remote_port'].to_i
|
|
||||||
scheme = params['scheme']
|
|
||||||
datetime = params['datetime'] # format: %Y-%m-%d %H:%M:%S.%f
|
|
||||||
|
|
||||||
begin
|
|
||||||
Timeout.timeout(DEFAULT_SOCKET_TIMEOUT) do
|
|
||||||
if ["https", "ssl", "tls"].include?(scheme)
|
|
||||||
ssl_context = OpenSSL::SSL::SSLContext.new
|
|
||||||
sock = OpenSSL::SSL::SSLSocket.new(TCPSocket.open(remote_address, remote_port), ssl_context)
|
|
||||||
sock.connect
|
|
||||||
else
|
|
||||||
sock = TCPSocket.open(remote_address, remote_port)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
rescue Timeout::Error
|
|
||||||
error = {
|
|
||||||
"status" => 504,
|
|
||||||
"message" => "Gateway Timeout"
|
|
||||||
}
|
|
||||||
puts jsonrpc2_error_encode(error, id)
|
|
||||||
return
|
|
||||||
end
|
|
||||||
|
|
||||||
case request_header['@method'][0]
|
|
||||||
when "CONNECT"
|
|
||||||
error = {
|
|
||||||
"status" => 405,
|
|
||||||
"code" => -1,
|
|
||||||
"message" => "Method Not Allowed"
|
|
||||||
}
|
|
||||||
puts jsonrpc2_error_encode(error, id)
|
|
||||||
else
|
|
||||||
read_from_remote_server(remote_address, remote_port, scheme, request_data, nil, buffer_size, id)
|
|
||||||
end
|
|
||||||
end
|
|
||||||
|
|
||||||
def relay_connect(params, id = '')
|
|
||||||
buffer_size = params['buffer_size']
|
|
||||||
client_address = params['client_address']
|
|
||||||
client_port = params['client_port'].to_i
|
|
||||||
client_encoding = params['client_encoding']
|
|
||||||
remote_address = params['remote_address']
|
|
||||||
remote_port = params['remote_port'].to_i
|
|
||||||
scheme = params['scheme']
|
|
||||||
datetime = params['datetime'] # format: %Y-%m-%d %H:%M:%S.%f
|
|
||||||
|
|
||||||
starttime = Time.now.to_f
|
|
||||||
|
|
||||||
begin
|
|
||||||
Timeout.timeout(STATEFUL_SOCKET_TIMEOUT) do
|
|
||||||
conn = TCPSocket.open(client_address, client_port)
|
|
||||||
end
|
|
||||||
rescue Timeout::Error
|
|
||||||
error = {
|
|
||||||
"status" => 504,
|
|
||||||
"message" => "Gateway Timeout"
|
|
||||||
}
|
|
||||||
puts jsonrpc2_error_encode(error, id)
|
|
||||||
return
|
|
||||||
end
|
|
||||||
|
|
||||||
stoptime = Time.now.to_f
|
|
||||||
connection_speed = ((stoptime - starttime) * 1000).to_i
|
|
||||||
data = jsonrpc2_encode("relay_accept", {
|
|
||||||
"success" => true,
|
|
||||||
"connection_speed" => connection_speed
|
|
||||||
}, id)
|
|
||||||
conn.write(data + "\r\n\r\n")
|
|
||||||
|
|
||||||
read_from_remote_server(remote_address, remote_port, scheme, nil, conn, buffer_size, id)
|
|
||||||
conn.close
|
|
||||||
end
|
|
||||||
|
|
||||||
context = JSON.parse(STDIN.read)
|
|
||||||
|
|
||||||
if context['jsonrpc'] == "2.0"
|
|
||||||
method = context['method']
|
|
||||||
case method
|
|
||||||
when "relay_request"
|
|
||||||
relay_request(context['params'], context['id'])
|
|
||||||
when "relay_connect"
|
|
||||||
relay_connect(context['params'], context['id'])
|
|
||||||
end
|
|
||||||
end
|
|
233  base.py
@@ -3,62 +3,135 @@
|
||||||
# base.py
|
# base.py
|
||||||
# base (common) file
|
# base (common) file
|
||||||
#
|
#
|
||||||
# Caterpillar Proxy - The simple and parasitic web proxy SPAM spam filter
|
# Caterpillar Proxy - The simple web debugging proxy (formerly, php-httpproxy)
|
||||||
# Namyheon Go (Catswords Research) <gnh1201@gmail.com>
|
# Namyheon Go (Catswords Research) <gnh1201@gmail.com>
|
||||||
|
# Euiseo Cha (Wonkwang University) <zeroday0619_dev@outlook.com>
|
||||||
# https://github.com/gnh1201/caterpillar
|
# https://github.com/gnh1201/caterpillar
|
||||||
# Created at: 2024-05-20
|
# Created at: 2024-05-20
|
||||||
# Updated at: 2024-05-21
|
# Updated at: 2024-11-14
|
||||||
#
|
#
|
||||||
|
import logging
|
||||||
import hashlib
|
import hashlib
|
||||||
import json
|
import json
|
||||||
|
import os
|
||||||
import re
|
import re
|
||||||
|
import importlib
|
||||||
|
import subprocess
|
||||||
|
import platform
|
||||||
|
|
||||||
|
from abc import ABC, abstractmethod
|
||||||
|
from datetime import datetime, timezone
|
||||||
|
from typing import Union, List
|
||||||
|
|
||||||
|
client_encoding = "utf-8"
|
||||||
|
|
||||||
client_encoding = 'utf-8'
|
|
||||||
|
|
||||||
def extract_credentials(url):
|
def extract_credentials(url):
|
||||||
pattern = re.compile(r'(?P<scheme>\w+://)?(?P<username>[^:/]+):(?P<password>[^@]+)@(?P<url>.+)')
|
pattern = re.compile(
|
||||||
|
r"(?P<scheme>\w+://)?(?P<username>[^:/]+):(?P<password>[^@]+)@(?P<url>.+)"
|
||||||
|
)
|
||||||
match = pattern.match(url)
|
match = pattern.match(url)
|
||||||
if match:
|
if match:
|
||||||
scheme = match.group('scheme') if match.group('scheme') else 'https://'
|
scheme = match.group("scheme") if match.group("scheme") else "https://"
|
||||||
username = match.group('username')
|
username = match.group("username")
|
||||||
password = match.group('password')
|
password = match.group("password")
|
||||||
url = match.group('url')
|
url = match.group("url")
|
||||||
return username, password, scheme + url
|
return username, password, scheme + url
|
||||||
else:
|
else:
|
||||||
return None, None, url
|
return None, None, url
|
||||||
|
|
||||||
|
|
||||||
def jsonrpc2_create_id(data):
|
def jsonrpc2_create_id(data):
|
||||||
return hashlib.sha1(json.dumps(data).encode(client_encoding)).hexdigest()
|
return hashlib.sha1(json.dumps(data).encode(client_encoding)).hexdigest()
|
||||||
|
|
||||||
def jsonrpc2_encode(method, params = None):
|
|
||||||
data = {
|
def jsonrpc2_encode(method, params=None):
|
||||||
"jsonrpc": "2.0",
|
data = {"jsonrpc": "2.0", "method": method, "params": params}
|
||||||
"method": method,
|
|
||||||
"params": params
|
|
||||||
}
|
|
||||||
id = jsonrpc2_create_id(data)
|
id = jsonrpc2_create_id(data)
|
||||||
data['id'] = id
|
data["id"] = id
|
||||||
return (id, json.dumps(data))
|
return (id, json.dumps(data))
|
||||||
|
|
||||||
def jsonrpc2_result_encode(result, id = ''):
|
|
||||||
data = {
|
def jsonrpc2_decode(text):
|
||||||
"jsonrpc": "2.0",
|
data = json.loads(text)
|
||||||
"result": result,
|
type = "error" if "error" in data else "result" if "result" in data else None
|
||||||
"id": id
|
id = data.get("id")
|
||||||
}
|
rpcdata = data.get(type) if type else None
|
||||||
|
return type, id, rpcdata
|
||||||
|
|
||||||
|
|
||||||
|
def jsonrpc2_result_encode(result, id=""):
|
||||||
|
data = {"jsonrpc": "2.0", "result": result, "id": id}
|
||||||
return json.dumps(data)
|
return json.dumps(data)
|
||||||
|
|
||||||
def jsonrpc2_error_encode(error, id = ''):
|
|
||||||
data = {
|
def jsonrpc2_error_encode(error, id=""):
|
||||||
"jsonrpc": "2.0",
|
data = {"jsonrpc": "2.0", "error": error, "id": id}
|
||||||
"error": error,
|
|
||||||
"id": id
|
|
||||||
}
|
|
||||||
return json.dumps(data)
|
return json.dumps(data)
|
||||||
|
|
||||||
class Extension():
|
|
||||||
extensions = []
|
def find_openssl_binpath():
|
||||||
|
system = platform.system()
|
||||||
|
|
||||||
|
if system == "Windows":
|
||||||
|
possible_paths = [
|
||||||
|
os.path.join(
|
||||||
|
os.getenv("ProgramFiles", "C:\\Program Files"),
|
||||||
|
"OpenSSL-Win64",
|
||||||
|
"bin",
|
||||||
|
"openssl.exe",
|
||||||
|
),
|
||||||
|
os.path.join(
|
||||||
|
os.getenv("ProgramFiles", "C:\\Program Files"),
|
||||||
|
"OpenSSL-Win32",
|
||||||
|
"bin",
|
||||||
|
"openssl.exe",
|
||||||
|
),
|
||||||
|
os.path.join(
|
||||||
|
os.getenv("ProgramFiles(x86)", "C:\\Program Files (x86)"),
|
||||||
|
"OpenSSL-Win32",
|
||||||
|
"bin",
|
||||||
|
"openssl.exe",
|
||||||
|
),
|
||||||
|
os.path.join(
|
||||||
|
os.getenv("ProgramW6432", "C:\\Program Files"),
|
||||||
|
"OpenSSL-Win64",
|
||||||
|
"bin",
|
||||||
|
"openssl.exe",
|
||||||
|
),
|
||||||
|
os.path.join(
|
||||||
|
os.getenv("ProgramW6432", "C:\\Program Files"),
|
||||||
|
"OpenSSL-Win32",
|
||||||
|
"bin",
|
||||||
|
"openssl.exe",
|
||||||
|
),
|
||||||
|
]
|
||||||
|
for path in possible_paths:
|
||||||
|
if os.path.exists(path):
|
||||||
|
return path
|
||||||
|
else:
|
||||||
|
try:
|
||||||
|
result = subprocess.run(
|
||||||
|
["which", "openssl"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
|
||||||
|
)
|
||||||
|
path = result.stdout.decode().strip()
|
||||||
|
if path:
|
||||||
|
return path
|
||||||
|
except Exception:
|
||||||
|
pass
|
||||||
|
|
||||||
|
return "openssl"
|
||||||
|
|
||||||
|
|
||||||
|
class ExtensionType:
|
||||||
|
def __init__(self):
|
||||||
|
self.type: str = None
|
||||||
|
self.method: str = None
|
||||||
|
self.exported_methods: list[str] = []
|
||||||
|
self.connection_type: str = None
|
||||||
|
|
||||||
|
class Extension:
|
||||||
|
extensions: list[ExtensionType] = []
|
||||||
protocols = []
|
protocols = []
|
||||||
buffer_size = 8192
|
buffer_size = 8192
|
||||||
|
|
||||||
|
@ -71,8 +144,16 @@ class Extension():
|
||||||
cls.buffer_size = _buffer_size
|
cls.buffer_size = _buffer_size
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def register(cls, f):
|
def register(cls, s):
|
||||||
cls.extensions.append(f)
|
module_name, class_name = s.strip().split(".")[0:2]
|
||||||
|
module_path = "plugins." + module_name
|
||||||
|
|
||||||
|
try:
|
||||||
|
module = importlib.import_module(module_path)
|
||||||
|
_class = getattr(module, class_name)
|
||||||
|
cls.extensions.append(_class())
|
||||||
|
except (ImportError, AttributeError):
|
||||||
|
raise ImportError(class_name + " in the extension " + module_name)
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_filters(cls):
|
def get_filters(cls):
|
||||||
|
@ -85,7 +166,13 @@ class Extension():
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_rpcmethod(cls, method):
|
def get_rpcmethod(cls, method):
|
||||||
for extension in cls.extensions:
|
for extension in cls.extensions:
|
||||||
is_exported_method = (method == extension.method) or (method in extension.exported_methods)
|
is_exported_method = False
|
||||||
|
try:
|
||||||
|
is_exported_method = (method == extension.method) or (
|
||||||
|
method in extension.exported_methods
|
||||||
|
)
|
||||||
|
except:
|
||||||
|
pass
|
||||||
if extension.type == "rpcmethod" and is_exported_method:
|
if extension.type == "rpcmethod" and is_exported_method:
|
||||||
return extension
|
return extension
|
||||||
return None
|
return None
|
||||||
|
@ -104,24 +191,41 @@ class Extension():
|
||||||
@classmethod
|
@classmethod
|
||||||
def get_connector(cls, connection_type):
|
def get_connector(cls, connection_type):
|
||||||
for extension in cls.extensions:
|
for extension in cls.extensions:
|
||||||
if extension.type == "connector" and extension.connection_type == connection_type:
|
if (
|
||||||
|
extension.type == "connector"
|
||||||
|
and extension.connection_type == connection_type
|
||||||
|
):
|
||||||
return extension
|
return extension
|
||||||
return None
|
return None
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def send_accept(cls, conn, method, success = True):
|
def test_connectors(cls, data):
|
||||||
if 'tcp' in cls.protocols:
|
def test(preludes, data):
|
||||||
_, message = jsonrpc2_encode(f"{method}_accept", {
|
for prelude in preludes:
|
||||||
"success": success
|
if data.find(prelude) == 0:
|
||||||
})
|
return True
|
||||||
|
return False
|
||||||
|
|
||||||
|
for extension in cls.extensions:
|
||||||
|
if (
|
||||||
|
extension.type == "connector"
|
||||||
|
and test(extension.preludes, data)
|
||||||
|
):
|
||||||
|
return extension
|
||||||
|
return None
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def send_accept(cls, conn, method, success=True):
|
||||||
|
if "tcp" in cls.protocols:
|
||||||
|
_, message = jsonrpc2_encode(f"{method}_accept", {"success": success})
|
||||||
conn.send(message.encode(client_encoding))
|
conn.send(message.encode(client_encoding))
|
||||||
|
|
||||||
print (f"Accepted request with {cls.protocols[0]} protocol")
|
print(f"Accepted request with {cls.protocols[0]} protocol")
|
||||||
|
|
||||||
@classmethod
|
@classmethod
|
||||||
def readall(cls, conn):
|
def readall(cls, conn):
|
||||||
if 'tcp' in cls.protocols:
|
if "tcp" in cls.protocols:
|
||||||
data = b''
|
data = b""
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
chunk = conn.recv(cls.buffer_size)
|
chunk = conn.recv(cls.buffer_size)
|
||||||
|
@ -133,13 +237,13 @@ class Extension():
|
||||||
|
|
||||||
return data
|
return data
|
||||||
|
|
||||||
elif 'http' in cls.protocols:
|
elif "http" in cls.protocols:
|
||||||
# empty binary when an file not exists
|
# empty binary when an file not exists
|
||||||
if 'file' not in conn.request.files:
|
if "file" not in conn.request.files:
|
||||||
return b''
|
return b""
|
||||||
|
|
||||||
# read an uploaded file with binary mode
|
# read an uploaded file with binary mode
|
||||||
file = conn.request.files['file']
|
file = conn.request.files["file"]
|
||||||
return file.read()
|
return file.read()
|
||||||
|
|
||||||
def __init__(self):
|
def __init__(self):
|
||||||
|
@ -151,8 +255,43 @@ class Extension():
|
||||||
def test(self, filtered, data, webserver, port, scheme, method, url):
|
def test(self, filtered, data, webserver, port, scheme, method, url):
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def dispatch(self, type, id, params, method = None, conn = None):
|
def dispatch(self, type, id, params, method=None, conn=None):
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
def connect(self, conn, data, webserver, port, scheme, method, url):
|
def connect(self, conn, data, webserver, port, scheme, method, url):
|
||||||
raise NotImplementedError
|
raise NotImplementedError
|
||||||
|
|
||||||
|
|
||||||
|
class Logger(logging.Logger):
|
||||||
|
def __init__(self, name: str, level: int = logging.NOTSET):
|
||||||
|
super().__init__(name, level)
|
||||||
|
self.formatter = logging.Formatter(
|
||||||
|
"[%(asctime)s] %(levelname)s %(module)s: %(message)s"
|
||||||
|
)
|
||||||
|
|
||||||
|
if not os.path.isdir("logs"):
|
||||||
|
os.mkdir("logs")
|
||||||
|
stream_handler = logging.StreamHandler()
|
||||||
|
file_handler = logging.FileHandler(
|
||||||
|
"logs/" + name + "-" + self._generate_timestamp() + ".log"
|
||||||
|
)
|
||||||
|
|
||||||
|
self._set_formatters([stream_handler, file_handler])
|
||||||
|
self._add_handlers([stream_handler, file_handler])
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def _generate_timestamp():
|
||||||
|
date = datetime.now(tz=timezone.utc).strftime("%Y-%m-%d")
|
||||||
|
return date
|
||||||
|
|
||||||
|
def _set_formatters(
|
||||||
|
self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]]
|
||||||
|
):
|
||||||
|
for handler in handlers:
|
||||||
|
handler.setFormatter(self.formatter)
|
||||||
|
|
||||||
|
def _add_handlers(
|
||||||
|
self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]]
|
||||||
|
):
|
||||||
|
for handler in handlers:
|
||||||
|
self.addHandler(handler)
|
||||||
|
|
|
@@ -1,4 +0,0 @@
#!/bin/sh
openssl genrsa -out ca.key 2048
openssl req -new -x509 -days 3650 -key ca.key -out ca.crt -subj "/CN=php-httpproxy CA"
openssl genrsa -out cert.key 2048
341  console.html
@@ -1,50 +1,80 @@
|
||||||
<!doctype html>
|
<!doctype html>
|
||||||
<html>
|
<html>
|
||||||
<head>
|
<head>
|
||||||
<title>Caterpillar Proxy Web Console</title>
|
<title>Caterpillar Proxy Console</title>
|
||||||
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
|
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
|
||||||
<meta http-equiv="Content-Security-Policy" content="upgrade-insecure-requests">
|
<!--<meta http-equiv="Content-Security-Policy" content="upgrade-insecure-requests">-->
|
||||||
<link href="https://cdnjs.cloudflare.com/ajax/libs/jquery.terminal/2.42.0/css/jquery.terminal.min.css" rel="stylesheet"/>
|
<meta name="referrer" content="unsafe-url">
|
||||||
|
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/jquery.terminal/2.44.1/css/jquery.terminal.min.css">
|
||||||
|
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css">
|
||||||
<style type="text/css">/*<!--<![CDATA[*/
|
<style type="text/css">/*<!--<![CDATA[*/
|
||||||
body {
|
html, body, main {
|
||||||
|
width: 100%;
|
||||||
|
height: 100%;
|
||||||
|
padding: 0;
|
||||||
|
margin: 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
#content {
|
||||||
|
float: right;
|
||||||
|
width: 80%;
|
||||||
|
height: 100%;
|
||||||
|
scroll: hidden;
|
||||||
|
}
|
||||||
|
|
||||||
|
#cover {
|
||||||
|
float: left;
|
||||||
|
width: 20%;
|
||||||
|
height: 100%;
|
||||||
|
scroll: hidden;
|
||||||
|
|
||||||
background: #2e8d36 url(https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/bg.jpg) no-repeat;
|
background: #2e8d36 url(https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/bg.jpg) no-repeat;
|
||||||
background-size: cover;
|
background-size: cover;
|
||||||
background-position: center;
|
background-position: center;
|
||||||
}
|
}
|
||||||
h1, p {
|
|
||||||
color: #093923;
|
#cover article {
|
||||||
|
margin: 30px;
|
||||||
}
|
}
|
||||||
p a {
|
|
||||||
color: #fff;
|
#console {
|
||||||
padding: 0 2px;
|
height: 100%;
|
||||||
text-decoration: none;
|
|
||||||
border-bottom: 2px solid #fff;
|
|
||||||
}
|
|
||||||
main {
|
|
||||||
width: 640px;
|
|
||||||
margin: 0 auto;
|
|
||||||
}
|
|
||||||
.terminal, .cmd {
|
|
||||||
background: #093923;
|
|
||||||
}
|
}
|
||||||
/*]]>-->*/</style>
|
/*]]>-->*/</style>
|
||||||
</head>
|
</head>
|
||||||
<body>
|
<body>
|
||||||
<main>
|
<main>
|
||||||
<h1>Caterpillar Proxy Web Console</h1>
|
<section id="content">
|
||||||
<p>Download an worker script of <a href="https://github.com/gnh1201/caterpillar">Caterpillar Proxy</a>.</p>
|
<div id="console"></div>
|
||||||
<div id="console"></div>
|
<div id="map"></div>
|
||||||
<p><a href="https://github.com/gnh1201/caterpillar">Fork me. gnh1201/caterpillar (GitHub)</a></p>
|
<div id="embed"></div>
|
||||||
|
</section>
|
||||||
|
<section id="cover">
|
||||||
|
<article>
|
||||||
|
<h1>Caterpillar Proxy Console</h1>
|
||||||
|
<p>Source code available</p>
|
||||||
|
<p><a href="https://github.com/gnh1201/caterpillar">gnh1201/caterpillar (GitHub)</a></p>
|
||||||
|
<p><a href="https://github.com/gnh1201/caterpillar-plugins">gnh1201/caterpillar-plugins (GitHub)</a></p>
|
||||||
|
</article>
|
||||||
|
</section>
|
||||||
</main>
|
</main>
|
||||||
|
|
||||||
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.7.1/jquery.min.js"></script>
|
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.7.1/jquery.min.js"></script>
|
||||||
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/jquery.terminal/2.42.0/js/jquery.terminal.min.js"></script>
|
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery.terminal/2.44.1/js/jquery.terminal.min.js"></script>
|
||||||
|
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"></script>
|
||||||
<script type="text/javascript">//<!--<![CDATA[
|
<script type="text/javascript">//<!--<![CDATA[
|
||||||
var env = {
|
var env = {
|
||||||
"target": "http://localhost/",
|
"target": "https://azure-ashlan-40.tiiny.io/",
|
||||||
"method": "",
|
"method": "",
|
||||||
"filename": null
|
"filename": null
|
||||||
};
|
};
|
||||||
|
var set_default_env = function(_env) {
|
||||||
|
for (k in _env) {
|
||||||
|
if (!(k in env)) {
|
||||||
|
env[k] = _env[k];
|
||||||
|
}
|
||||||
|
}
|
||||||
|
};
|
||||||
var pretty_jsonify = function(data) {
|
var pretty_jsonify = function(data) {
|
||||||
return JSON.stringify(data, null, 4);
|
return JSON.stringify(data, null, 4);
|
||||||
};
|
};
|
||||||
|
@ -56,7 +86,27 @@
|
||||||
document.body.appendChild(element);
|
document.body.appendChild(element);
|
||||||
element.click();
|
element.click();
|
||||||
document.body.removeChild(element);
|
document.body.removeChild(element);
|
||||||
}
|
};
|
||||||
|
var show_embed = function(term, url) {
|
||||||
|
term.echo('', {
|
||||||
|
finalize: function($div) {
|
||||||
|
var $embed = $("#embed");
|
||||||
|
$embed.html($("<iframe/>").attr({
|
||||||
|
"title": "embed web page",
|
||||||
|
"src": url,
|
||||||
|
"allow": "accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share",
|
||||||
|
"referrerpolicy": "unsafe-url",
|
||||||
|
"allowfullscreen": true
|
||||||
|
}).css({
|
||||||
|
"width": "100%",
|
||||||
|
"height": "240px",
|
||||||
|
"border": "none"
|
||||||
|
}));
|
||||||
|
$div.children().last().append($embed);
|
||||||
|
term.echo();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
};
|
||||||
var jsonrpc2_request = function(term, method, params) {
|
var jsonrpc2_request = function(term, method, params) {
|
||||||
var requestData = {
|
var requestData = {
|
||||||
jsonrpc: "2.0",
|
jsonrpc: "2.0",
|
||||||
|
@ -84,7 +134,12 @@
|
||||||
// for dirty response (e.g., magic header, advertise logo)
|
// for dirty response (e.g., magic header, advertise logo)
|
||||||
try {
|
try {
|
||||||
var start = s.indexOf('{');
|
var start = s.indexOf('{');
|
||||||
var end = s.lastIndexOf('}');
|
var end = [s.indexOf("}\r\n\r\n"), s.lastIndexOf('}')].reduce(function(a, x) {
|
||||||
|
if (x > 0 && a > x) {
|
||||||
|
a = x; // set new value if x greater than 0 and x less than previous value
|
||||||
|
}
|
||||||
|
return a;
|
||||||
|
}, s.length);
|
||||||
if (start > -1 && end > -1 && end > start) {
|
if (start > -1 && end > -1 && end > start) {
|
||||||
responseData = JSON.parse(s.substring(start, end + 1));
|
responseData = JSON.parse(s.substring(start, end + 1));
|
||||||
} else {
|
} else {
|
||||||
|
@ -118,12 +173,71 @@
|
||||||
text = responseData.result.data;
|
text = responseData.result.data;
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
term.echo(text);
|
|
||||||
|
|
||||||
// save as a file
|
// save as a file
|
||||||
if (env.filename != null) {
|
if (env.filename != null) {
|
||||||
download_text(env.filename, text);
|
download_text(env.filename, text);
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// method(relay_get_geolocation)
|
||||||
|
if (env.method == "relay_get_geolocation") {
|
||||||
|
term.echo(text);
|
||||||
|
term.echo('', {
|
||||||
|
finalize: function($div) {
|
||||||
|
var geodata = responseData.result.data;
|
||||||
|
var $map = $("#map").css({
|
||||||
|
"height": "240px"
|
||||||
|
});
|
||||||
|
$div.children().last().append($map);
|
||||||
|
map.setView([geodata.lat, geodata.lon], 13);
|
||||||
|
var circle = L.circle([geodata.lat, geodata.lon], {
|
||||||
|
color: 'red',
|
||||||
|
fillColor: '#f03',
|
||||||
|
fillOpacity: 0.5,
|
||||||
|
radius: 500
|
||||||
|
}).addTo(map);
|
||||||
|
term.echo();
|
||||||
|
}
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// method(relay_web_search)
|
||||||
|
if (env.method == "relay_web_search") {
|
||||||
|
var searchdata = responseData.result.data;
|
||||||
|
|
||||||
|
if ("error" in searchdata) {
|
||||||
|
term.echo(searchdata.error.message);
|
||||||
|
term.echo('');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
var results = Object.values(searchdata);
|
||||||
|
if (results.length > 0) {
|
||||||
|
results.forEach(function(x) {
|
||||||
|
if (typeof x !== "object") return;
|
||||||
|
|
||||||
|
if ("special_response" in x) {
|
||||||
|
term.echo("< " + x.special_response.response);
|
||||||
|
term.echo("< " + x.special_response.source);
|
||||||
|
term.echo('');
|
||||||
|
} else {
|
||||||
|
var base_domain = (function(s) {
|
||||||
|
return s.split("/")[2];
|
||||||
|
})(x.base_url);
|
||||||
|
term.echo("< [[!;;;;" + x.url + ";{}]" + x.title.trim() + " (" + base_domain + ")]: " + x.description.trim());
|
||||||
|
}
|
||||||
|
});
|
||||||
|
} else {
|
||||||
|
term.echo("No any results");
|
||||||
|
}
|
||||||
|
|
||||||
|
term.echo('');
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// print a response
|
||||||
|
term.echo(text);
|
||||||
},
|
},
|
||||||
error: function(xhr, status, error) {
|
error: function(xhr, status, error) {
|
||||||
term.echo(error);
|
term.echo(error);
|
||||||
|
@ -133,32 +247,68 @@
|
||||||
|
|
||||||
jQuery(function($, undefined) {
|
jQuery(function($, undefined) {
|
||||||
$('#console').terminal({
|
$('#console').terminal({
|
||||||
set: function(k, v) {
|
set: function(...args) {
|
||||||
|
var k = (args.length > 0 ? args[0] : '');
|
||||||
|
var v = (args.length > 1 ? args.slice(1) : []).join(' ');
|
||||||
|
|
||||||
|
// "env" is the reserved word
|
||||||
if (k == "env") {
|
if (k == "env") {
|
||||||
this.echo("env is the reserved word");
|
this.echo("env is the reserved word");
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// check a variable is it Array
|
||||||
|
if (k in env && env[k] instanceof Array) {
|
||||||
|
env[k].push(v);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// method(relay_web_search)
|
||||||
|
if (env.method == "relay_web_search" && k == "page") {
|
||||||
|
env[k] = parseInt(v);
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
|
||||||
env[k] = v || null;
|
env[k] = v || null;
|
||||||
|
|
||||||
if (k == "method") {
|
if (k == "method") {
|
||||||
this.set_prompt('method([[b;red;black]' + env.method + '])> ');
|
this.set_prompt('method([[b;red;black]' + env.method + '])> ');
|
||||||
|
|
||||||
|
// method(relay_invoke_method)
|
||||||
|
if (env.method == "relay_invoke_method") {
|
||||||
|
set_default_env({
|
||||||
|
"requires": []
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// method(relay_sendmail)
|
||||||
|
if (env.method == "relay_sendmail") {
|
||||||
|
set_default_env({
|
||||||
|
"mail_to": "noreply@example.org",
|
||||||
|
"mail_from": "noreply@example.org",
|
||||||
|
"mail_subject": "Important Message from System Administrator"
|
||||||
|
});
|
||||||
|
}
|
||||||
|
|
||||||
|
// method(relay_mysql_query)
|
||||||
if (env.method == "relay_mysql_query") {
|
if (env.method == "relay_mysql_query") {
|
||||||
var _env = {
|
set_default_env({
|
||||||
"mysql_hostname": "localhost",
|
"mysql_hostname": "localhost",
|
||||||
"mysql_username": "root",
|
"mysql_username": "root",
|
||||||
"mysql_password": null,
|
"mysql_password": null,
|
||||||
"mysql_database": null,
|
"mysql_database": null,
|
||||||
"mysql_port": "3306",
|
"mysql_port": "3306",
|
||||||
"mysql_charset": "utf8"
|
"mysql_charset": "utf8"
|
||||||
};
|
});
|
||||||
|
}
|
||||||
|
|
||||||
for (k in _env) {
|
// method(relay_web_search)
|
||||||
if (!(k in env)) {
|
if (env.method == "relay_web_search") {
|
||||||
env[k] = _env[k];
|
set_default_env({
|
||||||
}
|
"keyword": "",
|
||||||
}
|
"page": 1
|
||||||
|
});
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
|
@ -188,6 +338,7 @@
|
||||||
|
|
||||||
jsonrpc2_request(this, env.method, {
|
jsonrpc2_request(this, env.method, {
|
||||||
"callback": args[0],
|
"callback": args[0],
|
||||||
|
"requires": env.requires,
|
||||||
"args": args.slice(1)
|
"args": args.slice(1)
|
||||||
});
|
});
|
||||||
return;
|
return;
|
||||||
|
@ -203,7 +354,6 @@
|
||||||
jsonrpc2_request(this, env.method, {
|
jsonrpc2_request(this, env.method, {
|
||||||
"hostname": args[0]
|
"hostname": args[0]
|
||||||
});
|
});
|
||||||
|
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -217,7 +367,20 @@
|
||||||
jsonrpc2_request(this, env.method, {
|
jsonrpc2_request(this, env.method, {
|
||||||
"url": args[0]
|
"url": args[0]
|
||||||
});
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// method(relay_sendmail)
|
||||||
|
if (env.method == "relay_sendmail") {
|
||||||
|
this.echo("From: " + env.mail_from + "\r\nTo: " + env.mail_to + "\r\nSubject: " + env.mail_subject);
|
||||||
|
this.read("Enter your message:\r\n", function(message) {
|
||||||
|
jsonrpc2_request(this, env.method, {
|
||||||
|
"to": env.mail_to,
|
||||||
|
"from": env.mail_from,
|
||||||
|
"subject": env.mail_subject,
|
||||||
|
"message": message
|
||||||
|
});
|
||||||
|
});
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
@ -244,16 +407,114 @@
|
||||||
return;
|
return;
|
||||||
}
|
}
|
||||||
|
|
||||||
|
// method(analyze_sequence)
|
||||||
|
if (env.method == "analyze_sequence") {
|
||||||
|
var _this = this;
|
||||||
|
this.read("Enter the sequence:\r\n", function(message) {
|
||||||
|
jsonrpc2_request(_this, env.method, {
|
||||||
|
"sequence": message
|
||||||
|
});
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// method(gc_content_calculation)
|
||||||
|
if (env.method == "gc_content_calculation") {
|
||||||
|
var _this = this;
|
||||||
|
this.read("Enter the sequence:\r\n", function(message) {
|
||||||
|
jsonrpc2_request(_this, env.method, {
|
||||||
|
"sequence": message
|
||||||
|
});
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// method(container_start)
|
||||||
|
if ([
|
||||||
|
"container_start",
|
||||||
|
"container_stop",
|
||||||
|
"container_pause",
|
||||||
|
"container_unpause",
|
||||||
|
"container_restart",
|
||||||
|
"container_kill",
|
||||||
|
"container_remove"
|
||||||
|
].indexOf(env.method) > -1) {
|
||||||
|
if (args.length < 1) {
|
||||||
|
this.echo("Please set a container name");
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
jsonrpc2_request(this, env.method, {
|
||||||
|
"name": args[0]
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
// method(relay_web_search)
|
||||||
|
if (env.method == "relay_web_search") {
|
||||||
|
jsonrpc2_request(this, env.method, {
|
||||||
|
"keyword": env.keyword,
|
||||||
|
"page": env.page,
|
||||||
|
"type": "text"
|
||||||
|
});
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
// method(*)
|
// method(*)
|
||||||
jsonrpc2_request(this, env.method, {});
|
jsonrpc2_request(this, env.method, {});
|
||||||
}
|
},
|
||||||
|
show_embed: function(url) {
|
||||||
|
show_embed(this, url);
|
||||||
|
},
|
||||||
|
youtube: function(...args) {
|
||||||
|
if (args.length < 1) {
|
||||||
|
this.echo("Please let me know what do you want to do.");
|
||||||
|
}
|
||||||
|
|
||||||
|
var action = args[0];
|
||||||
|
switch (action) {
|
||||||
|
case "play":
|
||||||
|
if (args.length < 2) {
|
||||||
|
this.echo("Please let me know the video ID");
|
||||||
|
}
|
||||||
|
var video_id = args[1];
|
||||||
|
show_embed(this, "https://www.youtube.com/embed/" + video_id);
|
||||||
|
break;
|
||||||
|
}
|
||||||
|
},
|
||||||
|
search: function(...args) {
|
||||||
|
this.exec("set method relay_web_search");
|
||||||
|
this.exec("set page 1");
|
||||||
|
this.exec("set keyword " + args.join(' '));
|
||||||
|
this.exec("do");
|
||||||
|
},
|
||||||
|
next: function() {
|
||||||
|
if (env.method == "relay_web_search") {
|
||||||
|
var num = parseInt(env.page) + 1;
|
||||||
|
this.exec("set page " + num);
|
||||||
|
this.exec("do");
|
||||||
|
}
|
||||||
|
},
|
||||||
|
prev: function() {
|
||||||
|
if (env.method == "relay_web_search") {
|
||||||
|
var num = (env.page > 1 ? env.page - 1 : 1);
|
||||||
|
this.exec("set page " + num);
|
||||||
|
this.exec("do");
|
||||||
|
}
|
||||||
|
},
|
||||||
}, {
|
}, {
|
||||||
height: 480,
|
height: "100%",
|
||||||
width: 640,
|
width: "100%",
|
||||||
prompt: '> ',
|
prompt: '> ',
|
||||||
checkArity: false
|
checkArity: false
|
||||||
});
|
});
|
||||||
});
|
});
|
||||||
|
|
||||||
|
var map = L.map('map');
|
||||||
|
L.tileLayer('https://tile.openstreetmap.org/{z}/{x}/{y}.png', {
|
||||||
|
maxZoom: 19,
|
||||||
|
attribution: '© <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>'
|
||||||
|
}).addTo(map);
|
||||||
//]]>--></script>
|
//]]>--></script>
|
||||||
</body>
|
</body>
|
||||||
</html>
|
</html>
|
||||||
|
|
9  download_certs.bat  Normal file
@@ -0,0 +1,9 @@
@echo off
bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.crt %CD%\ca.crt
bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.key %CD%\ca.key
bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/cert.key %CD%\cert.key

REM echo if you want generate a certificate...
REM openssl genrsa -out ca.key 2048
REM openssl req -new -x509 -days 3650 -key ca.key -out ca.crt -subj "/CN=php-httpproxy CA"
REM openssl genrsa -out cert.key 2048
9  download_certs.sh  Executable file
@@ -0,0 +1,9 @@
#!/bin/sh
wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.crt
wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.key
wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/cert.key

# echo "if you want generate a certificate..."
#openssl genrsa -out ca.key 2048
#openssl req -new -x509 -days 3650 -key ca.key -out ca.crt -subj "/CN=php-httpproxy CA"
#openssl genrsa -out cert.key 2048
1  plugins  Submodule
@@ -0,0 +1 @@
Subproject commit 59833335c31a120feb99481be1606bd0dfecc9f4
@ -1,57 +0,0 @@
|
||||||
#!/usr/bin/python3
|
|
||||||
#
|
|
||||||
# container.py
|
|
||||||
# Linux Container (e.g. Docker) plugin for Caterpillar Proxy
|
|
||||||
#
|
|
||||||
# Caterpillar Proxy - The simple and parasitic web proxy with SPAM filter
|
|
||||||
# Namyheon Go (Catswords Research) <gnh1201@gmail.com>
|
|
||||||
# https://github.com/gnh1201/caterpillar
|
|
||||||
# Created at: 2024-03-04
|
|
||||||
# Updated at: 2024-03-13
|
|
||||||
#
|
|
||||||
|
|
||||||
import docker
|
|
||||||
|
|
||||||
from server import Extension
|
|
||||||
|
|
||||||
class Container(Extension):
|
|
||||||
def __init__(self):
|
|
||||||
self.type = "rpcmethod"
|
|
||||||
self.method = "container_init"
|
|
||||||
self.exported_methods = ["container_run", "container_stop"]
|
|
||||||
|
|
||||||
# docker
|
|
||||||
self.client = docker.from_env()
|
|
||||||
|
|
||||||
def dispatch(self, type, id, params, conn):
|
|
||||||
print ("[*] Greeting! dispatch")
|
|
||||||
conn.send(b'Greeting! dispatch')
|
|
||||||
|
|
||||||
def container_run(self, type, id, params, conn):
|
|
||||||
devices = params['devices']
|
|
||||||
image = params['image']
|
|
||||||
devices = params['devices']
|
|
||||||
name = params['name']
|
|
||||||
environment = params['environment']
|
|
||||||
volumes = params['volumes']
|
|
||||||
|
|
||||||
container = client.containers.run(
|
|
||||||
image,
|
|
||||||
devices=devices,
|
|
||||||
name=name,
|
|
||||||
volumes=volumes,
|
|
||||||
environment=environment,
|
|
||||||
detach=True
|
|
||||||
)
|
|
||||||
container.logs()
|
|
||||||
|
|
||||||
print ("[*] Running...")
|
|
||||||
|
|
||||||
def container_stop(self, type, id, params, conn):
|
|
||||||
name = params['name']
|
|
||||||
|
|
||||||
container = client.containers.get(name)
|
|
||||||
container.stop()
|
|
||||||
|
|
||||||
print ("[*] Stopped")
|
|
||||||
|
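For context, the removed plugin above exposes container_run and container_stop as JSON-RPC methods, and server.py (further down in this diff) dispatches such methods through Extension.dispatch_rpcmethod and accepts JSON-RPC 2.0 over HTTP at /proxy-cgi/jsonrpc2. A minimal client sketch, assuming the plugin is registered and the proxy listens on localhost:5555 (the default PORT); the image, name, and volume values are placeholders:

    import requests

    # hypothetical JSON-RPC 2.0 call to the proxy's stateless HTTP endpoint
    payload = {
        "jsonrpc": "2.0",
        "method": "container_run",
        "id": 1,
        "params": {
            "image": "nginx:latest",
            "devices": [],
            "name": "example",
            "environment": {},
            "volumes": {},
        },
    }
    response = requests.post("http://localhost:5555/proxy-cgi/jsonrpc2", json=payload, timeout=10)
    print(response.text)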
@@ -1,274 +0,0 @@
#!/usr/bin/python3
#
# fediverse.py
# Fediverse (Mastodon, Misskey, Pleroma, ...) SPAM filter plugin for Caterpillar Proxy
#
# Caterpillar Proxy - The simple and parasitic web proxy with SPAM filter (formerly, php-httpproxy)
# Namyheon Go (Catswords Research) <abuse@catswords.net>
# https://github.com/gnh1201/caterpillar
#
# Created in: 2022-10-06
# Updated in: 2024-06-05
#

import io
import re
import requests
import os.path
import base64
import hashlib

from decouple import config
from PIL import Image

from server import Extension

try:
    client_encoding = config('CLIENT_ENCODING', default='utf-8')
    truecaptcha_userid = config('TRUECAPTCHA_USERID') # truecaptcha.org
    truecaptcha_apikey = config('TRUECAPTCHA_APIKEY') # truecaptcha.org
    dictionary_file = config('DICTIONARY_FILE', default='words_alpha.txt') # https://github.com/dwyl/english-words
    librey_apiurl = config('LIBREY_APIURL', default='https://search.catswords.net') # https://github.com/Ahwxorg/librey
except Exception as e:
    print ("[*] Invalid configuration: %s" % (str(e)))

class Fediverse(Extension):
    def __init__(self):
        self.type = "filter" # this is a filter

        # Load data to use KnownWords4 strategy
        # Download data: https://github.com/dwyl/english-words
        self.known_words = []
        if dictionary_file != '' and os.path.isfile(dictionary_file):
            with open(dictionary_file, "r") as file:
                words = file.readlines()
                self.known_words = [word.strip() for word in words if len(word.strip()) > 3]
            print ("[*] Data loaded to use KnownWords4 strategy")

    def test(self, filtered, data, webserver, port, scheme, method, url):
        # prevent cache confusing
        if data.find(b'<title>Welcome to nginx!</title>') > -1:
            return True

        # allowed conditions
        if method == b'GET' or url.find(b'/api') > -1:
            return False

        # convert to text
        data_length = len(data)
        text = data.decode(client_encoding, errors='ignore')
        error_rate = (data_length - len(text)) / data_length
        if error_rate > 0.2: # it is a binary data
            return False

        # check ID with K-Anonymity strategy
        pattern = r'\b(?:(?<=\/@)|(?<=acct:))([a-zA-Z0-9]{10})\b'
        matches = list(set(re.findall(pattern, text)))
        if len(matches) > 0:
            print ("[*] Found ID: %s" % (', '.join(matches)))
            try:
                filtered = not all(map(self.pwnedpasswords_test, matches))
            except Exception as e:
                print ("[*] K-Anonymity strategy not working! %s" % (str(e)))
                filtered = True

        # feedback
        if filtered and len(matches) > 0:
            score = 0
            strategies = []

            # check ID with VowelRatio10 strategy
            def vowel_ratio_test(s):
                ratio = self.calculate_vowel_ratio(s)
                return ratio > 0.2 and ratio < 0.8
            if all(map(vowel_ratio_test, matches)):
                score += 1
                strategies.append('VowelRatio10')

            # check ID with Palindrome4 strategy
            if all(map(self.has_palindrome, matches)):
                score += 1
                strategies.append('Palindrome4')

            # check ID with KnownWords4 strategy
            if all(map(self.has_known_word, matches)):
                score += 2
                strategies.append('KnownWords4')

            # check ID with SearchEngine3 strategy
            if librey_apiurl != '' and all(map(self.search_engine_test, matches)):
                score += 1
                strategies.append('SearchEngine3')

            # check ID with RepeatedNumbers3 strategy
            if all(map(self.repeated_numbers_test, matches)):
                score += 1
                strategies.append('RepeatedNumbers3')

            # logging score
            with open('score.log', 'a') as file:
                file.write("%s\t%s\t%s\r\n" % ('+'.join(matches), str(score), '+'.join(strategies)))

            # make decision
            if score > 1:
                filtered = False

        # check an attached images (check images with Not-CAPTCHA strategy)
        if truecaptcha_userid != '' and not filtered and len(matches) > 0:
            def webp_to_png_base64(url):
                try:
                    response = requests.get(url)
                    img = Image.open(io.BytesIO(response.content))
                    img_png = img.convert("RGBA")
                    buffered = io.BytesIO()
                    img_png.save(buffered, format="PNG")
                    encoded_image = base64.b64encode(buffered.getvalue()).decode(client_encoding)
                    return encoded_image
                except:
                    return None

            urls = re.findall(r'https://[^\s"]+\.webp', text)
            if len(urls) > 0:
                for url in urls:
                    if filtered:
                        break

                    print ("[*] downloading... %s" % (url))
                    encoded_image = webp_to_png_base64(url)
                    print ("[*] downloaded.")
                    if encoded_image:
                        print ("[*] solving...")
                        try:
                            solved = self.truecaptcha_solve(encoded_image)
                            if solved:
                                print ("[*] solved: %s" % (solved))
                                filtered = filtered or (solved.lower() in ['ctkpaarr', 'spam'])
                            else:
                                print ("[*] not solved")
                        except Exception as e:
                            print ("[*] Not CAPTCHA strategy not working! %s" % (str(e)))

        return filtered

    # Strategy: K-Anonymity test - use api.pwnedpasswords.com
    def pwnedpasswords_test(self, s):
        # convert to lowercase
        s = s.lower()

        # SHA1 of the password
        p_sha1 = hashlib.sha1(s.encode()).hexdigest()

        # First 5 char of SHA1 for k-anonymity API use
        f5_sha1 = p_sha1[:5]

        # Last 5 char of SHA1 to match API output
        l5_sha1 = p_sha1[-5:]

        # Making GET request using Requests library
        response = requests.get(f'https://api.pwnedpasswords.com/range/{f5_sha1}')

        # Checking if request was successful
        if response.status_code == 200:
            # Parsing response text
            hashes = response.text.split('\r\n')

            # Using list comprehension to find matching hashes
            matching_hashes = [line.split(':')[0] for line in hashes if line.endswith(l5_sha1)]

            # If there are matching hashes, return True, else return False
            return bool(matching_hashes)
        else:
            raise Exception("api.pwnedpasswords.com response status: %s" % (str(response.status_code)))

        return False

    # Strategy: Not-CAPTCHA - use truecaptcha.org
    def truecaptcha_solve(self, encoded_image):
        url = 'https://api.apitruecaptcha.org/one/gettext'
        data = {
            'userid': truecaptcha_userid,
            'apikey': truecaptcha_apikey,
            'data': encoded_image,
            'mode': 'human'
        }
        response = requests.post(url = url, json = data)

        if response.status_code == 200:
            data = response.json()

            if 'error_message' in data:
                print ("[*] Error: %s" % (data['error_message']))
                return None
            if 'result' in data:
                return data['result']
        else:
            raise Exception("api.apitruecaptcha.org response status: %s" % (str(response.status_code)))

        return None

    # Strategy: VowelRatio10
    def calculate_vowel_ratio(self, s):
        # Calculate the length of the string.
        length = len(s)
        if length == 0:
            return 0.0

        # Count the number of vowels ('a', 'e', 'i', 'o', 'u', 'w', 'y') in the string.
        vowel_count = sum(1 for char in s if char.lower() in 'aeiouwy')

        # Define vowel-ending patterns
        vowel_ending_patterns = ['ang', 'eng', 'ing', 'ong', 'ung', 'ank', 'ink', 'dge']

        # Count the occurrences of vowel-ending patterns in the string.
        vowel_count += sum(s.count(pattern) for pattern in vowel_ending_patterns)

        # Calculate the ratio of vowels to the total length of the string.
        vowel_ratio = vowel_count / length

        return vowel_ratio

    # Strategy: Palindrome4
    def has_palindrome(self, input_string):
        def is_palindrome(s):
            return s == s[::-1]

        input_string = input_string.lower()
        n = len(input_string)
        for i in range(n):
            for j in range(i + 4, n + 1): # Find substrings of at least 4 characters
                substring = input_string[i:j]
                if is_palindrome(substring):
                    return True
        return False

    # Strategy: KnownWords4
    def has_known_word(self, input_string):
        def is_known_word(s):
            return s in self.known_words

        input_string = input_string.lower()
        n = len(input_string)
        for i in range(n):
            for j in range(i + 4, n + 1): # Find substrings of at least 4 characters
                substring = input_string[i:j]
                if is_known_word(substring):
                    return True
        return False

    # Strategy: SearchEngine3
    def search_engine_test(self, s):
        url = "%s/api.php?q=%s" % (librey_apiurl, s)
        response = requests.get(url, verify=False)
        if response.status_code != 200:
            return False

        data = response.json()

        if 'results_source' in data:
            del data['results_source']

        num_results = len(data)

        return num_results > 2

    # Strategy: RepeatedNumbers3
    def repeated_numbers_test(self, s):
        return bool(re.search(r'\d{3,}', s))

@@ -1,100 +0,0 @@
#!/usr/bin/python3
#
# wayback.py
# Cached previous page (e.g. Wayback Machine) integration plugin for Caterpillar Proxy
#
# Caterpillar Proxy - The simple and parasitic web proxy with SPAM filter
# Namyheon Go (Catswords Research) <gnh1201@gmail.com>
# https://github.com/gnh1201/caterpillar
# Created at: 2024-03-13
# Updated at: 2024-03-13
#

import requests

from decouple import config
from server import Extension

try:
    client_encoding = config('CLIENT_ENCODING')
except Exception as e:
    print ("[*] Invalid configuration: %s" % (str(e)))

def get_cached_page_from_google(url):
    status_code, text = (0, '')

    # Google Cache URL
    google_cache_url = "https://webcache.googleusercontent.com/search?q=cache:" + url

    # Send a GET request to Google Cache URL
    response = requests.get(google_cache_url)

    # Check if the request was successful (status code 200)
    if response.status_code == 200:
        text = response.text # Extract content from response
    else:
        status_code = response.status_code

    return status_code, text

# API documentation: https://archive.org/help/wayback_api.php
def get_cached_page_from_wayback(url):
    status_code, text = (0, '')

    # Wayback Machine API URL
    wayback_api_url = "http://archive.org/wayback/available?url=" + url

    # Send a GET request to Wayback Machine API
    response = requests.get(wayback_api_url)

    # Check if the request was successful (status code 200)
    if response.status_code == 200:
        try:
            # Parse JSON response
            data = response.json()
            archived_snapshots = data.get("archived_snapshots", {})
            closest_snapshot = archived_snapshots.get("closest", {})

            # Check if the URL is available in the archive
            if closest_snapshot:
                archived_url = closest_snapshot.get("url", "")

                # If URL is available, fetch the content of the archived page
                if archived_url:
                    archived_page_response = requests.get(archived_url)
                    status_code = archived_page_response.status_code
                    if status_code == 200:
                        text = archived_page_response.text
                    else:
                        status_code = 404
                else:
                    status_code = 404
            else:
                status_code = 404
        except:
            status_code = 502
    else:
        status_code = response.status_code

    return status_code, text

class Wayback(Extension):
    def __init__(self):
        self.type = "connector" # this is a connector
        self.connection_type = "wayback"

    def connect(self, conn, data, webserver, port, scheme, method, url):
        connected = False

        target_url = url.decode(client_encoding)

        if not connected:
            status_code, text = get_cached_page_from_google(target_url)
            if status_code == 200:
                conn.send(text.encode(client_encoding))
                connected = True

        if not connected:
            status_code, text = get_cached_page_from_wayback(target_url)
            if status_code == 200:
                conn.send(text.encode(client_encoding))
                connected = True

        return connected

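As a side note, the availability endpoint used by get_cached_page_from_wayback returns JSON roughly shaped like {"archived_snapshots": {"closest": {"url": ...}}}. A quick standalone check of the same endpoint (example.com is only a placeholder URL):

    import requests

    # query the Wayback availability API directly and print the closest snapshot, if any
    info = requests.get("http://archive.org/wayback/available", params={"url": "example.com"}).json()
    print(info.get("archived_snapshots", {}).get("closest", {}))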
@@ -1,2 +1,6 @@
 python-decouple
 requests
+aiosmtpd
+ruff
+flask
+flask_cors
69  ruff.toml  Normal file

@@ -0,0 +1,69 @@
exclude = [
    ".bzr",
    ".direnv",
    ".eggs",
    ".git",
    ".git-rewrite",
    ".hg",
    ".ipynb_checkpoints",
    ".mypy_cache",
    ".nox",
    ".pants.d",
    ".pyenv",
    ".pytest_cache",
    ".pytype",
    ".ruff_cache",
    ".svn",
    ".tox",
    ".venv",
    ".vscode",
    "__pypackages__",
    "_build",
    "buck-out",
    "build",
    "dist",
    "node_modules",
    "site-packages",
    "venv",
    "assets",
    "data"
]

target-version = "py310"

[lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
# McCabe complexity (`C901`) by default.
select = ["E4", "E7", "E9", "F"]
ignore = ["E501"]

# Allow fix for all enabled rules (when `--fix` is provided).
fixable = ["ALL"]

[format]
# Like Black, use double quotes for strings.
quote-style = "double"

# Like Black, indent with spaces, rather than tabs.
indent-style = "space"

# Like Black, respect magic trailing commas.
skip-magic-trailing-comma = false

# Like Black, automatically detect the appropriate line ending.
line-ending = "auto"

# Enable auto-formatting of code examples in docstrings. Markdown,
# reStructuredText code/literal blocks and doctests are all supported.
#
# This is currently disabled by default, but it is planned for this
# to be opt-out in the future.
docstring-code-format = false

# Set the line length limit used when formatting code snippets in
# docstrings.
#
# This only has an effect when the `docstring-code-format` setting is
# enabled.
docstring-code-line-length = "dynamic"
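A note on usage: with this file at the repository root, the standard ruff CLI picks the configuration up automatically, so linting and formatting would typically be run as (not part of this diff):

    ruff check .
    ruff format .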
582  server.py

@@ -7,57 +7,72 @@
 # Namyheon Go (Catswords Research) <gnh1201@gmail.com>
 # https://github.com/gnh1201/caterpillar
 # Created at: 2022-10-06
-# Updated at: 2024-06-20
+# Updated at: 2025-02-17
 #

 import argparse
 import socket
 import sys
 import os
+import re
 from _thread import *
 from subprocess import PIPE, Popen
 import base64
 import json
 import ssl
 import time
-import hashlib
 import traceback
 import textwrap
-import importlib
 from datetime import datetime
 from platform import python_version
+import logging

-import re
 import requests
 from requests.auth import HTTPBasicAuth
 from urllib.parse import urlparse
 from decouple import config

-from base import Extension, extract_credentials, jsonrpc2_create_id, jsonrpc2_encode, jsonrpc2_result_encode
+from base import (
+    Extension,
+    extract_credentials,
+    jsonrpc2_encode,
+    find_openssl_binpath,
+    Logger,
+)

-# initalization
+logger = Logger(name="server", level=logging.DEBUG)

+# initialization
 try:
-    listening_port = config('PORT', default=5555, cast=int)
-    _username, _password, server_url = extract_credentials(config('SERVER_URL', default=''))
-    server_connection_type = config('SERVER_CONNECTION_TYPE', default='')
-    cakey = config('CA_KEY', default='ca.key')
-    cacert = config('CA_CERT', default='ca.crt')
-    certkey = config('CERT_KEY', default='cert.key')
-    certdir = config('CERT_DIR', default='certs/')
-    openssl_binpath = config('OPENSSL_BINPATH', default='openssl')
-    client_encoding = config('CLIENT_ENCODING', default='utf-8')
-    local_domain = config('LOCAL_DOMAIN', default='')
-    proxy_pass = config('PROXY_PASS', default='')
+    listening_port = config("PORT", default=5555, cast=int)
+    _username, _password, server_url = extract_credentials(
+        config("SERVER_URL", default="")
+    )
+    connection_timeout = config("CONNECTION_TIMEOUT", default=5, cast=int)
+    server_connection_type = config("SERVER_CONNECTION_TYPE", default="proxy")
+    ca_key = config("CA_KEY", default="ca.key")
+    ca_cert = config("CA_CERT", default="ca.crt")
+    cert_key = config("CERT_KEY", default="cert.key")
+    cert_dir = config("CERT_DIR", default="certs/")
+    openssl_bin_path = config("OPENSSL_BINPATH", default=find_openssl_binpath())
+    client_encoding = config("CLIENT_ENCODING", default="utf-8")
+    local_domain = config("LOCAL_DOMAIN", default="")
+    proxy_pass = config("PROXY_PASS", default="")
+    use_extensions = config("USE_EXTENSIONS", default="")
 except KeyboardInterrupt:
-    print("\n[*] User has requested an interrupt")
-    print("[*] Application Exiting.....")
+    logger.warning("[*] User has requested an interrupt")
+    logger.warning("[*] Application Exiting.....")
     sys.exit()
 except Exception as e:
-    print("[*] Failed to initialize:", str(e))
+    logger.error("[*] Failed to initialize:", exc_info=e)

 parser = argparse.ArgumentParser()
-parser.add_argument('--max_conn', help="Maximum allowed connections", default=255, type=int)
-parser.add_argument('--buffer_size', help="Number of samples to be used", default=8192, type=int)
+parser.add_argument(
+    "--max_conn", help="Maximum allowed connections", default=255, type=int
+)
+parser.add_argument(
+    "--buffer_size", help="Number of samples to be used", default=8192, type=int
+)

 args = parser.parse_args()
 max_connection = args.max_conn
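For reference, the configuration keys read in the new initialization block come from python-decouple, which resolves them from the environment or a .env file. A minimal .env sketch using only keys shown in this hunk; the values are placeholders taken from the defaults in the diff, not a recommended setup:

    PORT=5555
    SERVER_URL=localhost
    CONNECTION_TIMEOUT=5
    SERVER_CONNECTION_TYPE=proxy
    CA_KEY=ca.key
    CA_CERT=ca.crt
    CERT_KEY=cert.key
    CERT_DIR=certs/
    CLIENT_ENCODING=utf-8
    LOCAL_DOMAIN=
    PROXY_PASS=
    USE_EXTENSIONS=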
@ -67,169 +82,260 @@ resolved_address_list = []
|
||||||
|
|
||||||
# set environment of Extension
|
# set environment of Extension
|
||||||
Extension.set_buffer_size(buffer_size)
|
Extension.set_buffer_size(buffer_size)
|
||||||
Extension.set_protocol('tcp')
|
Extension.set_protocol("tcp")
|
||||||
|
|
||||||
# set basic authentication
|
# set basic authentication
|
||||||
auth = None
|
auth = None
|
||||||
if _username:
|
if _username:
|
||||||
auth = HTTPBasicAuth(_username, _password)
|
auth = HTTPBasicAuth(_username, _password)
|
||||||
|
|
||||||
def parse_first_data(data):
|
|
||||||
parsed_data = (b'', b'', b'', b'', b'')
|
def parse_first_data(data: bytes):
|
||||||
|
parsed_data = (b"", b"", b"", b"", b"")
|
||||||
|
|
||||||
try:
|
try:
|
||||||
first_line = data.split(b'\n')[0]
|
first_line = data.split(b"\n")[0]
|
||||||
|
|
||||||
method, url = first_line.split()[0:2]
|
method, url = first_line.split()[0:2]
|
||||||
|
|
||||||
http_pos = url.find(b'://') #Finding the position of ://
|
http_pos = url.find(b"://") # Finding the position of ://
|
||||||
scheme = b'http' # check http/https or other protocol
|
scheme = b"http" # check http/https or other protocol
|
||||||
if http_pos == -1:
|
if http_pos == -1:
|
||||||
temp = url
|
temp = url
|
||||||
else:
|
else:
|
||||||
temp = url[(http_pos+3):]
|
temp = url[(http_pos + 3) :]
|
||||||
scheme = url[0:http_pos]
|
scheme = url[0:http_pos]
|
||||||
|
|
||||||
port_pos = temp.find(b':')
|
port_pos = temp.find(b":")
|
||||||
|
|
||||||
webserver_pos = temp.find(b'/')
|
webserver_pos = temp.find(b"/")
|
||||||
if webserver_pos == -1:
|
if webserver_pos == -1:
|
||||||
webserver_pos = len(temp)
|
webserver_pos = len(temp)
|
||||||
webserver = b''
|
webserver = b""
|
||||||
port = -1
|
port = -1
|
||||||
if port_pos == -1 or webserver_pos < port_pos:
|
if port_pos == -1 or webserver_pos < port_pos:
|
||||||
port = 80
|
port = 80
|
||||||
webserver = temp[:webserver_pos]
|
webserver = temp[:webserver_pos]
|
||||||
else:
|
else:
|
||||||
port = int((temp[(port_pos+1):])[:webserver_pos-port_pos-1])
|
port = int((temp[(port_pos + 1) :])[: webserver_pos - port_pos - 1])
|
||||||
webserver = temp[:port_pos]
|
webserver = temp[:port_pos]
|
||||||
if port == 443:
|
if port == 443:
|
||||||
scheme = b'https'
|
scheme = b"https"
|
||||||
|
|
||||||
parsed_data = (webserver, port, scheme, method, url)
|
parsed_data = (webserver, port, scheme, method, url)
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("[*] Exception on parsing the header. Cause: %s" % (str(e)))
|
logger.error("[*] Exception on parsing the header", exc_info=e)
|
||||||
|
|
||||||
return parsed_data
|
return parsed_data
|
||||||
|
|
||||||
def conn_string(conn, data, addr):
|
|
||||||
|
def conn_string(conn: socket.socket, data: bytes, addr: bytes):
|
||||||
# JSON-RPC 2.0 request
|
# JSON-RPC 2.0 request
|
||||||
def process_jsonrpc2(data):
|
def process_jsonrpc2(_data: bytes):
|
||||||
jsondata = json.loads(data.decode(client_encoding, errors='ignore'))
|
json_data = json.loads(_data.decode(client_encoding, errors="ignore"))
|
||||||
if jsondata['jsonrpc'] == "2.0":
|
if json_data["jsonrpc"] == "2.0":
|
||||||
jsonrpc2_server(conn, jsondata['id'], jsondata['method'], jsondata['params'])
|
jsonrpc2_server(
|
||||||
|
conn, json_data["id"], json_data["method"], json_data["params"]
|
||||||
|
)
|
||||||
return True
|
return True
|
||||||
return False
|
return False
|
||||||
|
|
||||||
|
# debugging
|
||||||
|
logger.debug("@ " + ("%s:%s" % addr))
|
||||||
|
logger.debug("> " + data.hex(' '))
|
||||||
|
|
||||||
# JSON-RPC 2.0 request over Socket (stateful)
|
# JSON-RPC 2.0 request over Socket (stateful)
|
||||||
if data.find(b'{') == 0 and process_jsonrpc2(data):
|
if data.find(b"{") == 0 and process_jsonrpc2(data):
|
||||||
# will be close by the client
|
# will be close by the client
|
||||||
return
|
return
|
||||||
|
|
||||||
|
# Check a preludes in connectors
|
||||||
|
connector = Extension.test_connectors(data)
|
||||||
|
if connector:
|
||||||
|
logger.info("[*] Connecting...")
|
||||||
|
connector.connect(conn, data, b'', b'', b'', b'', b'')
|
||||||
|
return
|
||||||
|
|
||||||
# parse first data (header)
|
# parse first data (header)
|
||||||
webserver, port, scheme, method, url = parse_first_data(data)
|
webserver, port, scheme, method, url = parse_first_data(data)
|
||||||
|
|
||||||
# JSON-RPC 2.0 request over HTTP (stateless)
|
# JSON-RPC 2.0 request over HTTP (stateless)
|
||||||
path = urlparse(url.decode(client_encoding)).path
|
path = urlparse(url.decode(client_encoding)).path
|
||||||
if path == "/proxy-cgi/jsonrpc2":
|
if path == "/proxy-cgi/jsonrpc2":
|
||||||
conn.send(b'HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n')
|
conn.send(b"HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n")
|
||||||
pos = data.find(b'\r\n\r\n')
|
pos = data.find(b"\r\n\r\n")
|
||||||
if pos > -1 and process_jsonrpc2(data[pos+4:]):
|
if pos > -1 and process_jsonrpc2(data[pos + 4 :]):
|
||||||
conn.close() # will be close by the server
|
conn.close() # will be close by the server
|
||||||
return
|
return
|
||||||
|
|
||||||
# if it is reverse proxy
|
# if it is reverse proxy
|
||||||
if local_domain != '':
|
local_domains = list(filter(None, map(str.strip, local_domain.split(','))))
|
||||||
localserver = local_domain.encode(client_encoding)
|
for domain in local_domains:
|
||||||
if webserver == localserver or data.find(b'\nHost: ' + localserver) > -1:
|
localserver = domain.encode(client_encoding)
|
||||||
print ("[*] Detected the reverse proxy request: %s" % (local_domain))
|
|
||||||
scheme, _webserver, _port = proxy_pass.encode(client_encoding).split(b':')
|
# Resolve a cache mismatch issue when making requests to a local domain.
|
||||||
|
header_end = data.find(b"\r\n\r\n")
|
||||||
|
header_section_data = data[:header_end] if header_end > -1 else b''
|
||||||
|
header_host_pattern = re.compile(rb"\n\s*host\s*:\s*" + re.escape(localserver), re.IGNORECASE)
|
||||||
|
if webserver == localserver or header_host_pattern.search(header_section_data):
|
||||||
|
logger.info("[*] Reverse proxy requested: %s" % local_domain)
|
||||||
|
scheme, _webserver, _port = proxy_pass.encode(client_encoding).split(b":")
|
||||||
webserver = _webserver[2:]
|
webserver = _webserver[2:]
|
||||||
port = int(_port.decode(client_encoding))
|
port = int(_port.decode(client_encoding))
|
||||||
|
method = b"CONNECT" if scheme == b"https" else method # proxy pass on HTTPS
|
||||||
|
break
|
||||||
|
|
||||||
proxy_server(webserver, port, scheme, method, url, conn, addr, data)
|
proxy_server(webserver, port, scheme, method, url, conn, addr, data)
|
||||||
|
|
||||||
def jsonrpc2_server(conn, id, method, params):
|
|
||||||
|
def jsonrpc2_server(
|
||||||
|
conn: socket.socket, _id: str, method: str, params: dict[str, str | int]
|
||||||
|
):
|
||||||
if method == "relay_accept":
|
if method == "relay_accept":
|
||||||
accepted_relay[id] = conn
|
accepted_relay[_id] = conn
|
||||||
connection_speed = params['connection_speed']
|
connection_speed = params["connection_speed"]
|
||||||
print ("[*] connection speed: %s miliseconds" % (str(connection_speed)))
|
logger.info("[*] connection speed: %s milliseconds" % str(connection_speed))
|
||||||
while conn.fileno() > -1:
|
while conn.fileno() > -1:
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
del accepted_relay[id]
|
del accepted_relay[_id]
|
||||||
print ("[*] relay destroyed: %s" % (id))
|
logger.info("[*] relay destroyed: %s" % _id)
|
||||||
else:
|
else:
|
||||||
Extension.dispatch_rpcmethod(method, "call", id, params, conn)
|
Extension.dispatch_rpcmethod(method, "call", _id, params, conn)
|
||||||
|
|
||||||
#return in conn_string()
|
# return in conn_string()
|
||||||
|
|
||||||
def proxy_connect(webserver, conn):
|
|
||||||
|
def proxy_connect(webserver: bytes, conn: socket.socket):
|
||||||
hostname = webserver.decode(client_encoding)
|
hostname = webserver.decode(client_encoding)
|
||||||
certpath = "%s/%s.crt" % (certdir.rstrip('/'), hostname)
|
cert_path = "%s/%s.crt" % (cert_dir.rstrip("/"), hostname)
|
||||||
|
|
||||||
|
if not os.path.exists(cert_dir):
|
||||||
|
os.makedirs(cert_dir)
|
||||||
|
|
||||||
# https://stackoverflow.com/questions/24055036/handle-https-request-in-proxy-server-by-c-sharp-connect-tunnel
|
# https://stackoverflow.com/questions/24055036/handle-https-request-in-proxy-server-by-c-sharp-connect-tunnel
|
||||||
conn.send(b'HTTP/1.1 200 Connection Established\r\n\r\n')
|
conn.send(b"HTTP/1.1 200 Connection Established\r\n\r\n")
|
||||||
|
|
||||||
# https://github.com/inaz2/proxy2/blob/master/proxy2.py
|
# https://github.com/inaz2/proxy2/blob/master/proxy2.py
|
||||||
try:
|
try:
|
||||||
if not os.path.isfile(certpath):
|
if not os.path.isfile(cert_path):
|
||||||
epoch = "%d" % (time.time() * 1000)
|
epoch = "%d" % (time.time() * 1000)
|
||||||
p1 = Popen([openssl_binpath, "req", "-new", "-key", certkey, "-subj", "/CN=%s" % hostname], stdout=PIPE)
|
p1 = Popen(
|
||||||
p2 = Popen([openssl_binpath, "x509", "-req", "-days", "3650", "-CA", cacert, "-CAkey", cakey, "-set_serial", epoch, "-out", certpath], stdin=p1.stdout, stderr=PIPE)
|
[
|
||||||
|
openssl_bin_path,
|
||||||
|
"req",
|
||||||
|
"-new",
|
||||||
|
"-key",
|
||||||
|
cert_key,
|
||||||
|
"-subj",
|
||||||
|
"/CN=%s" % hostname,
|
||||||
|
],
|
||||||
|
stdout=PIPE,
|
||||||
|
)
|
||||||
|
p2 = Popen(
|
||||||
|
[
|
||||||
|
openssl_bin_path,
|
||||||
|
"x509",
|
||||||
|
"-req",
|
||||||
|
"-days",
|
||||||
|
"3650",
|
||||||
|
"-CA",
|
||||||
|
ca_cert,
|
||||||
|
"-CAkey",
|
||||||
|
ca_key,
|
||||||
|
"-set_serial",
|
||||||
|
epoch,
|
||||||
|
"-out",
|
||||||
|
cert_path,
|
||||||
|
],
|
||||||
|
stdin=p1.stdout,
|
||||||
|
stderr=PIPE,
|
||||||
|
)
|
||||||
p2.communicate()
|
p2.communicate()
|
||||||
|
except FileNotFoundError as e:
|
||||||
|
logger.error(
|
||||||
|
"[*] OpenSSL distribution not found on this system. Skipping certificate issuance.",
|
||||||
|
exc_info=e,
|
||||||
|
)
|
||||||
|
cert_path = "default.crt"
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print("[*] Skipped generating the certificate. Cause: %s" % (str(e)))
|
logger.error("[*] Skipping certificate issuance.", exc_info=e)
|
||||||
|
cert_path = "default.crt"
|
||||||
|
|
||||||
|
logger.info("[*] Certificate file: %s" % cert_path)
|
||||||
|
logger.info("[*] Private key file: %s" % cert_key)
|
||||||
|
|
||||||
# https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server
|
# https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server
|
||||||
# https://docs.python.org/3/library/ssl.html
|
# https://docs.python.org/3/library/ssl.html
|
||||||
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
|
||||||
context.load_cert_chain(certpath, certkey)
|
context.check_hostname = False
|
||||||
|
context.verify_mode = ssl.CERT_NONE
|
||||||
|
context.load_cert_chain(certfile=cert_path, keyfile=cert_key)
|
||||||
|
|
||||||
# https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server
|
try:
|
||||||
conn = context.wrap_socket(conn, server_side=True)
|
# https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server
|
||||||
data = conn.recv(buffer_size)
|
conn = context.wrap_socket(conn, server_side=True)
|
||||||
|
data = conn.recv(buffer_size)
|
||||||
|
except ssl.SSLError as e:
|
||||||
|
logger.error(
|
||||||
|
"[*] SSL negotiation failed.",
|
||||||
|
exc_info=e,
|
||||||
|
)
|
||||||
|
return conn, b""
|
||||||
|
|
||||||
return (conn, data)
|
return conn, data
|
||||||
|
|
||||||
def proxy_check_filtered(data, webserver, port, scheme, method, url):
|
|
||||||
|
def proxy_check_filtered(
|
||||||
|
data: bytes, webserver: bytes, port: bytes, scheme: bytes, method: bytes, url: bytes
|
||||||
|
):
|
||||||
filtered = False
|
filtered = False
|
||||||
|
|
||||||
filters = Extension.get_filters()
|
filters = Extension.get_filters()
|
||||||
print ("[*] Checking data with %s filters..." % (str(len(filters))))
|
logger.info("[*] Checking data with %s filters..." % (str(len(filters))))
|
||||||
for f in filters:
|
for f in filters:
|
||||||
filtered = f.test(filtered, data, webserver, port, scheme, method, url)
|
filtered = f.test(filtered, data, webserver, port, scheme, method, url)
|
||||||
|
|
||||||
return filtered
|
return filtered
|
||||||
|
|
||||||
def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
|
|
||||||
|
def proxy_server(
|
||||||
|
webserver: bytes,
|
||||||
|
port: bytes,
|
||||||
|
scheme: bytes,
|
||||||
|
method: bytes,
|
||||||
|
url: bytes,
|
||||||
|
conn: socket.socket,
|
||||||
|
addr: bytes,
|
||||||
|
data: bytes,
|
||||||
|
):
|
||||||
try:
|
try:
|
||||||
print("[*] Started the request. %s" % (str(addr[0])))
|
logger.info("[*] Started the request. %s" % (str(addr[0])))
|
||||||
|
|
||||||
# SSL negotiation
|
# SSL negotiation
|
||||||
is_ssl = scheme in [b'https', b'tls', b'ssl']
|
is_ssl = scheme in [b"https", b"tls", b"ssl"]
|
||||||
if is_ssl and method == b'CONNECT':
|
if is_ssl and method == b"CONNECT":
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
conn, data = proxy_connect(webserver, conn)
|
conn, data = proxy_connect(webserver, conn)
|
||||||
break # success
|
break # success
|
||||||
#except OSError as e:
|
# except OSError as e:
|
||||||
# print ("[*] Retrying SSL negotiation... (%s:%s) %s" % (webserver.decode(client_encoding), str(port), str(e)))
|
# print ("[*] Retrying SSL negotiation... (%s:%s) %s" % (webserver.decode(client_encoding), str(port), str(e)))
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
raise Exception("SSL negotiation failed. (%s:%s) %s" % (webserver.decode(client_encoding), str(port), str(e)))
|
raise Exception(
|
||||||
|
"SSL negotiation failed. (%s:%s) %s"
|
||||||
|
% (webserver.decode(client_encoding), str(port), str(e))
|
||||||
|
)
|
||||||
|
|
||||||
# override data
|
# override data
|
||||||
if is_ssl:
|
if is_ssl:
|
||||||
_, _, _, method, url = parse_first_data(data)
|
_, _, _, method, url = parse_first_data(data)
|
||||||
|
|
||||||
# https://stackoverflow.com/questions/44343739/python-sockets-ssl-eof-occurred-in-violation-of-protocol
|
# https://stackoverflow.com/questions/44343739/python-sockets-ssl-eof-occurred-in-violation-of-protocol
|
||||||
def sock_close(sock, is_ssl = False):
|
def sock_close(_sock: socket.socket):
|
||||||
#if is_ssl:
|
_sock.close()
|
||||||
# sock = sock.unwrap()
|
|
||||||
#sock.shutdown(socket.SHUT_RDWR)
|
|
||||||
sock.close()
|
|
||||||
|
|
||||||
# Wait to see if there is more data to transmit
|
# Wait to see if there is more data to transmit
|
||||||
def sendall(sock, conn, data):
|
def sendall(_sock: socket.socket, _conn: socket.socket, _data: bytes):
|
||||||
# send first chuck
|
# send first chuck
|
||||||
if proxy_check_filtered(data, webserver, port, scheme, method, url):
|
if proxy_check_filtered(data, webserver, port, scheme, method, url):
|
||||||
sock.close()
|
sock.close()
|
||||||
|
@ -239,25 +345,27 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
|
||||||
return
|
return
|
||||||
|
|
||||||
# send following chunks
|
# send following chunks
|
||||||
buffered = b''
|
buffered = b""
|
||||||
conn.settimeout(1)
|
conn.settimeout(connection_timeout)
|
||||||
while True:
|
while True:
|
||||||
try:
|
try:
|
||||||
chunk = conn.recv(buffer_size)
|
chunk = conn.recv(buffer_size)
|
||||||
if not chunk:
|
if not chunk:
|
||||||
break
|
break
|
||||||
buffered += chunk
|
buffered += chunk
|
||||||
if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
|
if proxy_check_filtered(
|
||||||
sock_close(sock, is_ssl)
|
buffered, webserver, port, scheme, method, url
|
||||||
|
):
|
||||||
|
sock_close(sock)
|
||||||
raise Exception("Filtered request")
|
raise Exception("Filtered request")
|
||||||
sock.send(chunk)
|
sock.send(chunk)
|
||||||
if len(buffered) > buffer_size*2:
|
if len(buffered) > buffer_size * 2:
|
||||||
buffered = buffered[-buffer_size*2:]
|
buffered = buffered[-buffer_size * 2 :]
|
||||||
except:
|
except:
|
||||||
break
|
break
|
||||||
|
|
||||||
# localhost mode
|
# localhost mode
|
||||||
if server_url == "localhost":
|
if server_url == "localhost" and server_connection_type == "proxy":
|
||||||
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
|
||||||
|
|
||||||
if is_ssl:
|
if is_ssl:
|
||||||
|
@ -265,159 +373,226 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
|
||||||
context.check_hostname = False
|
context.check_hostname = False
|
||||||
context.verify_mode = ssl.CERT_NONE
|
context.verify_mode = ssl.CERT_NONE
|
||||||
|
|
||||||
sock = context.wrap_socket(sock, server_hostname=webserver.decode(client_encoding))
|
sock = context.wrap_socket(
|
||||||
|
sock, server_hostname=webserver.decode(client_encoding)
|
||||||
|
)
|
||||||
sock.connect((webserver, port))
|
sock.connect((webserver, port))
|
||||||
#sock.sendall(data)
|
# sock.sendall(data)
|
||||||
sendall(sock, conn, data)
|
sendall(sock, conn, data)
|
||||||
else:
|
else:
|
||||||
sock.connect((webserver, port))
|
sock.connect((webserver, port))
|
||||||
#sock.sendall(data)
|
# sock.sendall(data)
|
||||||
sendall(sock, conn, data)
|
sendall(sock, conn, data)
|
||||||
|
|
||||||
i = 0
|
i = 0
|
||||||
is_http_403 = False
|
is_http_403 = False
|
||||||
buffered = b''
|
_buffered = b""
|
||||||
while True:
|
while True:
|
||||||
chunk = sock.recv(buffer_size)
|
chunk = sock.recv(buffer_size)
|
||||||
if not chunk:
|
if not chunk:
|
||||||
break
|
break
|
||||||
if i == 0 and chunk.find(b'HTTP/1.1 403') == 0:
|
if i == 0 and chunk.find(b"HTTP/1.1 403") == 0:
|
||||||
is_http_403 = True
|
is_http_403 = True
|
||||||
break
|
break
|
||||||
buffered += chunk
|
_buffered += chunk
|
||||||
if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
|
if proxy_check_filtered(
|
||||||
sock_close(sock, is_ssl)
|
_buffered, webserver, port, scheme, method, url
|
||||||
add_filtered_host(webserver.decode(client_encoding), '127.0.0.1')
|
):
|
||||||
|
sock_close(sock)
|
||||||
|
add_filtered_host(webserver.decode(client_encoding), "127.0.0.1")
|
||||||
raise Exception("Filtered response")
|
raise Exception("Filtered response")
|
||||||
conn.send(chunk)
|
conn.send(chunk)
|
||||||
if len(buffered) > buffer_size*2:
|
if len(_buffered) > buffer_size * 2:
|
||||||
buffered = buffered[-buffer_size*2:]
|
_buffered = _buffered[-buffer_size * 2 :]
|
||||||
i += 1
|
i += 1
|
||||||
|
|
||||||
# when blocked
|
# when blocked
|
||||||
if is_http_403:
|
if is_http_403:
|
||||||
print ("[*] Blocked the request by remote server: %s" % (webserver.decode(client_encoding)))
|
logger.warning(
|
||||||
|
"[*] Blocked the request by remote server: %s"
|
||||||
|
% webserver.decode(client_encoding)
|
||||||
|
)
|
||||||
|
|
||||||
def bypass_callback(response, *args, **kwargs):
|
def bypass_callback(response: requests.Response):
|
||||||
if response.status_code != 200:
|
if response.status_code != 200:
|
||||||
conn.sendall(b"HTTP/1.1 403 Forbidden\r\n\r\n{\"status\":403}")
|
conn.sendall(b'HTTP/1.1 403 Forbidden\r\n\r\n{"status":403}')
|
||||||
return
|
return
|
||||||
|
|
||||||
# https://stackoverflow.com/questions/20658572/python-requests-print-entire-http-request-raw
|
# https://stackoverflow.com/questions/20658572/python-requests-print-entire-http-request-raw
|
||||||
format_headers = lambda d: '\r\n'.join(f'{k}: {v}' for k, v in d.items())
|
format_headers = lambda d: "\r\n".join(
|
||||||
|
f"{k}: {v}" for k, v in d.items()
|
||||||
|
)
|
||||||
|
|
||||||
first_data = textwrap.dedent('HTTP/1.1 {res.status_code} {res.reason}\r\n{reshdrs}\r\n\r\n').format(
|
first_data = (
|
||||||
res=response,
|
textwrap.dedent(
|
||||||
reshdrs=format_headers(response.headers),
|
"HTTP/1.1 {res.status_code} {res.reason}\r\n{reshdrs}\r\n\r\n"
|
||||||
).encode(client_encoding)
|
)
|
||||||
|
.format(
|
||||||
|
res=response,
|
||||||
|
reshdrs=format_headers(response.headers),
|
||||||
|
)
|
||||||
|
.encode(client_encoding)
|
||||||
|
)
|
||||||
conn.send(first_data)
|
conn.send(first_data)
|
||||||
|
|
||||||
for chunk in response.iter_content(chunk_size=buffer_size):
|
for chunk in response.iter_content(chunk_size=buffer_size):
|
||||||
conn.send(chunk)
|
conn.send(chunk)
|
||||||
|
|
||||||
if is_ssl and method == b'GET':
|
if is_ssl and method == b"GET":
|
||||||
print ("[*] Trying to bypass blocked request...")
|
logger.info("[*] Trying to bypass blocked request...")
|
||||||
remote_url = "%s://%s%s" % (scheme.decode(client_encoding), webserver.decode(client_encoding), url.decode(client_encoding))
|
remote_url = "%s://%s%s" % (
|
||||||
requests.get(remote_url, stream=True, verify=False, hooks={'response': bypass_callback})
|
scheme.decode(client_encoding),
|
||||||
|
webserver.decode(client_encoding),
|
||||||
|
url.decode(client_encoding),
|
||||||
|
)
|
||||||
|
requests.get(
|
||||||
|
remote_url,
|
||||||
|
stream=True,
|
||||||
|
verify=False,
|
||||||
|
hooks={"response": bypass_callback},
|
||||||
|
)
|
||||||
else:
|
else:
|
||||||
conn.sendall(b"HTTP/1.1 403 Forbidden\r\n\r\n{\"status\":403}")
|
conn.sendall(b'HTTP/1.1 403 Forbidden\r\n\r\n{"status":403}')
|
||||||
|
|
||||||
sock_close(sock, is_ssl)
|
sock_close(sock)
|
||||||
|
|
||||||
print("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))
|
logger.info(
|
||||||
|
"[*] Received %s chunks. (%s bytes per chunk)"
|
||||||
|
% (str(i), str(buffer_size))
|
||||||
|
)
|
||||||
|
|
||||||
# stateful mode
|
# stateful mode
|
||||||
elif server_connection_type == "stateful":
|
elif server_connection_type == "stateful":
|
||||||
|
client_address = str(addr[0])
|
||||||
|
|
||||||
proxy_data = {
|
proxy_data = {
|
||||||
'headers': {
|
"headers": {
|
||||||
"User-Agent": "php-httpproxy/0.1.5 (Client; Python " + python_version() + "; abuse@catswords.net)",
|
"User-Agent": "php-httpproxy/0.1.5 (Client; Python "
|
||||||
|
+ python_version()
|
||||||
|
+ "; abuse@catswords.net)",
|
||||||
},
|
},
|
||||||
'data': {
|
"data": {
|
||||||
"buffer_size": str(buffer_size),
|
"buffer_size": str(buffer_size),
|
||||||
"client_address": str(addr[0]),
|
"client_address": client_address,
|
||||||
"client_port": str(listening_port),
|
"client_port": str(listening_port),
|
||||||
"client_encoding": client_encoding,
|
"client_encoding": client_encoding,
|
||||||
"remote_address": webserver.decode(client_encoding),
|
"remote_address": webserver.decode(client_encoding),
|
||||||
"remote_port": str(port),
|
"remote_port": str(port),
|
||||||
"scheme": scheme.decode(client_encoding),
|
"scheme": scheme.decode(client_encoding),
|
||||||
"datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
|
"datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"),
|
||||||
}
|
},
|
||||||
}
|
}
|
||||||
|
|
||||||
# get client address
|
# get client address
|
||||||
print ("[*] resolving the client address...")
|
logger.info("[*] Resolving the client address...")
|
||||||
while len(resolved_address_list) == 0:
|
while len(resolved_address_list) == 0:
|
||||||
try:
|
try:
|
||||||
_, query_data = jsonrpc2_encode('get_client_address')
|
_, query_data = jsonrpc2_encode("get_client_address")
|
||||||
query = requests.post(server_url, headers=proxy_data['headers'], data=query_data, timeout=1, auth=auth)
|
query = requests.post(
|
||||||
|
server_url,
|
||||||
|
headers=proxy_data["headers"],
|
||||||
|
data=query_data,
|
||||||
|
timeout=1,
|
||||||
|
auth=auth,
|
||||||
|
)
|
||||||
if query.status_code == 200:
|
if query.status_code == 200:
|
||||||
result = query.json()['result']
|
result = query.json()["result"]
|
||||||
resolved_address_list.append(result['data'])
|
|
||||||
print ("[*] resolved IP: %s" % (result['data']))
|
if isinstance(result["data"], str):
|
||||||
except requests.exceptions.ReadTimeout as e:
|
client_address = result["data"]
|
||||||
pass
|
resolved_address_list.append(client_address)
|
||||||
proxy_data['data']['client_address'] = resolved_address_list[0]
|
elif isinstance(result["data"], list):
|
||||||
|
client_address = result["data"][0]
|
||||||
|
resolved_address_list.append(client_address)
|
||||||
|
else:
|
||||||
|
logger.warn("[*] Failed to resolve a client address. Retrying...")
|
||||||
|
else:
|
||||||
|
logger.warn("[*] Failed to resolve a client address. Retrying...")
|
||||||
|
except requests.exceptions.ReadTimeout:
|
||||||
|
logger.warn("[*] Failed to resolve a client address. Retrying...")
|
||||||
|
|
||||||
|
# update the client address
|
||||||
|
logger.info("[*] Use the client address: %s" % (client_address))
|
||||||
|
proxy_data["data"]["client_address"] = client_address
|
||||||
|
|
||||||
# build a tunnel
|
# build a tunnel
|
||||||
def relay_connect(id, raw_data, proxy_data):
|
def relay_connect(id, raw_data, proxy_data):
|
||||||
try:
|
try:
|
||||||
# The tunnel connect forever until the client destroy it
|
# The tunnel connect forever until the client destroy it
|
||||||
relay = requests.post(server_url, headers=proxy_data['headers'], data=raw_data, stream=True, timeout=None, auth=auth)
|
relay = requests.post(
|
||||||
|
server_url,
|
||||||
|
headers=proxy_data["headers"],
|
||||||
|
data=raw_data,
|
||||||
|
stream=True,
|
||||||
|
timeout=None,
|
||||||
|
auth=auth,
|
||||||
|
)
|
||||||
for chunk in relay.iter_content(chunk_size=buffer_size):
|
for chunk in relay.iter_content(chunk_size=buffer_size):
|
||||||
jsondata = json.loads(chunk.decode(client_encoding, errors='ignore'))
|
jsondata = json.loads(
|
||||||
if jsondata['jsonrpc'] == "2.0" and ("error" in jsondata):
|
chunk.decode(client_encoding, errors="ignore")
|
||||||
e = jsondata['error']
|
)
|
||||||
print ("[*] Error received from the relay server: (%s) %s" % (str(e['code']), str(e['message'])))
|
if jsondata["jsonrpc"] == "2.0" and ("error" in jsondata):
|
||||||
|
e = jsondata["error"]
|
||||||
|
logger.error(
|
||||||
|
"[*] Error received from the relay server: (%s) %s"
|
||||||
|
% (str(e["code"]), str(e["message"]))
|
||||||
|
)
|
||||||
except requests.exceptions.ReadTimeout as e:
|
except requests.exceptions.ReadTimeout as e:
|
||||||
pass
|
pass
|
||||||
id, raw_data = jsonrpc2_encode('relay_connect', proxy_data['data'])
|
|
||||||
|
id, raw_data = jsonrpc2_encode("relay_connect", proxy_data["data"])
|
||||||
start_new_thread(relay_connect, (id, raw_data, proxy_data))
|
start_new_thread(relay_connect, (id, raw_data, proxy_data))
|
||||||
|
|
||||||
# wait for the relay
|
# wait for the relay
|
||||||
print ("[*] waiting for the relay... %s" % (id))
|
logger.info("[*] waiting for the relay... %s" % id)
|
||||||
max_reties = 30
|
max_reties = 30
|
||||||
t = 0
|
t = 0
|
||||||
while t < max_reties and not id in accepted_relay:
|
while t < max_reties and id not in accepted_relay:
|
||||||
time.sleep(1)
|
time.sleep(1)
|
||||||
t += 1
|
t += 1
|
||||||
if t < max_reties:
|
if t < max_reties:
|
||||||
sock = accepted_relay[id]
|
sock = accepted_relay[id]
|
||||||
print ("[*] connected the relay. %s" % (id))
|
logger.info("[*] connected the relay. %s" % id)
|
||||||
sendall(sock, conn, data)
|
sendall(sock, conn, data)
|
||||||
else:
|
else:
|
||||||
resolved_address_list.remove(resolved_address_list[0])
|
resolved_address_list.remove(resolved_address_list[0])
|
||||||
print ("[*] the relay is gone. %s" % (id))
|
logger.info("[*] the relay is gone. %s" % id)
|
||||||
sock_close(sock, is_ssl)
|
sock_close(sock)
|
||||||
return
|
return
|
||||||
|
|
||||||
# get response
|
# get response
|
||||||
i = 0
|
i = 0
|
||||||
buffered = b''
|
buffered = b""
|
||||||
while True:
|
while True:
|
||||||
chunk = sock.recv(buffer_size)
|
_chunk = sock.recv(buffer_size)
|
||||||
if not chunk:
|
if not _chunk:
|
||||||
break
|
break
|
||||||
buffered += chunk
|
buffered += _chunk
|
||||||
if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
|
if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
|
||||||
sock_close(sock, is_ssl)
|
sock_close(sock)
|
||||||
add_filtered_host(webserver.decode(client_encoding), '127.0.0.1')
|
add_filtered_host(webserver.decode(client_encoding), "127.0.0.1")
|
||||||
raise Exception("Filtered response")
|
raise Exception("Filtered response")
|
||||||
conn.send(chunk)
|
conn.send(_chunk)
|
||||||
if len(buffered) > buffer_size*2:
|
if len(buffered) > buffer_size * 2:
|
||||||
buffered = buffered[-buffer_size*2:]
|
buffered = buffered[-buffer_size * 2 :]
|
||||||
i += 1
|
i += 1
|
||||||
|
|
||||||
sock_close(sock, is_ssl)
|
sock_close(sock)
|
||||||
|
|
||||||
print("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))
|
logger.info(
|
||||||
|
"[*] Received %s chunks. (%s bytes per chunk)"
|
||||||
|
% (str(i), str(buffer_size))
|
||||||
|
)
|
||||||
|
|
||||||
# stateless mode
|
# stateless mode
|
||||||
elif server_connection_type == "stateless":
|
elif server_connection_type == "stateless":
|
||||||
proxy_data = {
|
proxy_data = {
|
||||||
'headers': {
|
"headers": {
|
||||||
"User-Agent": "php-httpproxy/0.1.5 (Client; Python " + python_version() + "; abuse@catswords.net)",
|
"User-Agent": "php-httpproxy/0.1.5 (Client; Python "
|
||||||
|
+ python_version()
|
||||||
|
+ "; abuse@catswords.net)",
|
||||||
},
|
},
|
||||||
'data': {
|
"data": {
|
||||||
"buffer_size": str(buffer_size),
|
"buffer_size": str(buffer_size),
|
||||||
"request_data": base64.b64encode(data).decode(client_encoding),
|
"request_data": base64.b64encode(data).decode(client_encoding),
|
||||||
"request_length": str(len(data)),
|
"request_length": str(len(data)),
|
||||||
|
@ -427,81 +602,110 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
|
||||||
"remote_address": webserver.decode(client_encoding),
|
"remote_address": webserver.decode(client_encoding),
|
||||||
"remote_port": str(port),
|
"remote_port": str(port),
|
||||||
"scheme": scheme.decode(client_encoding),
|
"scheme": scheme.decode(client_encoding),
|
||||||
"datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
|
"datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"),
|
||||||
}
|
},
|
||||||
}
|
}
|
||||||
_, raw_data = jsonrpc2_encode('relay_request', proxy_data['data'])
|
_, raw_data = jsonrpc2_encode("relay_request", proxy_data["data"])
|
||||||
|
|
||||||
print("[*] Sending %s bytes..." % (str(len(raw_data))))
|
logger.info("[*] Sending %s bytes..." % (str(len(raw_data))))
|
||||||
|
|
||||||
i = 0
|
i = 0
|
||||||
relay = requests.post(server_url, headers=proxy_data['headers'], data=raw_data, stream=True, auth=auth)
|
relay = requests.post(
|
||||||
buffered = b''
|
server_url,
|
||||||
|
headers=proxy_data["headers"],
|
||||||
|
data=raw_data,
|
||||||
|
stream=True,
|
||||||
|
auth=auth,
|
||||||
|
)
|
||||||
|
buffered = b""
|
||||||
for chunk in relay.iter_content(chunk_size=buffer_size):
|
for chunk in relay.iter_content(chunk_size=buffer_size):
|
||||||
buffered += chunk
|
buffered += chunk
|
||||||
if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
|
if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
|
||||||
add_filtered_host(webserver.decode(client_encoding), '127.0.0.1')
|
add_filtered_host(webserver.decode(client_encoding), "127.0.0.1")
|
||||||
raise Exception("Filtered response")
|
raise Exception("Filtered response")
|
||||||
conn.send(chunk)
|
conn.send(chunk)
|
||||||
if len(buffered) > buffer_size*2:
|
if len(buffered) > buffer_size * 2:
|
||||||
buffered = buffered[-buffer_size*2:]
|
buffered = buffered[-buffer_size * 2 :]
|
||||||
i += 1
|
i += 1
|
||||||
|
|
||||||
print("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))
|
logger.info(
|
||||||
|
"[*] Received %s chunks. (%s bytes per chunk)"
|
||||||
|
% (str(i), str(buffer_size))
|
||||||
|
)
|
||||||
|
|
||||||
# nothing at all
|
# nothing at all
|
||||||
else:
|
else:
|
||||||
connector = Extension.get_connector(server_connection_type)
|
connector = Extension.get_connector(server_connection_type)
|
||||||
if connector:
|
if connector:
|
||||||
|
logger.info("[*] Connecting...")
|
||||||
connector.connect(conn, data, webserver, port, scheme, method, url)
|
connector.connect(conn, data, webserver, port, scheme, method, url)
|
||||||
else:
|
else:
|
||||||
raise Exception("Unsupported connection type")
|
raise Exception("[*] The request from " + ("%s:%s" % addr) + " is ignored due to an undefined connector type.")
|
||||||
|
|
||||||
print("[*] Request and received. Done. %s" % (str(addr[0])))
|
logger.info("[*] Request and received. Done. %s" % (str(addr[0])))
|
||||||
conn.close()
|
conn.close()
|
||||||
except Exception as e:
|
except Exception as e:
|
||||||
print(traceback.format_exc())
|
print(traceback.format_exc())
|
||||||
print("[*] Exception on requesting the data. Cause: %s" % (str(e)))
|
logger.warning("[*] Ignored the request.", exc_info=e)
|
||||||
conn.sendall(b"HTTP/1.1 403 Forbidden\r\n\r\n{\"status\":403}")
|
conn.sendall(b'HTTP/1.1 403 Forbidden\r\n\r\n{"status":403}')
|
||||||
conn.close()
|
conn.close()
|
||||||
|
|
||||||
|
|
||||||
# journaling a filtered hosts
|
# journaling a filtered hosts
|
||||||
def add_filtered_host(domain, ip_address):
|
def add_filtered_host(domain: str, ip_address: str):
|
||||||
hosts_path = './filtered.hosts'
|
hosts_path = "./filtered.hosts"
|
||||||
with open(hosts_path, 'r') as file:
|
with open(hosts_path, "r") as file:
|
||||||
lines = file.readlines()
|
lines = file.readlines()
|
||||||
|
|
||||||
domain_exists = any(domain in line for line in lines)
|
domain_exists = any(domain in line for line in lines)
|
||||||
if not domain_exists:
|
if not domain_exists:
|
||||||
lines.append(f"{ip_address}\t{domain}\n")
|
lines.append(f"{ip_address}\t{domain}\n")
|
||||||
with open(hosts_path, 'w') as file:
|
with open(hosts_path, "w") as file:
|
||||||
file.writelines(lines)
|
file.writelines(lines)
|
||||||
|
|
||||||
-def start(): #Main Program
+def start():  # Main Program
     try:
         sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
-        sock.bind(('', listening_port))
+        sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
+        sock.bind(("", listening_port))
         sock.listen(max_connection)
-        print("[*] Server started successfully [ %d ]" %(listening_port))
+        logger.warning("[*] Server started successfully [ %d ]" % listening_port)
     except Exception as e:
-        print("[*] Unable to Initialize Socket:", str(e))
+        logger.error("[*] Unable to Initialize Socket", exc_info=e)
         sys.exit(2)

+    def recv(conn):
+        conn.settimeout(connection_timeout)
+
+        try:
+            data = conn.recv(buffer_size)
+            if not data:
+                data = b''
+        except socket.timeout:
+            logger.warning(f"No data received from " + ("%s:%s" % addr) + ". Attempting to request data.")
+            data = b''
+
+        return data
+
     while True:
         try:
-            conn, addr = sock.accept() #Accept connection from client browser
-            data = conn.recv(buffer_size) #Recieve client data
-            start_new_thread(conn_string, (conn, data, addr)) #Starting a thread
+            conn, addr = sock.accept()  # Accept connection from client browser
+            data = recv(conn)  # Receive client data
+            start_new_thread(conn_string, (conn, data, addr))  # Starting a thread
         except KeyboardInterrupt:
             sock.close()
-            print("\n[*] Graceful Shutdown")
+            logger.info("[*] Graceful Shutdown")
             sys.exit(1)

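The new recv() helper guards the accept loop against clients that connect but never send anything: conn.settimeout() turns an idle read into socket.timeout, which is caught and mapped to an empty payload instead of blocking the worker. A minimal standalone sketch of the same pattern, independent of the project's configuration (the 5-second timeout and 8192-byte buffer are illustrative values):

    import socket

    def recv_with_timeout(conn: socket.socket, timeout: float = 5.0, bufsize: int = 8192) -> bytes:
        # Raise socket.timeout if nothing arrives within `timeout` seconds.
        conn.settimeout(timeout)
        try:
            data = conn.recv(bufsize)
            return data or b""
        except socket.timeout:
            # Treat a silent client as "no data" and let the caller decide what to do.
            return b""
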
if __name__== "__main__":
|
if __name__ == "__main__":
|
||||||
# load extensions
|
# Fix Value error
|
||||||
#Extension.register(importlib.import_module("plugins.fediverse").Fediverse())
|
if use_extensions:
|
||||||
#Extension.register(importlib.import_module("plugins.container").Container())
|
# load extensions
|
||||||
#Extension.register(importlib.import_module("plugins.wayback").Wayback())
|
for s in use_extensions.split(","):
|
||||||
|
Extension.register(s)
|
||||||
|
else:
|
||||||
|
logger.warning("[*] No extensions registered")
|
||||||
|
|
||||||
# start Caterpillar
|
# start Caterpillar
|
||||||
start()
|
start()
|
||||||
|
|
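Both __main__ blocks in this change (server.py above and web.py below) replace the hard-coded Extension.register(...) calls with a comma-separated USE_EXTENSIONS setting read via python-decouple, so plugins are chosen per deployment instead of by editing source. A hedged sketch of the loading logic; the plugin names in the comment are taken from the previously commented-out imports, and the exact strings accepted by Extension.register() depend on base.py:

    from decouple import config

    # Hypothetical .env entry:
    #   USE_EXTENSIONS=wayback,container,fediverse
    use_extensions = config("USE_EXTENSIONS", default="")

    if use_extensions:
        for s in use_extensions.split(","):
            print("would register:", s)  # server.py calls Extension.register(s) here
    else:
        print("no extensions configured")
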
smtp.py (129 changes)
@@ -1,88 +1,113 @@
 #!/usr/bin/python3
 #
 # smtp.py
-# SMTP over HTTP gateway
+# SMTP mail sender over HTTP/S
 #
 # Caterpillar Proxy - The simple web debugging proxy (formerly, php-httpproxy)
 # Namyheon Go (Catswords Research) <gnh1201@gmail.com>
 # https://github.com/gnh1201/caterpillar
 # Created at: 2024-03-01
-# Updated at: 2024-05-20
+# Updated at: 2024-07-12
 #
-import asyncore
-from smtpd import SMTPServer
-import re
-import json
+import asyncio
+from aiosmtpd.controller import Controller
+from email.message import EmailMessage
+import sys
 import requests
+from platform import python_version
 from decouple import config
 from requests.auth import HTTPBasicAuth
-from base import extract_credentials, jsonrpc2_create_id, jsonrpc2_encode, jsonrpc2_result_encode
+from base import (
+    extract_credentials,
+    jsonrpc2_encode,
+    Logger, jsonrpc2_decode,
+)
+
+logger = Logger(name="smtp")

 try:
-    smtp_host = config('SMTP_HOST', default='127.0.0.1')
-    smtp_port = config('SMTP_PORT', default=25, cast=int)
-    _username, _password, server_url = extract_credentials(config('SERVER_URL', default=''))
+    smtp_host = config("SMTP_HOST", default="127.0.0.1")
+    smtp_port = config("SMTP_PORT", default=25, cast=int)
+    _username, _password, server_url = extract_credentials(
+        config("SERVER_URL", default="")
+    )
 except KeyboardInterrupt:
-    print("\n[*] User has requested an interrupt")
-    print("[*] Application Exiting.....")
+    logger.warning("[*] User has requested an interrupt")
+    logger.warning("[*] Application Exiting.....")
     sys.exit()

 auth = None
 if _username:
     auth = HTTPBasicAuth(_username, _password)

-class CaterpillarSMTPServer(SMTPServer):
-    def __init__(self, localaddr, remoteaddr):
-        self.__class__.smtpd_hostname = "CaterpillarSMTPServer"
-        self.__class__.smtp_version = "0.1.6"
-        super().__init__(localaddr, remoteaddr)

-    def process_message(self, peer, mailfrom, rcpttos, data, **kwargs):
-        message_lines = data.decode('utf-8').split('\n')
-        subject = ''
-        to = ''
-        for line in message_lines:
-            pos = line.find(':')
-            if pos > -1:
-                k = line[0:pos]
-                v = line[pos+1:]
-                if k == 'Subject':
-                    subject = v
-                elif k == 'To':
-                    to = v
+class CaterpillarSMTPHandler:
+    def __init__(self):
+        self.smtpd_hostname = "CaterpillarSMTPServer"
+        self.smtp_version = "0.1.6"
+
+    async def handle_DATA(self, server, session, envelope):
+        mail_from = envelope.mail_from
+        rcpt_tos = envelope.rcpt_tos
+        data = envelope.content
+
+        message = EmailMessage()
+        message.set_content(data)
+
+        subject = message.get("Subject", "")
+        to = message.get("To", "")

-        # build a data
         proxy_data = {
-            'headers': {
-                "User-Agent": "php-httpproxy/0.1.6 (Client; Python " + python_version() + "; Caterpillar; abuse@catswords.net)",
+            "headers": {
+                "User-Agent": "php-httpproxy/0.1.6 (Client; Python "
+                + python_version()
+                + "; Caterpillar; abuse@catswords.net)",
             },
-            'data': {
+            "data": {
                 "to": to,
-                "from": mailfrom,
+                "from": mail_from,
                 "subject": subject,
-                "message": data.decode('utf-8')
-            }
+                "message": data.decode("utf-8"),
+            },
         }
-        _, raw_data = jsonrpc2_encode('relay_sendmail', proxy_data['data'])
+        _, raw_data = jsonrpc2_encode("relay_sendmail", proxy_data["data"])

-        # send HTTP POST request
         try:
-            response = requests.post(server_url, headers=proxy_data['headers'], data=raw_data, auth=auth)
+            response = await asyncio.to_thread(
+                requests.post,
+                server_url,
+                headers=proxy_data["headers"],
+                data=raw_data,
+                auth=auth,
+            )
             if response.status_code == 200:
-                type, id, method, rpcdata = jsonrpc2_decode(response.text)
-                if rpcdata['success']:
-                    print("[*] Email sent successfully.")
+                _type, _id, rpc_data = jsonrpc2_decode(response.text)
+                if rpc_data["success"]:
+                    logger.info("[*] Email sent successfully.")
                 else:
-                    raise Exception("(%s) %s" % (str(rpcdata['code']), rpcdata['message']))
+                    raise Exception(f"({rpc_data['code']}) {rpc_data['message']}")
             else:
-                raise Exception("Status %s" % (str(response.status_code)))
+                raise Exception(f"Status {response.status_code}")
         except Exception as e:
-            print("[*] Failed to send email:", str(e))
+            logger.error("[*] Failed to send email", exc_info=e)
+            return "500 Could not process your message. " + str(e)
+
+        return "250 OK"

-# Start SMTP server
-smtp_server = CaterpillarSMTPServer((smtp_host, smtp_port), None)

-# Start asynchronous event loop
-asyncore.loop()
+# https://aiosmtpd-pepoluan.readthedocs.io/en/latest/migrating.html
+def main():
+    handler = CaterpillarSMTPHandler()
+    controller = Controller(handler, hostname=smtp_host, port=smtp_port)
+    # Run the event loop in a separate thread.
+    controller.start()
+    # Wait for the user to press Return.
+    input("SMTP server running. Press Return to stop server and exit.")
+    controller.stop()
+    logger.warning("[*] User has requested an interrupt")
+    logger.warning("[*] Application Exiting.....")
+    sys.exit()
+
+
+if __name__ == "__main__":
+    main()

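This rewrite drops the smtpd/asyncore stack, which was removed from the standard library in Python 3.12, in favor of aiosmtpd: the SMTPServer subclass becomes a plain handler object exposing an async handle_DATA() coroutine, and a Controller runs the event loop in a background thread. A minimal sketch of that pattern, independent of the Caterpillar-specific relay logic (the hostname and port are placeholders):

    from aiosmtpd.controller import Controller

    class PrintHandler:
        async def handle_DATA(self, server, session, envelope):
            # envelope.content is the raw message as bytes.
            print("mail from:", envelope.mail_from, "to:", envelope.rcpt_tos)
            return "250 OK"

    if __name__ == "__main__":
        controller = Controller(PrintHandler(), hostname="127.0.0.1", port=8025)
        controller.start()  # runs the asyncio loop in a separate thread
        input("SMTP server running. Press Return to stop.")
        controller.stop()
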
web.py (99 changes)
@@ -7,93 +7,108 @@
 # Namyheon Go (Catswords Research) <gnh1201@gmail.com>
 # https://github.com/gnh1201/caterpillar
 # Created at: 2024-05-20
-# Updated at: 2024-05-20
+# Updated at: 2024-10-25
 #

-from flask import Flask, request, redirect, url_for, render_template
 import os
 import sys
-import json
-import importlib
-import hashlib
 from decouple import config
+from flask import Flask, request, render_template
+from flask_cors import CORS
+from base import Extension, jsonrpc2_error_encode, Logger

-from base import Extension, jsonrpc2_create_id, jsonrpc2_result_encode, jsonrpc2_error_encode
+# TODO: a custom Flask handler should be implemented later
+logger = Logger(name="web")
 app = Flask(__name__)
-app.config['UPLOAD_FOLDER'] = 'data/'
+CORS(app)
+app.config["UPLOAD_FOLDER"] = "data/"
+if not os.path.exists(app.config["UPLOAD_FOLDER"]):
+    os.makedirs(app.config["UPLOAD_FOLDER"])

-if not os.path.exists(app.config['UPLOAD_FOLDER']):
-    os.makedirs(app.config['UPLOAD_FOLDER'])

-@app.route('/')
+@app.route("/")
 def upload_form():
-    return render_template('upload.html')
+    return render_template("upload.html")

-@app.route('/upload', methods=['POST'])
+
+@app.route("/upload", methods=["POST"])
 def process_upload():
     # make connection profile from Flask request
     conn = Connection(request)

     # pass to the method
-    method = request.form['method']
-    filename = request.files['file'].filename
-    params = {
-        'filename': filename
-    }
+    method = request.form["method"]
+    filename = request.files["file"].filename
+    params = {"filename": filename}

     # just do it
-    return Extension.dispatch_rpcmethod(method, 'call', '', params, conn)
+    return Extension.dispatch_rpcmethod(method, "call", "", params, conn)

-@app.route('/jsonrpc2', methods=['POST'])
+
+@app.route("/jsonrpc2", methods=["POST"])
 def process_jsonrpc2():
     # make connection profile from Flask request
     conn = Connection(request)

     # JSON-RPC 2.0 request
-    jsondata = request.get_json(silent=True)
-    if jsondata['jsonrpc'] == "2.0":
-        return Extension.dispatch_rpcmethod(jsondata['method'], 'call', jsondata['id'], jsondata['params'], conn)
+    json_data = request.get_json(silent=True)
+    if json_data["jsonrpc"] == "2.0":
+        result = Extension.dispatch_rpcmethod(
+            json_data["method"], "call", json_data["id"], json_data["params"], conn)
+
+        return {
+            "jsonrpc": "2.0",
+            "result": {
+                "data": result
+            },
+            "id": None
+        }

     # when error
-    return jsonrpc2_error_encode({
-        'message': "Not vaild JSON-RPC 2.0 request"
-    })
+    return jsonrpc2_error_encode({"message": "Not valid JSON-RPC 2.0 request"})

-def jsonrpc2_server(conn, id, method, params):
-    return Extension.dispatch_rpcmethod(method, "call", id, params, conn)

-class Connection():
+def jsonrpc2_server(conn, _id, method, params):
+    return Extension.dispatch_rpcmethod(method, "call", _id, params, conn)
+
+
+class Connection:
     def send(self, data):
         self.messages.append(data)

     def recv(self, size):
-        print ("Not allowed method")
+        logger.info("Not allowed method")

     def close(self):
-        print ("Not allowed method")
+        logger.info("Not allowed method")

     def __init__(self, req):
         self.messages = []
         self.request = req


 if __name__ == "__main__":
-    # initalization
+    # initialization
     try:
-        listening_port = config('PORT', default=5555, cast=int)
-        client_encoding = config('CLIENT_ENCODING', default='utf-8')
+        listening_port = config("PORT", default=5555, cast=int)
+        client_encoding = config("CLIENT_ENCODING", default="utf-8")
+        use_extensions = config("USE_EXTENSIONS", default="")
     except KeyboardInterrupt:
-        print("\n[*] User has requested an interrupt")
-        print("[*] Application Exiting.....")
+        logger.warning("[*] User has requested an interrupt")
+        logger.warning("[*] Application Exiting.....")
         sys.exit()
     except Exception as e:
-        print("[*] Failed to initialize:", str(e))
+        logger.error("[*] Failed to initialize", exc_info=e)

     # set environment of Extension
-    Extension.set_protocol('http')
+    Extension.set_protocol("http")

-    # load extensions
-    #Extension.register(importlib.import_module("plugins.yourownplugin").YourOwnPlugin())
+    # Fix Value error
+    if use_extensions:
+        # load extensions
+        for s in use_extensions.split(","):
+            Extension.register(s)
+    else:
+        logger.warning("[*] No extensions registered")

-    app.run(debug=True, host='0.0.0.0', port=listening_port)
+    app.run(debug=True, host="0.0.0.0", port=listening_port)

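With this change the /jsonrpc2 route no longer returns the raw dispatch result; it wraps it in a JSON-RPC 2.0 response object (note that "id" is currently returned as null rather than echoing the request id). A hedged client-side sketch against a local instance on the default port 5555; the method name and params are placeholders and must match an RPC method exposed by one of the registered extensions:

    import requests

    payload = {
        "jsonrpc": "2.0",
        "method": "your_rpc_method",   # hypothetical method name
        "params": {"example": "value"},
        "id": 1,
    }
    resp = requests.post("http://127.0.0.1:5555/jsonrpc2", json=payload)
    print(resp.json())  # on success: {"jsonrpc": "2.0", "result": {"data": ...}, "id": None}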