Compare commits

...

276 Commits

Author SHA1 Message Date
f493d026b8 Add additional PHP classes 2025-03-21 11:31:17 +09:00
f7256c674a
Update README.md 2025-03-13 10:41:40 +09:00
feee46aabd
Merge pull request #51 from gnh1201/dev
Allow a local source
2025-03-11 14:57:58 +09:00
02554d75a9
Update llm-code-review.yml 2025-03-11 14:54:58 +09:00
4eea005aa0 Update index.php 2025-03-11 14:42:32 +09:00
b7f9b96bc4 Update index.php 2025-03-11 14:28:00 +09:00
75dba7093a
Create llm-code-review.yml 2025-03-11 14:23:19 +09:00
caf0afa73a
Update README.md 2025-02-17 10:34:42 +09:00
4f46d3e75f
Update server.py 2025-02-17 10:04:36 +09:00
6d368eb9e6
Update index.php 2025-02-17 09:46:01 +09:00
cb215bb423
Update index.php 2025-02-13 13:25:03 +09:00
efb2401a66
Update index.php 2025-02-13 13:24:49 +09:00
f2ead73592 Add relay_get_env_hash method 2025-02-09 02:53:21 +09:00
cc41ac4a2e Fix an user agent resolved incorrectly 2025-02-09 02:26:03 +09:00
479eb560da
Update index.php 2025-02-09 02:03:44 +09:00
768dad77cf
Update index.php 2025-02-09 02:03:10 +09:00
3b24c6c209
Update index.php 2025-02-09 01:57:13 +09:00
54c6f6f33e
Update index.php 2025-02-09 01:52:46 +09:00
4486c1d411
Update console.html 2025-01-22 13:37:24 +09:00
7efc6612c7
Update README.md 2025-01-20 12:07:08 +09:00
d34f68c8a1
Update README.md 2025-01-09 11:33:49 +09:00
0c634f6da0
Update README.md 2025-01-09 11:02:50 +09:00
ddac81a036
Update README.md 2025-01-08 17:59:51 +09:00
03b2315211
Delete .github/workflows/lint.yml 2025-01-06 17:02:47 +09:00
a97160f9a0
Update index.php 2025-01-06 16:16:08 +09:00
895cc03d31
Update index.php 2025-01-02 22:33:56 +09:00
10c91d5045
Update index.php 2025-01-02 22:11:30 +09:00
51de2628bf
Update index.php 2025-01-02 22:11:21 +09:00
376fd71b07
Update index.php 2025-01-02 22:04:44 +09:00
0bfc39a5e9
Update index.php 2025-01-02 21:55:16 +09:00
be5496aa16
Update index.php 2025-01-02 21:18:07 +09:00
02befd1c17
Update index.php 2025-01-02 21:13:09 +09:00
9926e1564d
Update README.md 2024-12-29 06:31:30 +09:00
18ec101d84
Update README.md 2024-12-28 17:26:32 +09:00
d7cc297a80 Fix bugs and add execution time measureing in PHP worker 2024-11-26 02:42:59 +09:00
24e05065f1 Update index.php 2024-11-26 02:22:55 +09:00
9a6b68cf9a Fix incorrect user agent, Update the dynamic loading feature 2024-11-26 01:18:08 +09:00
359d070b54
Update README.md 2024-11-26 00:08:18 +09:00
09c542431d Merge branch 'main' of https://github.com/gnh1201/caterpillar 2024-11-26 00:01:55 +09:00
604a4d7886 Add the dynamic loading feature in relay_invoke_method method 2024-11-26 00:01:50 +09:00
bb0710b723
Update README.md 2024-11-25 21:33:07 +09:00
3977d0c719 Update index.php 2024-11-24 01:49:28 +09:00
3d0f55c1ee Update console.html 2024-11-24 01:41:15 +09:00
297f0595f7 Update console.html 2024-11-24 01:39:50 +09:00
e3b5a344e3 Update console.html 2024-11-24 01:35:04 +09:00
78eb86800c Update index.php 2024-11-24 01:25:37 +09:00
8803fb7f05 Update index.php 2024-11-23 21:59:44 +09:00
0c0cbd5247 Update index.php 2024-11-23 21:50:51 +09:00
8d22483225 Update index.php 2024-11-23 18:27:08 +09:00
40a42c2811
Update README.md 2024-11-20 19:31:12 +09:00
286d75642a
Update README.md 2024-11-20 19:04:40 +09:00
e783b641be
Update README.md 2024-11-20 19:04:07 +09:00
660cfb3818 Update console.html 2024-11-18 22:07:56 +09:00
2ddef30daf disable upgrade-insecure-requests 2024-11-18 22:00:25 +09:00
cac5b29280
Update README.md 2024-11-18 21:22:17 +09:00
5960bb5732
Update README.md 2024-11-18 21:21:55 +09:00
3ffc8ca29c
Update server.py 2024-11-18 21:05:07 +09:00
85f2b19b46
Update server.py 2024-11-18 21:02:12 +09:00
9cc6bb3b08
Update server.py 2024-11-14 17:00:38 +09:00
69a3c5f323
Update server.py 2024-11-14 16:47:09 +09:00
943ff478aa
Update server.py 2024-11-14 16:17:04 +09:00
50265ad56b
Update server.py 2024-11-14 15:26:09 +09:00
a50edb3c77
Update server.py 2024-11-14 15:24:21 +09:00
1ebbd96340
Update base.py 2024-11-14 15:13:42 +09:00
0ad61d0a30
Update README.md 2024-11-13 16:06:33 +09:00
65f387dbeb
Update lint.yml 2024-11-13 03:58:08 +09:00
d592109de4
Update server.py 2024-11-13 03:56:06 +09:00
fbe2f6fa87 Update README.md 2024-11-12 19:06:20 +09:00
b0cc2652ba
Update server.py 2024-11-12 17:28:06 +09:00
f343020ae6
Update server.py 2024-11-12 17:27:39 +09:00
d8dd92f9c0
Update server.py 2024-11-12 17:26:37 +09:00
ed91362515
Update base.py 2024-11-12 17:09:40 +09:00
c19a38a008
Update base.py 2024-11-12 16:10:20 +09:00
5567325620
Update base.py 2024-11-12 16:10:09 +09:00
75aec1d8bf Update the method relay_web_search 2024-11-08 06:17:26 +09:00
0791e79be9 Update console.html 2024-11-08 06:13:55 +09:00
fedfc5f041 Add method relay_web_search 2024-11-08 05:45:12 +09:00
2f828252c5
Update index.php 2024-11-08 05:32:13 +09:00
09ac94bf00
Update index.php 2024-11-08 05:10:15 +09:00
ff381b8e3e Update console.html 2024-11-08 03:45:41 +09:00
7810e85dec Update console.html 2024-11-08 03:32:22 +09:00
1c77b640dd Update console.html 2024-11-08 03:22:50 +09:00
564d3dba03 Update console.html 2024-11-08 02:17:15 +09:00
006b1b17bd Update console.html 2024-11-08 01:33:05 +09:00
add701f92d Update console.html 2024-11-07 14:42:48 +09:00
be2f38d276 Update console.html 2024-11-06 18:47:29 +09:00
a1abaee646 Update console.html 2024-11-06 18:46:17 +09:00
36804b3763 Update console.html 2024-11-06 18:43:50 +09:00
ea0a24ee5f Update console.html 2024-11-06 18:42:08 +09:00
6c16083d9b
Update FUNDING.yml 2024-11-06 16:21:45 +09:00
7e63b0b00b
Update FUNDING.yml 2024-11-06 16:16:50 +09:00
6c0d5193a6
Update FUNDING.yml 2024-11-06 16:14:20 +09:00
e79a7cf68a
Update server.py 2024-11-04 18:00:31 +09:00
e067afc735
Update server.py 2024-11-04 17:37:59 +09:00
9f069b48e6
Update server.py 2024-11-04 17:34:55 +09:00
18738fe80b
Update server.py 2024-11-04 17:17:32 +09:00
99f960307d
Fix a cache overfitting issue: use re.IGNORECASE 2024-11-04 17:17:19 +09:00
3af8879adb Fix bugs 2024-10-25 15:27:45 +09:00
965423addb Update plugins 2024-10-25 11:49:00 +09:00
549cc9a8f9 Update plugins 2024-10-25 11:44:42 +09:00
20ddfbbcbb Update plugins 2024-10-25 11:42:31 +09:00
441fd81a0e Update plugins 2024-10-25 11:13:50 +09:00
5efe392ace Update plugins 2024-10-25 11:03:33 +09:00
05b51f7e7f Update plugins 2024-10-25 10:22:04 +09:00
0759dbffaf Update console.html, plugins 2024-10-25 09:36:33 +09:00
44425dbb8b Fix bugs when dispatch the RPC method 2024-10-25 08:59:49 +09:00
08212459eb Update plugins 2024-10-25 07:15:09 +09:00
ef72ba9296 Update console.html, plugins 2024-10-25 07:08:39 +09:00
2fa3f1471f
Update index.php 2024-10-25 01:52:03 +09:00
a71b6023ae
Update index.php 2024-10-25 01:51:38 +09:00
dc65f9a827
Fix Call to undefined function mysqli_fetch_all() when try a mysql query 2024-10-25 01:51:26 +09:00
e75a5a4b2d
Revert LICENSE to MIT 2024-10-24 14:43:08 +09:00
0e936a044d
Delete requirements-dev.txt 2024-10-24 14:16:54 +09:00
a6fd4515f1
Update requirements.txt 2024-10-24 14:16:40 +09:00
66b73730dc Update plugins 2024-10-23 17:07:42 +09:00
5a11042f7f Update plugins 2024-10-20 00:08:24 +09:00
d473dd569c Update plugins 2024-10-19 21:56:14 +09:00
22dcce06ab
Update README.md 2024-10-19 17:39:37 +09:00
1a65c9fdb8
Update server.py 2024-10-19 16:22:46 +09:00
bf8635c8b7
Update README.md 2024-10-19 16:15:53 +09:00
0c3f32d4f8
Update README.md 2024-10-18 22:33:38 +09:00
db9454a568
Update README.md 2024-10-18 22:32:37 +09:00
94252ba409 Add submodule caterpillar-plugins 2024-10-18 22:07:28 +09:00
acc6393658 Remove all plug-ins 2024-10-18 22:06:47 +09:00
a7371b1fa2
Update fediverse.py
2024-10-09 04:20:31 +09:00
0d543d2da9
Update fediverse.py 2024-10-09 04:17:25 +09:00
7472260de7
Update fediverse.py 2024-10-09 04:11:15 +09:00
2fb49ccf5f
Update README.md 2024-10-09 04:05:42 +09:00
796123f83b
Update server.py 2024-10-09 03:56:10 +09:00
1d43b64ce9
Update server.py 2024-10-09 03:55:24 +09:00
071e768c53
Update fediverse.py 2024-10-09 03:50:52 +09:00
a6ea467f6c
Update server.py 2024-10-09 03:49:38 +09:00
bd2e017598
Update fediverse.py 2024-10-09 03:33:21 +09:00
447b152f85
Update fediverse.py 2024-10-09 03:21:14 +09:00
3de3620b1f
Update fediverse.py 2024-10-09 03:16:25 +09:00
eafb738ad2
Update fediverse.py 2024-10-09 03:13:58 +09:00
539b7c3b58
Update fediverse.py 2024-10-09 03:11:52 +09:00
a376b8084d
Update fediverse.py 2024-10-09 03:08:47 +09:00
56c8c62aa6
Update fediverse.py 2024-10-09 03:07:34 +09:00
58e7322555
Update fediverse.py 2024-10-09 03:06:25 +09:00
c272efe8b1
Update fediverse.py
2024-10-09 01:32:45 +09:00
a0775bd15a
Update fediverse.py 2024-10-09 01:15:10 +09:00
f81d2f4649
Update fediverse.py 2024-10-09 01:11:17 +09:00
579a7fe89c
Update fediverse.py 2024-10-09 01:04:10 +09:00
0466dffb07
Update server.py 2024-10-09 01:03:43 +09:00
d5b65c71b1
Update server.py 2024-10-09 01:03:12 +09:00
c0ac6151c2
Update server.py 2024-10-09 01:02:07 +09:00
ea10dd83fd
Update fediverse.py 2024-10-09 00:46:01 +09:00
0b1bfadd8a
Update fediverse.py 2024-10-08 23:55:37 +09:00
5618186699
Update base.py 2024-10-08 23:36:14 +09:00
eb701292ce
Update base.py 2024-10-08 23:36:02 +09:00
f01c5d26a0
Update fediverse.py 2024-10-08 23:01:06 +09:00
bc08241aa2
Merge pull request #45 from zeroday0619/refactoring
feat: refactoring typed programing
2024-09-13 11:32:08 +09:00
Euiseo Cha
6b99ee97ce
feat: refactoring typed programing 2024-08-31 17:46:03 +09:00
Euiseo Cha
910e5e4ed5
feat: refactoring typed programing 2024-08-31 15:48:35 +09:00
Euiseo Cha
93e0b4edd9
feat: refactoring typed programing 2024-08-31 14:37:21 +09:00
9c2b66fb07 Update requirements-dev.txt
2024-08-29 11:47:49 +09:00
44d68203fe Update requirements-dev.txt
2024-08-28 22:26:03 +09:00
ea379fb750 Update requirements-dev.txt 2024-08-28 21:56:25 +09:00
148e9a20cf Update requirements-dev.txt 2024-08-28 21:48:46 +09:00
d1ba38ca0a Create .env.example
2024-08-28 20:15:50 +09:00
b5cd9d79ab
Update requirements-dev.txt
2024-08-21 07:56:06 +09:00
2314327358
Merge pull request #42 from gnh1201/change-license-to-gplv3
Change license to GPLv3 / 라이선스 GPLv3로 변경
2024-08-21 07:35:41 +09:00
b10a58f502
Update README.md
2024-08-20 20:11:55 +09:00
0c52169f7a
Update README.md 2024-08-20 20:11:10 +09:00
0daa8840ef
Update README.md 2024-08-20 20:07:41 +09:00
08d60f4248
Update README.md 2024-08-20 20:04:35 +09:00
889f21d484
Update README.md 2024-08-20 20:03:30 +09:00
fe8738a2a4
Update README.md 2024-08-20 20:00:55 +09:00
3648be7e94
Update README.md 2024-08-20 19:54:52 +09:00
7f644eed54
Update README.md 2024-08-20 19:51:50 +09:00
1fcebe78b4
Update README.md 2024-08-20 19:51:29 +09:00
ec4d38ed6f
Update README.md 2024-08-20 19:48:56 +09:00
f937a0314b
Update README.md 2024-08-20 19:46:03 +09:00
3ec236e955
Update README.md 2024-08-20 19:45:04 +09:00
3a5ed1d983
Merge pull request #43 from fossabot/add-license-scan-badge
Add license scan report and status
2024-08-20 19:39:48 +09:00
fossabot
33b7e075c5 Add license scan report and status
Signed off by: fossabot <badges@fossa.com>
2024-08-20 04:38:47 -06:00
e18e288beb Revert "Update LICENSE"
This reverts commit fbe0d7f1e2.
2024-08-20 19:32:37 +09:00
fbe0d7f1e2
Update LICENSE 2024-08-20 17:33:08 +09:00
0d1eea08eb
Change license to GPLv3 2024-08-20 17:15:37 +09:00
b47c89db14
Update README.md
2024-08-20 14:50:06 +09:00
6fa63100b7 Update the cover image 2024-08-20 14:36:26 +09:00
6353cb69ad
Merge pull request #41 from AkiaCode/serial
Implement simple serial connector
2024-08-12 21:24:09 +09:00
AkiaCode
b13a55a18b
Change author 2024-08-12 15:12:50 +09:00
AkiaCode
bdd6615670
implement simple serial connector 2024-08-11 02:29:27 +09:00
724f9f071e
Merge pull request #40 from gnh1201/elasticsearch
Implement the Always Online Cache with Elasticsearch / Always Online Cache 기능 중 엘라스틱서치 관련 구현
2024-08-01 15:16:56 +09:00
c23d2adefa ruff checked 2024-07-31 15:39:48 +09:00
b845fe9356 Add AlwaysOnline feature with Elasticsearch 2024-07-31 15:36:16 +09:00
6fa48ac64b Add elasticsearch webpage cache 2024-07-31 13:39:09 +09:00
cccae65676
Update README.md
2024-07-19 11:43:00 +09:00
7139092c12
Merge pull request #39 from gnh1201/Container
add docker container lifecycle methods
2024-07-17 09:35:07 +09:00
67dc16d976
Update index.php
2024-07-15 00:28:51 +09:00
486b12f643
Merge pull request #37 from gnh1201/yara
Adopt VirusTotal/yara (Pattern matching)
2024-07-14 19:12:57 +09:00
tkgka
13494e285b add docker container lifecycle methods 2024-07-14 18:46:02 +09:00
7abc36d66f
Merge pull request #38 from zeroday0619/ruff-check-gh-action
GitHub action configure for ruff action
2024-07-13 21:04:38 +09:00
feb7cff398
Merge pull request #36 from gnh1201/smtp
SMTP fix #35 / SMTP 서버 수정
2024-07-13 21:03:55 +09:00
5bff160d17
Merge branch 'main' into smtp 2024-07-13 21:03:36 +09:00
Euiseo Cha
4be3fa4df8
Merge branch 'gnh1201:main' into ruff-check-gh-action 2024-07-13 20:35:18 +09:00
d7acbf42f0
Merge pull request #34 from gnh1201/ssl-negotiation
Fix SSL negotiation + ruff formatted / SSL 협상 관련 수정 및 ruff format 적용
2024-07-13 19:21:33 +09:00
Euiseo Cha
27bb1616c2
feat: github action configure for ruff action 2024-07-13 18:35:03 +09:00
7b2d3529f5
Update README.md 2024-07-12 16:28:40 +09:00
68ef47b569
Update README.md 2024-07-12 11:28:05 +09:00
52b0949ce1
Update README.md 2024-07-12 11:23:42 +09:00
b5c8cc7b87 Adopt VirusTotal/yara (Pattern matching) 2024-07-12 10:35:50 +09:00
1d39e8a3b6 SMTP fix #35 2024-07-12 10:14:39 +09:00
ce3c6e7623 Update roadmap.png 2024-07-12 00:59:42 +09:00
43c3ff3466 Update roadmap.png 2024-07-12 00:43:24 +09:00
9bf5078294 Fix SSL negotiation 2024-07-12 00:34:33 +09:00
c206ee99e5 Update roadmap.png 2024-07-11 21:42:23 +09:00
1832801918 Update roadmap.png 2024-07-11 21:07:57 +09:00
f25ed75eb1 Update roadmap.png 2024-07-11 21:05:18 +09:00
1414824f86 Update roadmap.png 2024-07-11 21:03:19 +09:00
6de1888077 Add roadmap image 2024-07-11 20:53:06 +09:00
e2442a6290 Fix SSL negotiation #32 2024-07-11 19:03:34 +09:00
77ae320f40 ruff formatted 2024-07-11 19:02:08 +09:00
1a8022df73 Revert "Fix SSL negotiation + ruff formatted"
This reverts commit 57ed60fd01.
2024-07-11 19:00:58 +09:00
57ed60fd01 Fix SSL negotiation + ruff formatted 2024-07-11 18:42:10 +09:00
f1d2d58374
Merge pull request #28 from zeroday0619/logging-system
[Critical] Implemented a custom logger and fixed some invalid code implementations and fixed some misspellings.
2024-07-11 17:12:40 +09:00
60bcc14a93
Update server.py 2024-07-11 16:45:21 +09:00
afc974ae37
Rename certs/download_certs.sh to download_certs.sh 2024-07-11 16:43:39 +09:00
67160232d0
Rename certs/download_certs.bat to download_certs.bat 2024-07-11 16:43:28 +09:00
7078a1a3b8
Rename download_certs.sh to certs/download_certs.sh 2024-07-11 16:35:57 +09:00
9fa17e8c7b
Rename download_certs.bat to certs/download_certs.bat 2024-07-11 16:35:43 +09:00
7fdf83be9d
Merge pull request #26 from gnh1201/certs_downloader
Add the local certificate downloader / 로컬 인증서 다운로더 추가
2024-07-11 16:32:59 +09:00
Euiseo Cha
8f38f3d5de
fix: use_extension value extension 2024-07-11 16:05:56 +09:00
Euiseo Cha
391fc021d6
fix: use_extension value extension 2024-07-11 16:02:51 +09:00
Euiseo Cha
4d97e006e5
Merge branch 'gnh1201:main' into logging-system 2024-07-11 15:41:13 +09:00
caca4e3f65
One more fix #24 2024-07-11 15:38:38 +09:00
aa009b5a27 Revert "One more fix #24"
This reverts commit fffac9dcb7.
2024-07-11 15:37:43 +09:00
fffac9dcb7
One more fix #24 2024-07-11 15:36:28 +09:00
Euiseo Cha
1b47fb744a
Merge branch 'main' into logging-system 2024-07-11 15:27:31 +09:00
4d3af6c128
Merge branch 'main' into certs_downloader 2024-07-11 15:21:58 +09:00
e262348e75
Merge pull request #30 from zeroday0619/main
ruff exclude append assets data
2024-07-11 15:21:20 +09:00
Euiseo Cha
e9aba0f803
feat: ruff exclude append assets data 2024-07-11 15:19:50 +09:00
2072b06dd3
Merge pull request #29 from zeroday0619/ruff
apply ruff linter
2024-07-11 15:16:31 +09:00
5d4d70a33a
Merge branch 'main' into ruff 2024-07-11 15:12:06 +09:00
Euiseo Cha
c527b1d831
feat: apply ruff linter and update .gitignore 2024-07-11 15:05:51 +09:00
b3e4165fab
Merge branch 'main' into certs_downloader 2024-07-11 14:11:35 +09:00
1fdb788e0c
Update .gitignore 2024-07-11 14:11:09 +09:00
83b46d3ede
Merge pull request #27 from gnh1201/importlib_with_env
Change the `Extension.register()` process / 확장 등록 프로세스 변경
2024-07-11 14:08:01 +09:00
a9783c6081 One more fix #27 2024-07-10 09:28:06 +09:00
f5caf1cac7 Fix fix fix 2024-07-09 17:02:29 +09:00
Euiseo Cha
cd7350655b
feat: implemented a custom logger and fixed some invalid code implementations
fixed some misspellings. Note that this modification is highly likely to cause conflicts.
2024-07-09 17:01:25 +09:00
1064dc017b
Update web.py 2024-07-09 16:50:48 +09:00
810d5041cb
Update web.py 2024-07-09 16:50:40 +09:00
16bbddcd94
Update base.py 2024-07-09 16:44:45 +09:00
d3f3b423c6
Update server.py 2024-07-09 16:41:15 +09:00
bbb8c7fe55
Update server.py 2024-07-09 16:41:02 +09:00
2d2e54cd2d
Update base.py 2024-07-09 16:38:19 +09:00
0b94de24e9
Update README.md 2024-07-09 16:31:41 +09:00
2e23938ca7
Update README.md 2024-07-09 16:17:33 +09:00
e539e3e670 Revert "Update server.py"
This reverts commit c087312455.
2024-07-09 16:15:48 +09:00
c087312455
Update server.py 2024-07-09 16:11:05 +09:00
d0b1cc2bf5
Update README.md 2024-07-09 16:08:52 +09:00
3f185a237f
Update README.md 2024-07-09 15:56:20 +09:00
bf8ea7be95
One more fix #24 2024-07-09 15:53:34 +09:00
352fc3229f
Merge pull request #24 from gnh1201/nmap
Add the network port scanning support / 네트워크 포트 스캐닝 지원
2024-07-09 14:16:44 +09:00
e72e835f7d
Merge branch 'main' into nmap 2024-07-09 14:16:35 +09:00
dc995854ba
Merge pull request #25 from gnh1201/extension_importlib_wrapper
Fix `Extension.register()` API and related files / 모듈 등록 방식 변경
2024-07-09 14:05:10 +09:00
9f8d221aea
Update and rename configure_certs.sh to download_certs.sh 2024-07-09 13:56:36 +09:00
e27f5c34ab
Create download_certs.bat 2024-07-09 13:54:53 +09:00
f214120c1c
Update bio.py 2024-07-07 19:19:31 +09:00
6e4413a010
Merge pull request #23 from zeroday0619/biopython
Create bio.py
2024-07-07 19:18:50 +09:00
f953341330 Fix Extension.register() API and related files 2024-07-06 22:52:53 +09:00
Euiseo Cha
9133f75c38
feat: removed unnecessary code and added comments 2024-07-05 20:50:39 +09:00
32af8bd701
Rename portscan.py to nmap.py 2024-07-04 15:02:51 +09:00
823c97015f
Update server.py 2024-07-04 15:00:58 +09:00
7d5d997881
Update portscan.py 2024-07-04 14:46:40 +09:00
4bef7a2417
Create portscan.py 2024-07-04 14:44:57 +09:00
Euiseo Cha
001852956c
Create bio.py 2024-07-02 19:52:32 +09:00
96f77b956f Remove unused workers (will be refactor) 2024-07-02 12:32:22 +09:00
a73ff414c2
Update README.md 2024-06-28 14:27:27 +09:00
65d5e26c1e Update console.html 2024-06-26 16:21:05 +09:00
b8fa3e6722 Update console.html 2024-06-25 19:39:32 +09:00
0c393a1338 Update console.html 2024-06-25 17:28:05 +09:00
243cadd5d0 Update console.html 2024-06-25 17:22:03 +09:00
30 changed files with 2268 additions and 1445 deletions

.env.example (new file, 13 lines)

@ -0,0 +1,13 @@
[settings]
PORT=5555
SERVER_URL=localhost
SERVER_CONNECTION_TYPE=proxy
CA_KEY=ca.key
CA_CERT=ca.crt
CERT_KEY=cert.key
CERT_DIR=certs/
#OPENSSL_BINPATH=openssl
CLIENT_ENCODING=utf-8
USE_EXTENSIONS=wayback.Wayback,bio.PyBio,alwaysonline.AlwaysOnline
ES_HOST=http://127.0.0.1:9200
ES_INDEX=alwaysonline
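For reference, a minimal sketch of how these values could be read from the Python side, assuming the file keeps the INI-style `[settings]` section shown above and using only the standard-library `configparser`; the variable names are illustrative:

```python
# Illustrative only: assumes .env is INI-formatted with a [settings] section,
# as in the .env.example above. Missing keys fall back to the defaults given here.
from configparser import ConfigParser

config = ConfigParser()
config.read(".env")

port = config.getint("settings", "PORT", fallback=5555)
server_url = config.get("settings", "SERVER_URL", fallback="localhost")
use_extensions = [
    name.strip()
    for name in config.get("settings", "USE_EXTENSIONS", fallback="").split(",")
    if name.strip()
]

print(port, server_url, use_extensions)
```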

.github/FUNDING.yml (vendored, 8 changed lines)

@ -1,8 +1,2 @@
# These are supported funding model platforms
github: gnh1201
open_collective: welsonjs
liberapay: catswords
custom: ['https://www.buymeacoffee.com/catswords', 'https://toss.me/catswords']
patreon: catswords # Replace with a single Patreon username
ko_fi: catswords
custom: ['https://gnh1201.link']

.github/workflows/llm-code-review.yml (new vendored file, 23 lines)

@ -0,0 +1,23 @@
name: AI Code Review
on:
pull_request:
types: [opened, synchronize, reopened]
issues:
types: [opened, reopened]
jobs:
repofix:
runs-on: ubuntu-latest
steps:
- name: Run RepoFixAI
uses: Manav916/llm-code-review@main
with:
groq_api_key: ${{ secrets.GROQ_API_KEY }}
groq_model: 'gemma-2-9b-it'
github_token: ${{ secrets.GITHUB_TOKEN }}
# exclude_extensions: 'txt'
repo_owner: ${{ github.repository_owner }}
repo_name: ${{ github.event.repository.name }}
event_number: ${{ github.event.number || github.event.issue.number }} # when listening for both pull requests and issues
event_name: ${{ github.event_name }}

.gitignore (vendored, 175 changed lines)

@ -1,4 +1,179 @@
certs/
savedfiles/
logs/
settings.ini
.env
*.crt
*.key
### Python ###
# Byte-compiled / optimized / DLL files
__pycache__/
*.py[cod]
*$py.class
# C extensions
*.so
# Distribution / packaging
.Python
build/
develop-eggs/
dist/
downloads/
eggs/
.eggs/
lib/
lib64/
parts/
sdist/
var/
wheels/
share/python-wheels/
*.egg-info/
.installed.cfg
*.egg
MANIFEST
# PyInstaller
# Usually these files are written by a python script from a template
# before PyInstaller builds the exe, so as to inject date/other infos into it.
*.manifest
*.spec
# Installer logs
pip-log.txt
pip-delete-this-directory.txt
# Unit test / coverage reports
htmlcov/
.tox/
.nox/
.coverage
.coverage.*
.cache
nosetests.xml
coverage.xml
*.cover
*.py,cover
.hypothesis/
.pytest_cache/
cover/
# Translations
*.mo
*.pot
# Django stuff:
*.log
local_settings.py
db.sqlite3
db.sqlite3-journal
# Flask stuff:
instance/
.webassets-cache
# Scrapy stuff:
.scrapy
# Sphinx documentation
docs/_build/
# PyBuilder
.pybuilder/
target/
# Jupyter Notebook
.ipynb_checkpoints
# IPython
profile_default/
ipython_config.py
# pyenv
# For a library or package, you might want to ignore these files since the code is
# intended to run in multiple environments; otherwise, check them in:
# .python-version
# pipenv
# According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
# However, in case of collaboration, if having platform-specific dependencies or dependencies
# having no cross-platform support, pipenv may install dependencies that don't work, or not
# install all needed dependencies.
#Pipfile.lock
# poetry
# Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
# This is especially recommended for binary packages to ensure reproducibility, and is more
# commonly ignored for libraries.
# https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
#poetry.lock
# pdm
# Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
#pdm.lock
# pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
# in version control.
# https://pdm.fming.dev/#use-with-ide
.pdm.toml
# PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
__pypackages__/
# Celery stuff
celerybeat-schedule
celerybeat.pid
# SageMath parsed files
*.sage.py
# Environments
.env
.venv
env/
venv/
ENV/
env.bak/
venv.bak/
# Spyder project settings
.spyderproject
.spyproject
# Rope project settings
.ropeproject
# mkdocs documentation
/site
# mypy
.mypy_cache/
.dmypy.json
dmypy.json
# Pyre type checker
.pyre/
# pytype static type analyzer
.pytype/
# Cython debug symbols
cython_debug/
# PyCharm
# JetBrains specific template is maintained in a separate JetBrains.gitignore that can
# be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
# and can be added to the global gitignore or merged into this file. For a more nuclear
# option (not recommended) you can uncomment the following to ignore the entire idea folder.
#.idea/
### Python Patch ###
# Poetry local configuration file - https://python-poetry.org/docs/configuration/#local-configuration
poetry.toml
# ruff
.ruff_cache/
# LSP config files
pyrightconfig.json

.gitmodules (new vendored file, 3 lines)

@ -0,0 +1,3 @@
[submodule "plugins"]
path = plugins
url = https://github.com/gnh1201/caterpillar-plugins

README.md

@ -1,23 +1,43 @@
# gnh1201/caterpillar
Caterpillar Proxy - The simple web debugging proxy (formerly, php-httpproxy)
# Caterpillar Proxy (Songchoongi Project)
![title image](assets/img/title.jfif)
[![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Fgnh1201%2Fcaterpillar.svg?type=shield)](https://app.fossa.com/projects/git%2Bgithub.com%2Fgnh1201%2Fcaterpillar?ref=badge_shield)
[![DOI 10.5281/zenodo.13346533](https://zenodo.org/badge/DOI/10.5281/zenodo.13346533.svg)](https://doi.org/10.5281/zenodo.13346533)
[![ChatGPT available](https://img.shields.io/badge/ChatGPT-74aa9c?logo=openai&logoColor=white)](#)
[![slideshare.net available](https://img.shields.io/badge/SlideShare-black?logo=slideshare)](https://www.slideshare.net/slideshow/2024-caterpillar-project-in-2024-korea-oss-contest/273031732)
[![Discord chat](https://img.shields.io/discord/359930650330923008?logo=discord)](https://discord.gg/9VVTHpfsVW)
[![Open to work](https://img.shields.io/badge/%23-OPENTOWORK-green)](https://github.com/gnh1201/welsonjs/discussions/167)
Caterpillar Proxy (Songchoongi Project) - The simple web debugging proxy (formerly, php-httpproxy)
![A cover image: Caterpillar on a tree looking at a rocket flying over the clouds](assets/img/cover.png)
You can connect all physical and logical channels with communication capabilities to the web!
Imagine various means such as time machines, satellites, quantum technology, sound, light, the Philosopher's Stone, or Excalibur, just like in science fiction movies! Caterpillar Proxy supports the implementation of extensions for Connectors, Filters, and RPC methods to bring your imagination to life.
:rocket: [Open the Caterpillar Proxy Web Console](https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/console.html)
## Use cases
* [Build a network tunnel using Python and the LAMP(PHP) stack.](https://qiita.com/gnh1201/items/40f9350ca6d308def6d4)
* [K-Anonymity for Spam Filtering: Case with Mastodon, and Misskey](https://qiita.com/gnh1201/items/09f4081f84610db3a9d3)
* [File Upload Attack Test with Caterpillar Proxy](https://youtu.be/sPZOCgYtLRw)
* [Build a network tunnel using Python and the LAMP(PHP) stack (qiita.com)](https://qiita.com/gnh1201/items/40f9350ca6d308def6d4)
* [K-Anonymity for Spam Filtering: Case with Mastodon, and Misskey (qiita.com)](https://qiita.com/gnh1201/items/09f4081f84610db3a9d3)
* [File Upload Vulnerability Attack Test (Caterpillar Proxy) (youtu.be) ](https://youtu.be/sPZOCgYtLRw)
* [Real-time processing of emergency disaster sensor data (e.g., fire detection).](https://catswords.social/@catswords_oss/114016647285923011)
## How it works
### Basic structure
```
You <-> Proxy client (Python) <-> Parasitized proxy server (Optional, PHP) <-> On the Web
* You <-> Proxy client (Python) <-> Parasitized proxy server (Optional, PHP/LAMP) <-> On the Web
* You <-> Proxy client (Python) <-> Connector extensions (Optional, Python) <-> On the Web
```
For example, build a simple web debugging proxy on the shared servers.
### Stateful mode
This project supports two modes of connection. The default is stateless. You can use the stateful mode to avoid being constrained by transfer capacity limits. See the [Stateful mode (github.com/gnh1201/caterpillar wiki)](https://github.com/gnh1201/caterpillar/wiki/Stateful-mode).
This project supports two modes of connection. The default is stateless. You can use the stateful mode to avoid being constrained by transfer capacity limits. See the [Stateful mode (catswords-oss.rdbl.io)](https://catswords-oss.rdbl.io/1155378128/5211324242).
### Connector extensions
This project supports the implementation of Connector extensions. The provided basic examples include implementations of web archives (caches) and serial communication as Connector extensions. Go to the [caterpillar-plugins repository (github.com)](https://github.com/gnh1201/caterpillar-plugins)
## (Optional) Before to use
If you have a server that ***will be parasitized*** and you want to proxy it, you should upload the `index.php` file to a shared server. The index.php file is located in the `assets/php` directory within this repository.
@ -27,17 +47,21 @@ If you have a server that ***will be parasitized*** and you want to proxy it, yo
```
[settings]
CONNECTION_TIMEOUT=1
PORT=5555
SERVER_URL=http://example.org
SERVER_CONNECTION_TYPE=stateless
SERVER_URL=localhost
SERVER_CONNECTION_TYPE=
CA_KEY=ca.key
CA_CERT=ca.crt
CERT_KEY=cert.key
CERT_DIR=certs/
OPENSSL_BINPATH=openssl
CLIENT_ENCODING=utf-8
USE_EXTENSIONS=wayback.Wayback,bio.PyBio
```
***Note***: If using Caterpillar Proxy (Python) alone, set `SERVER_URL=localhost`. Otherwise, use the endpoint URL of the Worker script (PHP or Java), e.g., `SERVER_URL=http://example.org`.
- (Optional) Create a certificate for SSL decryption
```bash
@ -57,14 +81,35 @@ sudo update-ca-certificates
4. (Optional) With [Cloudflare](https://cloudflare.com), we can expect to accelerate the 4x speed and reduce the network stuck.
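Once the proxy client is running, a quick smoke test is to route an ordinary request through it. This is only a sketch and assumes the client listens on the default `PORT=5555` from the settings above; the target URL is a placeholder:

```python
# Hypothetical smoke test: send one request through the local Caterpillar Proxy.
# Assumes the proxy client is listening on 127.0.0.1:5555 (PORT=5555 above).
import requests

proxies = {
    "http": "http://127.0.0.1:5555",
    "https": "http://127.0.0.1:5555",
}
response = requests.get("http://example.org/", proxies=proxies, timeout=10)
print(response.status_code, len(response.text))
```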
## Extensions
* [Web Console Available](https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/console.html)
* Fediverse (e.g., Mastodon): See the [Fediverse (github.com/gnh1201/caterpillar wiki)](https://github.com/gnh1201/caterpillar/wiki/Fediverse).
* Wayback (Private browsing with Google or Wayback cache): See the [Wayback (github.com/gnh1201/caterpillar wiki)](https://github.com/gnh1201/caterpillar/wiki/Wayback).
* [Web Console](https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/console.html)
* Fediverse (e.g., Mastodon): See the [Fediverse (catswords-oss.rdbl.io)](https://catswords-oss.rdbl.io/1155378128/3821602484).
* Wayback (Private browsing with Google or Wayback cache): See the [Wayback (catswords-oss.rdbl.io)](https://catswords-oss.rdbl.io/1155378128/6994492654)
## Thanks to
* Pan Art by [@yeohangdang@i.peacht.art](#): ![Caterpillar Project Pan Art by @yeohangdang@i.peacht.art](assets/img/logo.png)
* Pan Art by [@yeohangdang@i.peacht.art](#): [Image File](assets/img/logo.png)
* [GitHub Sponsors](https://github.com/sponsors/gnh1201)
## Contributors
<a href="https://github.com/gnh1201/caterpillar/graphs/contributors">
<img src="https://contrib.rocks/image?repo=gnh1201/caterpillar" alt="Contributors" />
</a>
## Our roadmap
![Roadmap image](assets/img/roadmap.png)
## Report abuse
* ActivityPub [@gnh1201@catswords.social](https://catswords.social/@gnh1201)
* abuse@catswords.net
- abuse@catswords.net
- [GitHub Security Advisories (gnh1201/caterpillar)](https://github.com/gnh1201/caterpillar/security)
## Join the community
- ActivityPub [@catswords_oss@catswords.social](https://catswords.social/@catswords_oss)
- XMPP [catswords@conference.omemo.id](xmpp:catswords@conference.omemo.id?join)
- [Join Catswords OSS on Microsoft Teams (teams.live.com)](https://teams.live.com/l/community/FEACHncAhq8ldnojAI)
- [Join Catswords OSS #caterpillar on Discord (discord.gg)](https://discord.gg/9VVTHpfsVW)
## Special channels
- [A paid consultation channel (m.expert.naver.com)](https://m.expert.naver.com/mobile/expert/product/detail?storeId=100051156&productId=100144540) is available for Korean (한국어) region.
- [Join the private operations channel (forms.gle)](https://forms.gle/ZKAAaGTiGamksHoo8) is available for all regions.
## License
[![FOSSA Status](https://app.fossa.com/api/projects/git%2Bgithub.com%2Fgnh1201%2Fcaterpillar.svg?type=large)](https://app.fossa.com/projects/git%2Bgithub.com%2Fgnh1201%2Fcaterpillar?ref=badge_large)

assets/img/cover.png (new binary file, 2.3 MiB)

(binary image file changed; 143 KiB before and after)

assets/img/roadmap.png (new binary file, 500 KiB)

(deleted file: Node.js worker script)
@ -1,182 +0,0 @@
// https://github.com/gnh1201/caterpillar
const express = require('express');
const bodyParser = require('body-parser');
const net = require('net');
const tls = require('tls');
const DEFAULT_SOCKET_TIMEOUT = 1000; // milliseconds
const STATEFUL_SOCKET_TIMEOUT = 30000; // milliseconds
const app = express();
const port = 3000; // listening port number
app.use(bodyParser.json());
function jsonrpc2_encode(method, params, id = '') {
const data = {
jsonrpc: '2.0',
method: method,
params: params,
id: id
};
return JSON.stringify(data);
}
function jsonrpc2_error_encode(error, id = '') {
const data = {
jsonrpc: '2.0',
error: error,
id: id
};
return JSON.stringify(data);
}
function read_from_remote_server(remote_address, remote_port, scheme, data = null, conn = null, buffer_size = 8192, id = '') {
const sock = scheme === "https" || scheme === "ssl" || scheme === "tls"
? tls.connect(remote_port, remote_address)
: net.connect(remote_port, remote_address);
sock.on('error', error => {
const err = {
status: 502,
code: error.code,
message: error.message
};
if (!conn) {
console.log(jsonrpc2_error_encode(err, id));
} else {
let buf = `HTTP/1.1 502 Bad Gateway\r\n\r\n`;
buf += jsonrpc2_error_encode(err, id);
conn.write(buf);
}
});
sock.on('connect', () => {
if (!conn) {
sock.write(data);
sock.on('data', buf => {
console.log(buf.toString());
});
} else {
conn.on('data', buf => {
sock.write(buf);
});
sock.on('data', buf => {
conn.write(buf);
});
}
});
sock.on('end', () => {
sock.end();
});
}
function relay_request(params, id = '') {
const { buffer_size, request_data, request_length, client_address, client_port, client_encoding, remote_address, remote_port, scheme, datetime } = params;
const request_header = parse_headers(Buffer.from(request_data, 'base64').toString());
switch (request_header['@method'][0]) {
case 'CONNECT':
const err = {
status: 405,
code: -1,
message: "Method Not Allowed"
};
console.log(jsonrpc2_error_encode(err, id));
break;
default:
read_from_remote_server(remote_address, remote_port, scheme, Buffer.from(request_data, 'base64'), null, buffer_size, id);
}
}
function relay_connect(params, id = '') {
const { buffer_size, client_address, client_port, client_encoding, remote_address, remote_port, scheme, datetime } = params;
const starttime = Date.now();
const sock = net.connect(client_port, client_address);
sock.on('error', error => {
const err = {
status: 502,
code: error.code,
message: error.message,
_params: params
};
console.log(jsonrpc2_error_encode(err, id));
});
sock.on('connect', () => {
const stoptime = Date.now();
const connection_speed = Math.floor((stoptime - starttime));
const data = jsonrpc2_encode("relay_accept", {
success: true,
connection_speed: connection_speed
}, id);
sock.write(data + '\r\n\r\n');
read_from_remote_server(remote_address, remote_port, scheme, null, sock, buffer_size, id);
});
}
function parse_headers(str) {
const headers = {};
const lines = str.split(/\r?\n/);
const first_line = lines.shift();
headers['@method'] = first_line.split(' ');
lines.forEach(line => {
const match = line.match(/^([^:]+):(.*)$/);
if (match) {
headers[match[1]] = match[2].trim();
}
});
return headers;
}
function get_client_address(req, res) {
const client_address = req.ip;
const response = {
client_address: client_address
};
res.json(response);
}
app.post('/', (req, res) => {
const context = req.body;
if (context.jsonrpc === '2.0') {
const method = context.method;
switch (method) {
case 'relay_request':
relay_request(context.params, context.id);
break;
case 'relay_connect':
relay_connect(context.params, context.id);
break;
case 'get_client_address':
get_client_address(req, res);
break;
default:
res.status(400).send('Invalid method');
break;
}
} else {
res.status(400).send('Invalid JSON-RPC version');
}
});
app.listen(port, () => {
console.log(`Server is running on port ${port}`);
});

(deleted file: Perl worker script)
@ -1,204 +0,0 @@
# https://github.com/gnh1201/caterpillar
use JSON;
use IO::Socket::INET;
use IO::Socket::SSL;
use Time::HiRes qw(time);
use constant DEFAULT_SOCKET_TIMEOUT => 1;
use constant STATEFUL_SOCKET_TIMEOUT => 30;
sub jsonrpc2_encode {
my ($method, $params, $id) = @_;
my $data = {
jsonrpc => "2.0",
method => $method,
params => $params,
id => $id
};
return encode_json($data);
}
sub jsonrpc2_result_encode {
my ($result, $id) = @_;
my $data = {
jsonrpc => "2.0",
result => $result,
id => $id
};
return encode_json($data);
}
sub jsonrpc2_error_encode {
my ($error, $id) = @_;
my $data = {
jsonrpc => "2.0",
error => $error,
id => $id
};
return encode_json($data);
}
sub parse_headers {
my ($str) = @_;
my %headers;
my @lines = split(/\r?\n/, $str);
my $first_line = shift(@lines);
$headers{'@method'} = [split(' ', $first_line)];
foreach my $line (@lines) {
if ($line =~ /^([^:]+):(.*)$/) {
$headers{$1} = trim($2);
}
}
return \%headers;
}
sub read_from_remote_server {
my ($remote_address, $remote_port, $scheme, $data, $conn, $buffer_size, $id) = @_;
my $sock;
if ($scheme ~~ ["https", "ssl", "tls"]) {
$sock = IO::Socket::SSL->new(
PeerAddr => $remote_address,
PeerPort => $remote_port,
SSL_verify_mode => 0, # You may adjust SSL options as needed
Timeout => DEFAULT_SOCKET_TIMEOUT
);
} else {
$sock = IO::Socket::INET->new(
PeerAddr => $remote_address,
PeerPort => $remote_port,
Proto => 'tcp',
Timeout => DEFAULT_SOCKET_TIMEOUT
);
}
if (!$sock) {
my $error = {
status => 502,
code => $!,
message => $@
};
if (!$conn) {
print jsonrpc2_error_encode($error, $id);
} else {
my $buf = sprintf("HTTP/1.1 502 Bad Gateway\r\n\r\n");
$buf .= jsonrpc2_error_encode($error, $id);
print $conn $buf;
}
} else {
if (!$conn) {
# send data
print $sock $data;
# receive data
my $buf;
while (!eof($sock) && defined($buf = <$sock>)) {
print $buf;
}
} else {
# send data
my $buf;
while (!eof($conn) && defined($buf = <$conn>)) {
print $sock $buf;
}
# receive data
$buf = "";
while (!eof($sock) && defined($buf = <$sock>)) {
print $conn $buf;
}
}
close($sock);
}
}
sub relay_request {
my ($params, $id) = @_;
my $buffer_size = $params->{'buffer_size'};
my $request_data = decode_base64($params->{'request_data'});
my $request_header = parse_headers($request_data);
my $request_length = int($params->{'request_length'});
my $client_address = $params->{'client_address'};
my $client_port = int($params->{'client_port'});
my $client_encoding = $params->{'client_encoding'};
my $remote_address = $params->{'remote_address'};
my $remote_port = int($params->{'remote_port'});
my $scheme = $params->{'scheme'};
my $datetime = $params->{'datetime'};
given ($request_header->{'@method'}[0]) {
when ("CONNECT") {
my $error = {
status => 405,
code => -1,
message => "Method Not Allowed"
};
print jsonrpc2_error_encode($error, $id);
}
default {
read_from_remote_server($remote_address, $remote_port, $scheme, $request_data, undef, $buffer_size, $id);
}
}
}
sub relay_connect {
my ($params, $id) = @_;
my $buffer_size = $params->{'buffer_size'};
my $client_address = $params->{'client_address'};
my $client_port = int($params->{'client_port'});
my $client_encoding = $params->{'client_encoding'};
my $remote_address = $params->{'remote_address'};
my $remote_port = int($params->{'remote_port'});
my $scheme = $params->{'scheme'};
my $datetime = $params->{'datetime'};
my $starttime = time();
my $conn = IO::Socket::INET->new(
PeerAddr => $client_address,
PeerPort => $client_port,
Proto => 'tcp',
Timeout => STATEFUL_SOCKET_TIMEOUT
);
if (!$conn) {
my $error = {
status => 502,
code => $!,
message => $@
};
print jsonrpc2_error_encode($error, $id);
} else {
my $stoptime = time();
my $connection_speed = int(($stoptime - $starttime) * 1000);
my $data = jsonrpc2_encode("relay_accept", {
success => 1,
connection_speed => $connection_speed
}, $id);
print $conn $data . "\r\n\r\n";
read_from_remote_server($remote_address, $remote_port, $scheme, undef, $conn, $buffer_size, $id);
close($conn);
}
}
# Parse a context
my $json_input = do { local $/; <STDIN> };
my $context = decode_json($json_input);
# Check if it's JSON-RPC 2 (stateless)
if ($context->{'jsonrpc'} eq "2.0") {
my $method = $context->{'method'};
given ($method) {
when ("relay_request") {
relay_request($context->{'params'}, $context->{'id'}); # stateless mode
}
when ("relay_connect") {
relay_connect($context->{'params'}, $context->{'id'}); # stateful mode
}
}
}

assets/php/class.tfa.php (new file, 61 lines)

@ -0,0 +1,61 @@
<?php
// https://github.com/dimamedia/PHP-Simple-TOTP-and-PubKey
class tfa {
// RFC4648 Base32 alphabet
private $alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ234567";
function getOtp($key) {
/* Base32 decoder */
// Remove spaces from the given public key and converting to an array
$key = str_split(str_replace(" ","",$key));
$n = 0;
$j = 0;
$binary_key = "";
// Decode public key's each character to base32 and save into binary chunks
foreach($key as $char) {
$n = $n << 5;
$n = $n + stripos($this->alphabet, $char);
$j += 5;
if($j >= 8) {
$j -= 8;
$binary_key .= chr(($n & (0xFF << $j)) >> $j);
}
}
/* End of Base32 decoder */
// current unix time 30sec period as binary
$binary_timestamp = pack('N*', 0) . pack('N*', floor(microtime(true)/30));
// generate keyed hash
$hash = hash_hmac('sha1', $binary_timestamp, $binary_key, true);
// generate otp from hash
$offset = ord($hash[19]) & 0xf;
$otp = (
((ord($hash[$offset+0]) & 0x7f) << 24 ) |
((ord($hash[$offset+1]) & 0xff) << 16 ) |
((ord($hash[$offset+2]) & 0xff) << 8 ) |
(ord($hash[$offset+3]) & 0xff)
) % pow(10, 6);
return $otp;
}
function getPubKey() {
$alphabet = str_split($this->alphabet);
$key = '';
// generate 16 chars public key from Base32 alphabet
for ($i = 0; $i < 16; $i++) $key .= $alphabet[mt_rand(0,31)];
// split into 4x4 chunks for easy reading
return implode(" ", str_split($key, 4));
}
}
?>
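The class above follows the usual TOTP recipe: Base32-decode the shared key, HMAC-SHA1 over the 30-second counter, dynamic truncation, six digits. A minimal Python sketch of the same calculation, shown only to illustrate the algorithm (the key below is a made-up example in the format `getPubKey()` produces):

```python
# Illustrative TOTP sketch mirroring class.tfa.php: Base32 key, HMAC-SHA1,
# 30-second time step, dynamic truncation, 6-digit code.
import base64
import hashlib
import hmac
import struct
import time

def get_otp(key: str) -> int:
    # Strip the 4x4 spacing used by getPubKey() and decode the Base32 key.
    binary_key = base64.b32decode(key.replace(" ", "").upper())
    # 64-bit big-endian counter: number of 30-second periods since the Unix epoch.
    counter = struct.pack(">Q", int(time.time()) // 30)
    digest = hmac.new(binary_key, counter, hashlib.sha1).digest()
    offset = digest[19] & 0x0F
    return (
        ((digest[offset] & 0x7F) << 24)
        | (digest[offset + 1] << 16)
        | (digest[offset + 2] << 8)
        | digest[offset + 3]
    ) % 10**6

print(f"{get_otp('ABCD EFGH IJKL MNOP'):06d}")  # made-up key
```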

(new file: Coupang Product Search API PHP class)
@ -0,0 +1,70 @@
<?php
// coupang.class.php
// Coupang Product Search API integration class
// Namhyeon Go <gnh1201@gmail.com>
// https://github.com/gnh1201/welsonjs
//
date_default_timezone_set("GMT+0");
class CoupangProductSearch {
private $accessKey = "";
private $secretKey = "";
private $baseUrl = "https://api-gateway.coupang.com";
private function generateSignature($method, $path, $query = "") {
$datetime = (new \DateTime("now", new \DateTimeZone("GMT")))->format("ymd\THis\Z");
$message = $datetime . $method . $path . $query;
$signature = hash_hmac('sha256', $message, $this->secretKey);
return [
'authorization' => "CEA algorithm=HmacSHA256, access-key={$this->accessKey}, signed-date={$datetime}, signature={$signature}",
'datetime' => $datetime
];
}
public function searchProducts($keyword, $limit = 10, $subId = null, $imageSize = null, $srpLinkOnly = false) {
$path = "/v2/providers/affiliate_open_api/apis/openapi/products/search";
$queryParams = http_build_query([
'keyword' => $keyword,
'limit' => $limit,
'subId' => $subId,
'imageSize' => $imageSize,
'srpLinkOnly' => $srpLinkOnly
]);
$fullPath = $path . '?' . $queryParams;
$url = $this->baseUrl . $fullPath;
$signatureData = $this->generateSignature("GET", $path, $queryParams);
$authorization = $signatureData['authorization'];
$datetime = $signatureData['datetime'];
$headers = [
"Content-Type: application/json;charset=UTF-8",
"Authorization: $authorization"
];
$curl = curl_init();
curl_setopt($curl, CURLOPT_URL, $url);
curl_setopt($curl, CURLOPT_CUSTOMREQUEST, "GET");
curl_setopt($curl, CURLOPT_HTTPHEADER, $headers);
curl_setopt($curl, CURLOPT_RETURNTRANSFER, true);
$response = curl_exec($curl);
$httpCode = curl_getinfo($curl, CURLINFO_HTTP_CODE);
curl_close($curl);
if ($httpCode === 200) {
return json_decode($response, true);
} else {
try {
return json_decode($response, true);
} catch (Exception $e) {
return [
"status" => $httpCode,
"message" => $e->getMessage()
];
}
}
}
}
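For illustration, the request signing performed by `generateSignature()` above (HMAC-SHA256 over `datetime + method + path + query`, wrapped in a `CEA ...` authorization header) can be sketched in Python as follows; the keys are placeholders, not real credentials:

```python
# Sketch of the HMAC-SHA256 signing scheme shown in the PHP class above.
# ACCESS_KEY and SECRET_KEY are placeholders.
import hashlib
import hmac
from datetime import datetime, timezone

ACCESS_KEY = "YOUR-ACCESS-KEY"  # placeholder
SECRET_KEY = "YOUR-SECRET-KEY"  # placeholder

def generate_signature(method: str, path: str, query: str = "") -> dict:
    signed_date = datetime.now(timezone.utc).strftime("%y%m%dT%H%M%SZ")
    message = signed_date + method + path + query
    signature = hmac.new(SECRET_KEY.encode(), message.encode(), hashlib.sha256).hexdigest()
    authorization = (
        f"CEA algorithm=HmacSHA256, access-key={ACCESS_KEY}, "
        f"signed-date={signed_date}, signature={signature}"
    )
    return {"authorization": authorization, "datetime": signed_date}

print(generate_signature(
    "GET",
    "/v2/providers/affiliate_open_api/apis/openapi/products/search",
    "keyword=test&limit=10",
))
```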

index.php (Caterpillar Proxy Worker on PHP)
@ -1,37 +1,72 @@
<?php
/* index.php
* Caterpillar Worker on PHP
* Caterpillar Proxy Worker on PHP runtime
*
* Caterpillar Proxy - The simple web debugging proxy (formerly, php-httpproxy)
* Namhyeon Go (Catswords Research) <abuse@catswords.net>
* https://github.com/gnh1201/caterpillar
* Created at: 2022-10-06
* Updated at: 2024-06-25
* Updated at: 2025-03-11
*/
define("PHP_HTTPPROXY_VERSION", "0.1.5.23");
define("PERF_START_TIME", microtime(true));
define("PHP_HTTPPROXY_VERSION", "0.1.6.10");
define("DEFAULT_SOCKET_TIMEOUT", 1);
define("STATEFUL_SOCKET_TIMEOUT", 30);
define("MAX_EXECUTION_TIME", 0);
define("DEFAULT_USER_AGENT", $_SERVER['HTTP_USER_AGENT'] . '</p><hr><p>php-httpproxy/' . PHP_HTTPPROXY_VERSION . ' (Server; PHP ' . phpversion() . '; Caterpillar; abuse@catswords.net)');
define("ALLOW_INVOKE_INSECURE_METHOD", false);
define("ALLOW_LOAD_INSECURE_SCRIPT", true);
define("DEFAULT_USER_AGENT", 'php-httpproxy/' . PHP_HTTPPROXY_VERSION . ' (Server; PHP ' . phpversion() . '; Caterpillar Proxy)');
define("RELAY_ALLOW_METHODS", ""); // e.g., GET,POST
define("RELAY_PROXY_PASS", ""); // e.g., https://example.org
define("RELAY_IMAGE_FILE_EXTENSIONS", ".png,.gif,.jpg");
define("RELAY_STATIC_FILE_EXTENSIONS", ".js,.css");
define("RELAY_ENABLE_JS_REDIRECT", false);
error_reporting(E_ALL);
ini_set("display_errors", 0);
ini_set("default_socket_timeout", DEFAULT_SOCKET_TIMEOUT); // must be. because of `feof()` works
ini_set("max_execution_time", MAX_EXECUTION_TIME);
header('Access-Control-Allow-Origin: *');
header('Access-Control-Allow-Methods: *');
header("Access-Control-Allow-Headers: *");
if (strpos($_SERVER['HTTP_USER_AGENT'], "php-httpproxy/") !== 0 && strpos($_SERVER['HTTP_X_USER_AGENT'], "php-httpproxy/") !== 0) {
exit('<!DOCTYPE html><html><head><title>It works!</title><meta charset="utf-8"></head><body><h1>It works!</h1><p><a href="https://github.com/gnh1201/caterpillar">Download the client</a></p><p>' . DEFAULT_USER_AGENT . '</p></body></html>');
function get_current_execution_time() {
$end_time = microtime(true);
return $end_time - PERF_START_TIME;
}
ini_set("default_socket_timeout", DEFAULT_SOCKET_TIMEOUT); // must be. because of `feof()` works
ini_set("max_execution_time", MAX_EXECUTION_TIME);
function array_get($key, $arr, $default = null) {
return array_key_exists($key, $arr) ? $arr[$key] : $default;
}
function server_env_get($key) {
return array_get($key, $_SERVER, "");
}
function verity_integrity($data, $integrity) {
if (strpos($integrity, 'sha384-') !== 0) {
return false;
}
$encoded_hash = substr($integrity, 7);
$decoded_hash = base64_decode($encoded_hash);
$calculated_hash = hash('sha384', $data, true);
return hash_equals($calculated_hash, $decoded_hash);
}
function cast_to_array($data) {
return is_array($data) ? $data : array($data);
}
function jsonrpc2_encode($method, $params, $id = '') {
$data = array(
"jsonrpc" => "2.0",
"method" => $method,
"params" => $params,
"id" => $id
"id" => $id,
"_execution_time" => get_current_execution_time()
);
return json_encode($data);
}
@ -40,7 +75,8 @@ function jsonrpc2_result_encode($result, $id = '') {
$data = array(
"jsonrpc" => "2.0",
"result" => $result,
"id" => $id
"id" => $id,
"_execution_time" => get_current_execution_time()
);
return json_encode($data);
}
@ -49,7 +85,8 @@ function jsonrpc2_error_encode($error, $id = '') {
$data = array(
"jsonrpc" => "2.0",
"error" => $error,
"id" => $id
"id" => $id,
"_execution_time" => get_current_execution_time()
);
return json_encode($data);
}
@ -71,7 +108,7 @@ function fatal_handler() {
$errstr = $error["message"];
header("HTTP/1.1 200 OK");
exit(jsonrpc2_error_encode(array(
exit("\r\n\r\n" . jsonrpc2_error_encode(array(
"status" => 503,
"code" => $errno,
"message"=> "Error occurred in file '$errfile' at line $errline: $errstr"
@ -80,6 +117,27 @@ function fatal_handler() {
}
register_shutdown_function("fatal_handler");
function load_script($data) {
$loaded_script = false;
if (!ALLOW_LOAD_INSECURE_SCRIPT) {
return $loaded_script;
}
$fh = tmpfile();
if ($fh !== false) {
if (!(strpos($data, "<?") !== false)) {
$data = "<?php\r\n\r\n" . $data . "\r\n\r\n?>";
}
fwrite($fh, $data);
$path = stream_get_meta_data($fh)['uri'];
$loaded_script = include($path);
fclose($fh);
}
return $loaded_script;
}
// https://stackoverflow.com/questions/16934409/curl-as-proxy-deal-with-https-connect-method
// https://stackoverflow.com/questions/12433958/how-to-parse-response-headers-in-php
function parse_headers($str) { // Parses HTTP headers into an array
@ -218,12 +276,12 @@ function relay_connect($params, $id = '') {
}
function relay_mysql_connect($params) {
$hostname = $params['hostname'];
$username = $params['username'];
$password = $params['password'];
$database = array_key_exists('database', $params) ? $params['database'] : null;
$port = array_key_exists('port', $params) ? intval($params['port']) : 3306;
$charset = array_key_exists('charset', $params) ? $params['charset'] : "utf8";
$hostname = array_get("hostname", $params, "localhost");
$username = array_get("username", $params, "root");
$password = array_get("password", $params, "");
$database = array_get("database", $params, null);
$port = intval(array_get("port", $params, 3306));
$charset = array_get("charset", $params, "utf8");
try {
$mysqli = new mysqli($hostname, $username, $password, $database, $port);
@ -289,12 +347,20 @@ function relay_mysql_query($params, $mysqli) {
case "show":
case "select":
$success = true;
$result['data'] = mysqli_fetch_all($query_result, MYSQLI_ASSOC);
if (function_exists("mysqli_fetch_all")) {
$result['data'] = mysqli_fetch_all($query_result, MYSQLI_ASSOC);
} else {
$data = array();
while ($row = $query_result->fetch_assoc()) {
$data[] = $row;
}
$result['data'] = $data;
}
break;
case "insert":
$success = (bool) $query_result;
$result['last_id'] = @$mysqli->insert_id();
$result['last_id'] = @$mysqli->insert_id;
break;
default:
@ -357,6 +423,24 @@ function relay_get_phpversion() {
);
}
function relay_get_env_hash() {
$params = array(
"php_version" => phpversion(),
"php_os" => PHP_OS,
"php_sapi" => PHP_SAPI,
"loaded_extensions" => get_loaded_extensions(),
"ini_settings" => ini_get_all(null, false)
);
$serialized_params = serialize($params);
return array(
"data" => array(
sha1($serialized_params),
md5($serialized_params)
)
);
}
function relay_get_loaded_extensions() {
return array(
"data" => get_loaded_extensions()
@ -389,15 +473,85 @@ function relay_dns_get_record($params) {
function relay_fetch_url($params) {
$url = $params['url'];
$method = array_get("method", $params, "GET");
$headers = array_get("headers", $params, array());
$data = array_get("data", $params, '');
// from local source
$local_prefix = "file:";
$pos = strpos($url, $local_prefix);
if ($pos !== false && $pos === 0) {
$path = realpath(substr($url, strlen($local_prefix)));
$basedir = realpath(__DIR__);
if ($path && strpos($path, $basedir) === 0) {
if (file_exists($path)) {
$response = file_get_contents($path);
return array(
"success" => true,
"result" => array(
"status" => 200,
"data" => $response
)
);
} else {
return array(
"success" => false,
"error" => array(
"status" => 404,
"code" => -1,
"message" => "Not found"
)
);
}
} else {
return array(
"success" => false,
"error" => array(
"status" => 403,
"code" => -1,
"message" => "Access denied"
)
);
}
}
// from remote source
$_headers = array();
if (is_array($headers) && count($headers) > 0) {
foreach ($headers as $header_line) {
$pos = strpos($header_line, ':');
if ($pos !== false) {
$header_key = trim(substr($header_line, 0, $pos));
$header_value = trim(substr($header_line, $pos + 1));
$_header_line = sprintf("%s: %s", $header_key, $header_value);
array_push($_headers, $_header_line);
}
}
}
try {
$ch = curl_init();
curl_setopt($ch, CURLOPT_URL, $url);
curl_setopt($ch, CURLOPT_USERAGENT, DEFAULT_USER_AGENT);
curl_setopt($ch, CURLOPT_RETURNTRANSFER, true);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 1);
curl_setopt($ch, CURLOPT_CONNECTTIMEOUT, 30);
curl_setopt($ch, CURLOPT_SSL_VERIFYPEER, false);
curl_setopt($ch, CURLOPT_DNS_USE_GLOBAL_CACHE, false);
curl_setopt($ch, CURLOPT_DNS_CACHE_TIMEOUT, 30);
// check the request headers
if (count($_headers) > 0) {
curl_setopt($ch, CURLOPT_HTTPHEADER, $_headers);
}
// check it is POST request
if ($method == "POST") {
curl_setopt($ch, CURLOPT_POSTFIELDS, cast_to_array($data));
curl_setopt($ch, CURLOPT_POST, true);
}
// make cURL instance
$response = curl_exec($ch);
$error_code = curl_errno($ch);
if ($error_code) {
@ -451,10 +605,58 @@ function relay_get_geolocation() {
}
}
function relay_invoke_method($params) {
$callback = $params['callback'];
$args = (is_array($params['args']) ? $params['args'] : array());
$requires = cast_to_array($params['requires']);
$args = cast_to_array($params['args']);
if (!ALLOW_INVOKE_INSECURE_METHOD) {
$allow_callbacks = array("phpinfo", "idn_to_ascii", "idn_to_utf8", "load_script");
if (!in_array($callback, $allow_callbacks)) {
return array(
"success" => false,
"error" => array(
"status" => 403,
"code" => -1,
"message" => $callback . " is not allowed"
)
);
}
}
foreach($requires as $require_ctx) {
$resource_url = "";
$resource_integrity = "";
if (is_string($require_ctx)) {
$resource_url = $require_ctx;
} else if (is_array($require_ctx)) {
$resource_url = array_get("url", $require_ctx, "");
$resource_integrity = array_get("integrity", $require_ctx, "");
}
if (empty($resource_url))
continue;
try {
$result = relay_fetch_url(array(
"url" => $resource_url
));
if ($result['success'] && $result['result']['status'] == 200) {
$response = $result['result']['data'];
if (!empty($resource_integrity)) {
if (verify_integrity($response, $resource_integrity)) {
load_script($response);
}
} else {
load_script($response);
}
}
} catch (Exception $e) {
//echo $e->message; // ignore an exception
}
}
try {
$data = call_user_func_array($callback, $args);
@ -481,21 +683,104 @@ function relay_invoke_method($params) {
}
}
function get_client_address() {
$client_address = '';
if (!empty($_SERVER['HTTP_CLIENT_IP'])) {
$client_address = $_SERVER['HTTP_CLIENT_IP'];
} elseif (!empty($_SERVER['HTTP_X_FORWARDED_FOR'])) {
$client_address = $_SERVER['HTTP_X_FORWARDED_FOR'];
function relay_web_search($params) {
$page = $params['page'];
$search_params = array(
"q" => $params['keyword'],
"p" => ($page > 0 ? $page - 1 : 0),
"t" => "0" // text only
);
$result = relay_fetch_url(array(
"url" => "https://farside.link/librex/api.php?" . http_build_query($search_params)
));
if ($result['success']) {
return array(
"success" => true,
"result" => array(
"status" => 200,
"data" => json_decode($result['result']['data'], true)
)
);
} else {
$client_address = $_SERVER['REMOTE_ADDR'];
return $result;
}
}
function get_client_address() {
$client_address = "";
$client_address_candidates = array_filter(array_map("server_env_get", array(
"HTTP_CLIENT_IP",
"HTTP_X_FORWARDED_FOR",
"HTTP_X_FORWARDED",
"HTTP_X_CLUSTER_CLIENT_IP",
"HTTP_FORWARDED_FOR",
"HTTP_FORWARDED",
"REMOTE_ADDR"
)));
if (count($client_address_candidates) > 0) {
$client_address = $client_address_candidates[0];
}
return array(
"data" => $client_address,
"data" => $client_address_candidates,
"client_address" => $client_address // compatible under version 0.1.5.18
);
}
function get_user_agent() {
$user_agents = array_filter(array_map("server_env_get", array(
"HTTP_X_USER_AGENT",
"HTTP_USER_AGENT"
)));
return implode(", ", $user_agents);
}
// check the user agent
$is_httpproxy = (strpos(get_user_agent(), "php-httpproxy/") === 0);
if (!$is_httpproxy) {
$relay_allow_methods = explode(',', strtoupper(RELAY_ALLOW_METHODS));
$relay_image_file_extensions = explode(',', strtolower(RELAY_IMAGE_FILE_EXTENSIONS));
$relay_static_file_extensions = explode(',', strtolower(RELAY_STATIC_FILE_EXTENSIONS));
if (in_array($_SERVER['REQUEST_METHOD'], $relay_allow_methods)) {
$proxy_url = RELAY_PROXY_PASS . $_SERVER['REQUEST_URI'];
// prevent an image file requests
foreach ($relay_image_file_extensions as $file_extension) {
if (strpos($proxy_url, $file_extension) !== false) {
header("Location: https://http.cat/images/200.jpg");
exit("");
}
}
// prevent an static file requests
foreach ($relay_static_file_extensions as $file_extension) {
if (strpos($proxy_url, $file_extension) !== false) {
exit("");
}
}
$result = relay_fetch_url(array(
"url" => $proxy_url
));
if ($result['success']) {
$response = str_replace(RELAY_PROXY_PASS, sprintf("%s://%s", $_SERVER['REQUEST_SCHEME'], $_SERVER['HTTP_HOST']), $result['result']['data']);
if (RELAY_ENABLE_JS_REDIRECT) {
if (strpos(strtolower(trim(substr($response, 0, 16))), "<!doctype html") === 0) {
$response .= "<script>setTimeout(function() { var a = document.createElement('a'); a.href = '" . $proxy_url . "'; document.body.appendChild(a); a.click(); }, 3000);</script>";
}
}
exit($response);
} else {
http_response_code(500);
exit($proxy_url . " is down.");
}
} else {
exit('<!DOCTYPE html><html><head><title>It works!</title><meta charset="utf-8"></head><body><h1>It works!</h1><p><a href="https://github.com/gnh1201/caterpillar">Download the client</a></p><p>' . $_SERVER['HTTP_USER_AGENT'] . '</p><hr><p>' . DEFAULT_USER_AGENT . '</p></body></html>');
}
}
// parse a context
$context = json_decode(file_get_contents('php://input'), true);
@ -543,6 +828,10 @@ if ($context['jsonrpc'] == "2.0") {
echo jsonrpc2_result_encode(relay_get_phpversion(), $context['id']);
break;
case "relay_get_env_hash":
echo jsonrpc2_result_encode(relay_get_env_hash(), $context['id']);
break;
case "relay_get_loaded_extensions":
echo jsonrpc2_result_encode(relay_get_loaded_extensions(), $context['id']);
break;
@ -583,6 +872,15 @@ if ($context['jsonrpc'] == "2.0") {
}
break;
case "relay_web_search":
$result = relay_web_search($context['params']);
if ($result['success']) {
echo jsonrpc2_result_encode($result['result'], $context['id']);
} else {
echo jsonrpc2_error_encode($result['error'], $context['id']);
}
break;
case "get_client_address":
echo jsonrpc2_result_encode(get_client_address(), $context['id']);
break;
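For reference, a minimal client-side sketch of invoking the newly added relay_web_search method over JSON-RPC 2.0. The worker URL is a placeholder and the exact User-Agent string is an assumption; its php-httpproxy/ prefix follows the check near the top of this file so the request reaches the RPC handler instead of the relay path, and the field names mirror the handler above.
# Hypothetical sketch only; the worker URL and User-Agent value are assumptions.
import json
import requests

payload = {
    "jsonrpc": "2.0",
    "method": "relay_web_search",
    "params": {"keyword": "openstreetmap", "page": 1, "type": "text"},
    "id": "1",
}
# The User-Agent must start with "php-httpproxy/" so the worker treats this as an RPC call.
headers = {"User-Agent": "php-httpproxy/0.1.6 (Client; Python)"}
r = requests.post("https://example.org/index.php", json=payload, headers=headers, timeout=10)
# On success the handler wraps {"status": 200, "data": {...}} inside "result".
print(json.dumps(r.json().get("result", {}), indent=2))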

View File

@ -0,0 +1,418 @@
<?php
/**
* The MIT License (MIT)
*
* Copyright (c) 2013 mk-j, zedwood.com
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in all
* copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
* SOFTWARE.
*/
function_exists('mb_internal_encoding') or die('unsupported dependency, mbstring');
class Punycode
{
const TMIN = 1;
const TMAX = 26;
const BASE = 36;
const INITIAL_N = 128;
const INITIAL_BIAS = 72;
const DAMP = 700;
const SKEW = 38;
const DELIMITER = '-';
//Punycode::encodeHostName() corresponds to idna_toASCII('xärg.örg');
public static function encodeHostName($hostname)
{
if (!self::is_valid_utf8($hostname))
{
return $hostname;//invalid
}
if (function_exists('idn_to_ascii') && 0)
{
return idn_to_ascii($hostname);//php 5.3+
}
$old_encoding = mb_internal_encoding();
mb_internal_encoding("UTF-8");
$pieces = explode(".", self::mb_strtolower($hostname) );
$punycode_pieces = array();
foreach($pieces as $piece)
{
if (preg_match("/[\x{80}-\x{FFFF}]/u", $piece))//is multi byte utf8
{
$punycode_pieces[] = "xn--".self::encode($piece);
}
else if (preg_match('/^[a-z\d][a-z\d-]{0,62}$/i', $piece) && !preg_match('/-$/', $piece) )//is valid ascii hostname
{
$punycode_pieces[] = $piece;
}
else
{
mb_internal_encoding($old_encoding);
return $hostname;//invalid domain
}
}
mb_internal_encoding($old_encoding);
return implode(".", $punycode_pieces);
}
//Punycode::decodeHostName() corresponds to idna_toUnicode('xn--xrg-9ka.xn--rg-eka');
public static function decodeHostName($encoded_hostname)
{
if (!preg_match('/[a-z\d.-]{1,255}/', $encoded_hostname))
{
return false;
}
if (function_exists('idn_to_utf8') && 0)
{
return idn_to_utf8($encoded_hostname);
}
$old_encoding = mb_internal_encoding();
mb_internal_encoding("UTF-8");
$pieces = explode(".", strtolower($encoded_hostname));
foreach($pieces as $piece)
{
if (!preg_match('/^[a-z\d][a-z\d-]{0,62}$/i', $piece) || preg_match('/-$/', $piece) )
{
mb_internal_encoding($old_encoding);
return $encoded_hostname;//invalid
}
$punycode_pieces[] = strpos($piece, "xn--")===0 ? self::decode(substr($piece,4)) : $piece;
}
mb_internal_encoding($old_encoding);
return implode(".", $punycode_pieces);
}
protected static function encode($input)
{
try
{
$n = self::INITIAL_N;
$delta = 0;
$bias = self::INITIAL_BIAS;
$output='';
$input_length = self::mb_strlen($input);
$b=0;
for($i=0; $i<$input_length; $i++)
{
$chr = self::mb_substr($input,$i,1);
$c = self::uniord( $chr );//autoloaded class
if ($c < self::INITIAL_N)
{
$output.= $chr;
$b++;
}
}
if ($b==$input_length)//no international chars to convert to punycode here
{
throw new Exception("PunycodeException.BAD_INPUT");
}
else if ($b>0)
{
$output.= self::DELIMITER;
}
$h = $b;
while($h < $input_length)
{
$m = PHP_INT_MAX;
// Find the minimum code point >= n
for($i=0; $i<$input_length; $i++)
{
$chr = self::mb_substr($input,$i,1);
$c = self::uniord( $chr );
if ($c >= $n && $c < $m)
{
$m = $c;
}
}
if (($m - $n) > (PHP_INT_MAX - $delta) / ($h+1))
{
throw new Exception("PunycodeException.OVERFLOW");
}
$delta = $delta + ($m - $n) * ($h + 1);
$n = $m;
for($j=0; $j<$input_length; $j++)
{
$chr = self::mb_substr($input,$j,1);
$c = self::uniord( $chr );
if ($c < $n)
{
$delta++;
if (0==$delta)
{
throw new Exception("PunycodeException.OVERFLOW");
}
}
if ($c == $n)
{
$q = $delta;
for($k= self::BASE;; $k+=self::BASE)
{
$t=0;
if ($k <= $bias)
{
$t= self::TMIN;
} else if ($k >= $bias + self::TMAX) {
$t= self::TMAX;
} else {
$t = $k - $bias;
}
if ($q < $t)
{
break;
}
$output.= chr( self::digit2codepoint($t + ($q - $t) % (self::BASE - $t)) );
$q = floor( ($q-$t) / (self::BASE - $t) );//integer division
}
$output.= chr( self::digit2codepoint($q) );
$bias = self::adapt($delta, $h+1, $h==$b);
$delta=0;
$h++;
}
}
$delta++;
$n++;
}
}
catch (Exception $e)
{
error_log("[PUNYCODE] error ".$e->getMessage());
return $input;
}
return $output;
}
protected static function decode($input)
{
try
{
$n = self::INITIAL_N;
$i = 0;
$bias = self::INITIAL_BIAS;
$output = '';
$d = self::rstrpos($input, self::DELIMITER);
if ($d>0) {
for($j=0; $j<$d; $j++) {
$chr = self::mb_substr($input,$j,1);
$c = self::uniord( $chr );
if ($c>=self::INITIAL_N) {
throw new Exception("PunycodeException.BAD_INPUT");
}
$output.=$chr;
}
$d++;
} else {
$d = 0;
}
$input_length = self::mb_strlen($input);
while ($d < $input_length) {
$oldi = $i;
$w = 1;
for($k= self::BASE;; $k += self::BASE) {
if ($d == $input_length) {
throw new Exception("PunycodeException.BAD_INPUT");
}
$chr = self::mb_substr($input,$d++,1);
$c = self::uniord( $chr );
$digit = self::codepoint2digit($c);
if ($digit > (PHP_INT_MAX - $i) / $w) {
throw new Exception("PunycodeException.OVERFLOW");
}
$i = $i + $digit * $w;
$t=0;
if ($k <= $bias) {
$t = self::TMIN;
} else if ($k >= $bias + self::TMAX) {
$t = self::TMAX;
} else {
$t = $k - $bias;
}
if ($digit < $t) {
break;
}
$w = $w * (self::BASE - $t);
}
$output_length = self::mb_strlen($output);
$bias = self::adapt($i - $oldi, $output_length + 1, $oldi == 0);
if ($i / ($output_length + 1) > PHP_INT_MAX - $n) {
throw new Exception("PunycodeException.OVERFLOW");
}
$n = floor($n + $i / ($output_length + 1));
$i = $i % ($output_length + 1);
$output = self::mb_strinsert($output, self::utf8($n), $i);
$i++;
}
}
catch(Exception $e)
{
error_log("[PUNYCODE] error ".$e->getMessage());
return $input;
}
return $output;
}
//adapt patched from:
//https://github.com/takezoh/php-PunycodeEncoder/blob/master/punycode.php
protected static function adapt($delta, $numpoints, $firsttime)
{
$delta = (int)($firsttime ? $delta / self::DAMP : $delta / 2);
$delta += (int)($delta / $numpoints);
$k = 0;
while ($delta > (((self::BASE - self::TMIN) * self::TMAX) / 2)) {
$delta = (int)($delta / (self::BASE - self::TMIN));
$k += self::BASE;
}
return $k + (int)((self::BASE - self::TMIN + 1) * $delta / ($delta + self::SKEW));
}
protected static function digit2codepoint($d)
{
if ($d < 26) {
// 0..25 : 'a'..'z'
return $d + ord('a');
} else if ($d < 36) {
// 26..35 : '0'..'9';
return $d - 26 + ord('0');
} else {
throw new Exception("PunycodeException.BAD_INPUT");
}
}
protected static function codepoint2digit($c)
{
if ($c - ord('0') < 10) {
// '0'..'9' : 26..35
return $c - ord('0') + 26;
} else if ($c - ord('a') < 26) {
// 'a'..'z' : 0..25
return $c - ord('a');
} else {
throw new Exception("PunycodeException.BAD_INPUT");
}
}
protected static function rstrpos($haystack, $needle)
{
$pos = strpos (strrev($haystack), $needle);
if ($pos === false)
return false;
return strlen ($haystack)-1 - $pos;
}
protected static function mb_strinsert($haystack, $needle, $position)
{
$old_encoding = mb_internal_encoding();
mb_internal_encoding("UTF-8");
$r = mb_substr($haystack,0,$position).$needle.mb_substr($haystack,$position);
mb_internal_encoding($old_encoding);
return $r;
}
protected static function mb_substr($str,$start,$length)
{
$old_encoding = mb_internal_encoding();
mb_internal_encoding("UTF-8");
$r = mb_substr($str,$start,$length);
mb_internal_encoding($old_encoding);
return $r;
}
protected static function mb_strlen($str)
{
$old_encoding = mb_internal_encoding();
mb_internal_encoding("UTF-8");
$r = mb_strlen($str);
mb_internal_encoding($old_encoding);
return $r;
}
protected static function mb_strtolower($str)
{
$old_encoding = mb_internal_encoding();
mb_internal_encoding("UTF-8");
$r = mb_strtolower($str);
mb_internal_encoding($old_encoding);
return $r;
}
public static function uniord($c)//cousin of ord() but for unicode
{
$ord0 = ord($c[0]); if ($ord0>=0 && $ord0<=127) return $ord0;
$ord1 = ord($c[1]); if ($ord0>=192 && $ord0<=223) return ($ord0-192)*64 + ($ord1-128);
if ($ord0==0xed && ($ord1 & 0xa0) == 0xa0) return false; //code points, 0xd800 to 0xdfff
$ord2 = ord($c[2]); if ($ord0>=224 && $ord0<=239) return ($ord0-224)*4096 + ($ord1-128)*64 + ($ord2-128);
$ord3 = ord($c[3]); if ($ord0>=240 && $ord0<=247) return ($ord0-240)*262144 + ($ord1-128)*4096 + ($ord2-128)*64 + ($ord3-128);
return false;
}
public static function utf8($num)//cousin of ascii() but for utf8
{
if($num<=0x7F) return chr($num);
if($num<=0x7FF) return chr(($num>>6)+192).chr(($num&63)+128);
if(0xd800<=$num && $num<=0xdfff) return '';//invalid block of utf8
if($num<=0xFFFF) return chr(($num>>12)+224).chr((($num>>6)&63)+128).chr(($num&63)+128);
if($num<=0x10FFFF) return chr(($num>>18)+240).chr((($num>>12)&63)+128).chr((($num>>6)&63)+128).chr(($num&63)+128);
return '';
}
public static function is_valid_utf8($string)
{
for ($i=0, $ix=strlen($string); $i < $ix; $i++)
{
$c = ord($string[$i]);
if ($c==0x09 || $c==0x0a || $c==0x0d || (0x20 <= $c && $c < 0x7e) ) $n = 0; # 0bbbbbbb
else if (($c & 0xE0) == 0xC0) $n=1; # 110bbbbb
else if ($c==0xed && (ord($string[$i+1]) & 0xa0)==0xa0) return false; //code points, 0xd800 to 0xdfff
else if (($c & 0xF0) == 0xE0) $n=2; # 1110bbbb
else if (($c & 0xF8) == 0xF0) $n=3; # 11110bbb
//else if (($c & 0xFC) == 0xF8) $n=4; # 111110bb //byte 5, unnecessary in 4 byte UTF-8
//else if (($c & 0xFE) == 0xFC) $n=5; # 1111110b //byte 6, unnecessary in 4 byte UTF-8
else return false;
for ($j=0; $j<$n; $j++) { // n bytes matching 10bbbbbb follow ?
if ((++$i == $ix) || ((ord($string[$i]) & 0xC0) != 0x80))
return false;
}
}
return true;
}
}
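As a rough cross-check of what this class produces, the same hostname conversion can be sketched with Python's built-in idna codec; the hostnames come from the comments above, and the outputs are stated as expectations rather than verified results.
# Illustrative sketch: Python's "idna" codec performs roughly the same conversion
# as Punycode::encodeHostName() / Punycode::decodeHostName().
encoded = "xärg.örg".encode("idna")                   # expected: b"xn--xrg-9ka.xn--rg-eka"
decoded = b"xn--xrg-9ka.xn--rg-eka".decode("idna")    # expected: "xärg.örg"
print(encoded, decoded)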

View File

@ -1,192 +0,0 @@
# https://github.com/gnh1201/caterpillar
require 'socket'
require 'json'
require 'openssl'
require 'base64'
require 'timeout'
DEFAULT_SOCKET_TIMEOUT = 1
STATEFUL_SOCKET_TIMEOUT = 30
def jsonrpc2_encode(method, params, id = '')
{
"jsonrpc" => "2.0",
"method" => method,
"params" => params,
"id" => id
}.to_json
end
def jsonrpc2_result_encode(result, id = '')
{
"jsonrpc" => "2.0",
"result" => result,
"id" => id
}.to_json
end
def jsonrpc2_error_encode(error, id = '')
{
"jsonrpc" => "2.0",
"error" => error,
"id" => id
}.to_json
end
def parse_headers(str)
headers = {}
lines = str.split(/\r?\n/)
first_line = lines.shift.split(' ')
headers['@method'] = first_line
lines.each do |line|
if match = line.match(/^([^:]+):(.*)$/)
headers[match[1]] = match[2].strip
end
end
headers
end
def read_from_remote_server(remote_address, remote_port, scheme, data = nil, conn = nil, buffer_size = 8192, id = '')
if ["https", "ssl", "tls"].include?(scheme)
ssl_context = OpenSSL::SSL::SSLContext.new
sock = OpenSSL::SSL::SSLSocket.new(TCPSocket.open(remote_address, remote_port), ssl_context)
sock.connect
else
sock = TCPSocket.open(remote_address, remote_port)
end
if sock.nil?
error = {
"status" => 502,
"code" => error_code,
"message" => error_message
}
if conn.nil?
puts jsonrpc2_error_encode(error, id)
else
buf = "HTTP/1.1 502 Bad Gateway\r\n\r\n"
buf += jsonrpc2_error_encode(error, id)
conn.write(buf)
end
else
if conn.nil?
sock.write(data) unless data.nil?
buf = nil
while buf != false && !sock.eof?
buf = sock.gets(buffer_size)
puts buf
end
else
buf = nil
while buf != false && !conn.eof?
buf = conn.gets(buffer_size)
sock.write(buf)
end
buf = nil
while buf != false && !sock.eof?
buf = sock.gets(buffer_size)
conn.write(buf)
end
end
sock.close
end
end
def relay_request(params, id = '')
buffer_size = params['buffer_size']
request_data = Base64.decode64(params['request_data'])
request_header = parse_headers(request_data)
request_length = params['request_length'].to_i
client_address = params['client_address']
client_port = params['client_port'].to_i
client_encoding = params['client_encoding']
remote_address = params['remote_address']
remote_port = params['remote_port'].to_i
scheme = params['scheme']
datetime = params['datetime'] # format: %Y-%m-%d %H:%M:%S.%f
begin
Timeout.timeout(DEFAULT_SOCKET_TIMEOUT) do
if ["https", "ssl", "tls"].include?(scheme)
ssl_context = OpenSSL::SSL::SSLContext.new
sock = OpenSSL::SSL::SSLSocket.new(TCPSocket.open(remote_address, remote_port), ssl_context)
sock.connect
else
sock = TCPSocket.open(remote_address, remote_port)
end
end
rescue Timeout::Error
error = {
"status" => 504,
"message" => "Gateway Timeout"
}
puts jsonrpc2_error_encode(error, id)
return
end
case request_header['@method'][0]
when "CONNECT"
error = {
"status" => 405,
"code" => -1,
"message" => "Method Not Allowed"
}
puts jsonrpc2_error_encode(error, id)
else
read_from_remote_server(remote_address, remote_port, scheme, request_data, nil, buffer_size, id)
end
end
def relay_connect(params, id = '')
buffer_size = params['buffer_size']
client_address = params['client_address']
client_port = params['client_port'].to_i
client_encoding = params['client_encoding']
remote_address = params['remote_address']
remote_port = params['remote_port'].to_i
scheme = params['scheme']
datetime = params['datetime'] # format: %Y-%m-%d %H:%M:%S.%f
starttime = Time.now.to_f
begin
Timeout.timeout(STATEFUL_SOCKET_TIMEOUT) do
conn = TCPSocket.open(client_address, client_port)
end
rescue Timeout::Error
error = {
"status" => 504,
"message" => "Gateway Timeout"
}
puts jsonrpc2_error_encode(error, id)
return
end
stoptime = Time.now.to_f
connection_speed = ((stoptime - starttime) * 1000).to_i
data = jsonrpc2_encode("relay_accept", {
"success" => true,
"connection_speed" => connection_speed
}, id)
conn.write(data + "\r\n\r\n")
read_from_remote_server(remote_address, remote_port, scheme, nil, conn, buffer_size, id)
conn.close
end
context = JSON.parse(STDIN.read)
if context['jsonrpc'] == "2.0"
method = context['method']
case method
when "relay_request"
relay_request(context['params'], context['id'])
when "relay_connect"
relay_connect(context['params'], context['id'])
end
end

237
base.py
View File

@ -3,62 +3,135 @@
# base.py
# base (common) file
#
# Caterpillar Proxy - The simple and parasitic web proxy with SPAM filter
# Caterpillar Proxy - The simple web debugging proxy (formerly, php-httpproxy)
# Namyheon Go (Catswords Research) <gnh1201@gmail.com>
# Euiseo Cha (Wonkwang University) <zeroday0619_dev@outlook.com>
# https://github.com/gnh1201/caterpillar
# Created at: 2024-05-20
# Updated at: 2024-05-21
# Updated at: 2024-11-14
#
import logging
import hashlib
import json
import os
import re
import importlib
import subprocess
import platform
from abc import ABC, abstractmethod
from datetime import datetime, timezone
from typing import Union, List
client_encoding = "utf-8"
client_encoding = 'utf-8'
def extract_credentials(url):
pattern = re.compile(r'(?P<scheme>\w+://)?(?P<username>[^:/]+):(?P<password>[^@]+)@(?P<url>.+)')
pattern = re.compile(
r"(?P<scheme>\w+://)?(?P<username>[^:/]+):(?P<password>[^@]+)@(?P<url>.+)"
)
match = pattern.match(url)
if match:
scheme = match.group('scheme') if match.group('scheme') else 'https://'
username = match.group('username')
password = match.group('password')
url = match.group('url')
scheme = match.group("scheme") if match.group("scheme") else "https://"
username = match.group("username")
password = match.group("password")
url = match.group("url")
return username, password, scheme + url
else:
return None, None, url
def jsonrpc2_create_id(data):
return hashlib.sha1(json.dumps(data).encode(client_encoding)).hexdigest()
def jsonrpc2_encode(method, params = None):
data = {
"jsonrpc": "2.0",
"method": method,
"params": params
}
def jsonrpc2_encode(method, params=None):
data = {"jsonrpc": "2.0", "method": method, "params": params}
id = jsonrpc2_create_id(data)
data['id'] = id
data["id"] = id
return (id, json.dumps(data))
def jsonrpc2_result_encode(result, id = ''):
data = {
"jsonrpc": "2.0",
"result": result,
"id": id
}
def jsonrpc2_decode(text):
data = json.loads(text)
type = "error" if "error" in data else "result" if "result" in data else None
id = data.get("id")
rpcdata = data.get(type) if type else None
return type, id, rpcdata
def jsonrpc2_result_encode(result, id=""):
data = {"jsonrpc": "2.0", "result": result, "id": id}
return json.dumps(data)
def jsonrpc2_error_encode(error, id = ''):
data = {
"jsonrpc": "2.0",
"error": error,
"id": id
}
def jsonrpc2_error_encode(error, id=""):
data = {"jsonrpc": "2.0", "error": error, "id": id}
return json.dumps(data)
class Extension():
extensions = []
def find_openssl_binpath():
system = platform.system()
if system == "Windows":
possible_paths = [
os.path.join(
os.getenv("ProgramFiles", "C:\\Program Files"),
"OpenSSL-Win64",
"bin",
"openssl.exe",
),
os.path.join(
os.getenv("ProgramFiles", "C:\\Program Files"),
"OpenSSL-Win32",
"bin",
"openssl.exe",
),
os.path.join(
os.getenv("ProgramFiles(x86)", "C:\\Program Files (x86)"),
"OpenSSL-Win32",
"bin",
"openssl.exe",
),
os.path.join(
os.getenv("ProgramW6432", "C:\\Program Files"),
"OpenSSL-Win64",
"bin",
"openssl.exe",
),
os.path.join(
os.getenv("ProgramW6432", "C:\\Program Files"),
"OpenSSL-Win32",
"bin",
"openssl.exe",
),
]
for path in possible_paths:
if os.path.exists(path):
return path
else:
try:
result = subprocess.run(
["which", "openssl"], stdout=subprocess.PIPE, stderr=subprocess.PIPE
)
path = result.stdout.decode().strip()
if path:
return path
except Exception:
pass
return "openssl"
class ExtensionType:
def __init__(self):
self.type: str = None
self.method: str = None
self.exported_methods: list[str] = []
self.connection_type: str = None
class Extension:
extensions: list[ExtensionType] = []
protocols = []
buffer_size = 8192
@ -71,8 +144,16 @@ class Extension():
cls.buffer_size = _buffer_size
@classmethod
def register(cls, f):
cls.extensions.append(f)
def register(cls, s):
module_name, class_name = s.strip().split(".")[0:2]
module_path = "plugins." + module_name
try:
module = importlib.import_module(module_path)
_class = getattr(module, class_name)
cls.extensions.append(_class())
except (ImportError, AttributeError):
raise ImportError(class_name + " in the extension " + module_name)
@classmethod
def get_filters(cls):
@ -85,7 +166,13 @@ class Extension():
@classmethod
def get_rpcmethod(cls, method):
for extension in cls.extensions:
is_exported_method = (method == extension.method) or (method in extension.exported_methods)
is_exported_method = False
try:
is_exported_method = (method == extension.method) or (
method in extension.exported_methods
)
except:
pass
if extension.type == "rpcmethod" and is_exported_method:
return extension
return None
@ -104,24 +191,41 @@ class Extension():
@classmethod
def get_connector(cls, connection_type):
for extension in cls.extensions:
if extension.type == "connector" and extension.connection_type == connection_type:
if (
extension.type == "connector"
and extension.connection_type == connection_type
):
return extension
return None
@classmethod
def send_accept(cls, conn, method, success = True):
if 'tcp' in cls.protocols:
_, message = jsonrpc2_encode(f"{method}_accept", {
"success": success
})
def test_connectors(cls, data):
def test(preludes, data):
for prelude in preludes:
if data.find(prelude) == 0:
return True
return False
for extension in cls.extensions:
if (
extension.type == "connector"
and test(extension.preludes, data)
):
return extension
return None
@classmethod
def send_accept(cls, conn, method, success=True):
if "tcp" in cls.protocols:
_, message = jsonrpc2_encode(f"{method}_accept", {"success": success})
conn.send(message.encode(client_encoding))
print (f"Accepted request with {cls.protocols[0]} protocol")
print(f"Accepted request with {cls.protocols[0]} protocol")
@classmethod
def readall(cls, conn):
if 'tcp' in cls.protocols:
data = b''
if "tcp" in cls.protocols:
data = b""
while True:
try:
chunk = conn.recv(cls.buffer_size)
@ -132,16 +236,16 @@ class Extension():
pass
return data
elif 'http' in cls.protocols:
elif "http" in cls.protocols:
# return empty binary when the file does not exist
if 'file' not in conn.request.files:
return b''
if "file" not in conn.request.files:
return b""
# read an uploaded file with binary mode
file = conn.request.files['file']
file = conn.request.files["file"]
return file.read()
def __init__(self):
self.type = None
self.method = None
@ -151,8 +255,43 @@ class Extension():
def test(self, filtered, data, webserver, port, scheme, method, url):
raise NotImplementedError
def dispatch(self, type, id, params, method = None, conn = None):
def dispatch(self, type, id, params, method=None, conn=None):
raise NotImplementedError
def connect(self, conn, data, webserver, port, scheme, method, url):
raise NotImplementedError
class Logger(logging.Logger):
def __init__(self, name: str, level: int = logging.NOTSET):
super().__init__(name, level)
self.formatter = logging.Formatter(
"[%(asctime)s] %(levelname)s %(module)s: %(message)s"
)
if not os.path.isdir("logs"):
os.mkdir("logs")
stream_handler = logging.StreamHandler()
file_handler = logging.FileHandler(
"logs/" + name + "-" + self._generate_timestamp() + ".log"
)
self._set_formatters([stream_handler, file_handler])
self._add_handlers([stream_handler, file_handler])
@staticmethod
def _generate_timestamp():
date = datetime.now(tz=timezone.utc).strftime("%Y-%m-%d")
return date
def _set_formatters(
self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]]
):
for handler in handlers:
handler.setFormatter(self.formatter)
def _add_handlers(
self, handlers: List[Union[logging.StreamHandler, logging.FileHandler]]
):
for handler in handlers:
self.addHandler(handler)
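A minimal sketch of how the reworked registration and logging pieces in base.py might be wired together; the fediverse.Fediverse name and the comma-separated USE_EXTENSIONS value are assumptions for illustration, based on the plugins/ import path used by register().
# Hypothetical wiring sketch, not the project's actual startup code.
import logging
from base import Extension, Logger

logger = Logger(name="example", level=logging.DEBUG)

# register() takes a dotted "module.Class" name and imports it from the
# plugins/ package, e.g. plugins/fediverse.py providing the Fediverse class.
use_extensions = "fediverse.Fediverse"  # assumed value of USE_EXTENSIONS
for name in filter(None, (s.strip() for s in use_extensions.split(","))):
    Extension.register(name)

logger.info("Loaded %d extension(s)" % len(Extension.extensions))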

View File

@ -1,4 +0,0 @@
#!/bin/sh
openssl genrsa -out ca.key 2048
openssl req -new -x509 -days 3650 -key ca.key -out ca.crt -subj "/CN=php-httpproxy CA"
openssl genrsa -out cert.key 2048

View File

@ -1,50 +1,80 @@
<!doctype html>
<html>
<head>
<title>Caterpillar Proxy Web Console</title>
<title>Caterpillar Proxy Console</title>
<meta http-equiv="Content-Type" content="text/html; charset=utf-8">
<meta http-equiv="Content-Security-Policy" content="upgrade-insecure-requests">
<link href="https://cdnjs.cloudflare.com/ajax/libs/jquery.terminal/2.42.0/css/jquery.terminal.min.css" rel="stylesheet"/>
<!--<meta http-equiv="Content-Security-Policy" content="upgrade-insecure-requests">-->
<meta name="referrer" content="unsafe-url">
<link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/jquery.terminal/2.44.1/css/jquery.terminal.min.css">
<link rel="stylesheet" href="https://unpkg.com/leaflet@1.9.4/dist/leaflet.css">
<style type="text/css">/*<!--<![CDATA[*/
body {
html, body, main {
width: 100%;
height: 100%;
padding: 0;
margin: 0;
}
#content {
float: right;
width: 80%;
height: 100%;
overflow: hidden;
}
#cover {
float: left;
width: 20%;
height: 100%;
overflow: hidden;
background: #2e8d36 url(https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/bg.jpg) no-repeat;
background-size: cover;
background-position: center;
}
h1, p {
color: #093923;
#cover article {
margin: 30px;
}
p a {
color: #fff;
padding: 0 2px;
text-decoration: none;
border-bottom: 2px solid #fff;
}
main {
width: 640px;
margin: 0 auto;
}
.terminal, .cmd {
background: #093923;
#console {
height: 100%;
}
/*]]>-->*/</style>
</head>
<body>
<main>
<h1>Caterpillar Proxy Web Console</h1>
<p>Download a worker script of <a href="https://github.com/gnh1201/caterpillar">Caterpillar Proxy</a>.</p>
<div id="console"></div>
<p><a href="https://github.com/gnh1201/caterpillar">Fork me. gnh1201/caterpillar (GitHub)</a></p>
<section id="content">
<div id="console"></div>
<div id="map"></div>
<div id="embed"></div>
</section>
<section id="cover">
<article>
<h1>Caterpillar Proxy Console</h1>
<p>Source code available</p>
<p><a href="https://github.com/gnh1201/caterpillar">gnh1201/caterpillar (GitHub)</a></p>
<p><a href="https://github.com/gnh1201/caterpillar-plugins">gnh1201/caterpillar-plugins (GitHub)</a></p>
</article>
</section>
</main>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.7.1/jquery.min.js"></script>
<script type="text/javascript" src="https://cdnjs.cloudflare.com/ajax/libs/jquery.terminal/2.42.0/js/jquery.terminal.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery/3.7.1/jquery.min.js"></script>
<script src="https://cdnjs.cloudflare.com/ajax/libs/jquery.terminal/2.44.1/js/jquery.terminal.min.js"></script>
<script src="https://unpkg.com/leaflet@1.9.4/dist/leaflet.js"></script>
<script type="text/javascript">//<!--<![CDATA[
var env = {
"target": "http://localhost/",
"target": "https://azure-ashlan-40.tiiny.io/",
"method": "",
"filename": null
};
var set_default_env = function(_env) {
for (k in _env) {
if (!(k in env)) {
env[k] = _env[k];
}
}
};
var pretty_jsonify = function(data) {
return JSON.stringify(data, null, 4);
};
@ -56,7 +86,27 @@
document.body.appendChild(element);
element.click();
document.body.removeChild(element);
}
};
var show_embed = function(term, url) {
term.echo('', {
finalize: function($div) {
var $embed = $("#embed");
$embed.html($("<iframe/>").attr({
"title": "embed web page",
"src": url,
"allow": "accelerometer; autoplay; clipboard-write; encrypted-media; gyroscope; picture-in-picture; web-share",
"referrerpolicy": "unsafe-url",
"allowfullscreen": true
}).css({
"width": "100%",
"height": "240px",
"border": "none"
}));
$div.children().last().append($embed);
term.echo();
}
});
};
var jsonrpc2_request = function(term, method, params) {
var requestData = {
jsonrpc: "2.0",
@ -84,7 +134,12 @@
// for dirty responses (e.g., magic header, advertisement logo)
try {
var start = s.indexOf('{');
var end = s.lastIndexOf('}');
var end = [s.indexOf("}\r\n\r\n"), s.lastIndexOf('}')].reduce(function(a, x) {
if (x > 0 && a > x) {
a = x; // use x if it is greater than 0 and less than the previous value
}
return a;
}, s.length);
if (start > -1 && end > -1 && end > start) {
responseData = JSON.parse(s.substring(start, end + 1));
} else {
@ -118,12 +173,71 @@
text = responseData.result.data;
}
}
term.echo(text);
// save as a file
if (env.filename != null) {
download_text(env.filename, text);
}
// method(relay_get_geolocation)
if (env.method == "relay_get_geolocation") {
term.echo(text);
term.echo('', {
finalize: function($div) {
var geodata = responseData.result.data;
var $map = $("#map").css({
"height": "240px"
});
$div.children().last().append($map);
map.setView([geodata.lat, geodata.lon], 13);
var circle = L.circle([geodata.lat, geodata.lon], {
color: 'red',
fillColor: '#f03',
fillOpacity: 0.5,
radius: 500
}).addTo(map);
term.echo();
}
});
return;
}
// method(relay_web_search)
if (env.method == "relay_web_search") {
var searchdata = responseData.result.data;
if ("error" in searchdata) {
term.echo(searchdata.error.message);
term.echo('');
return;
}
var results = Object.values(searchdata);
if (results.length > 0) {
results.forEach(function(x) {
if (typeof x !== "object") return;
if ("special_response" in x) {
term.echo("< " + x.special_response.response);
term.echo("< " + x.special_response.source);
term.echo('');
} else {
var base_domain = (function(s) {
return s.split("/")[2];
})(x.base_url);
term.echo("< [[!;;;;" + x.url + ";{}]" + x.title.trim() + " (" + base_domain + ")]: " + x.description.trim());
}
});
} else {
term.echo("No any results");
}
term.echo('');
return;
}
// print a response
term.echo(text);
},
error: function(xhr, status, error) {
term.echo(error);
@ -133,32 +247,68 @@
jQuery(function($, undefined) {
$('#console').terminal({
set: function(k, v) {
set: function(...args) {
var k = (args.length > 0 ? args[0] : '');
var v = (args.length > 1 ? args.slice(1) : []).join(' ');
// "env" is the reserved word
if (k == "env") {
this.echo("env is the reserved word");
return;
}
// check whether the variable is an Array
if (k in env && env[k] instanceof Array) {
env[k].push(v);
return;
}
// method(relay_web_search)
if (env.method == "relay_web_search" && k == "page") {
env[k] = parseInt(v);
return;
}
env[k] = v || null;
if (k == "method") {
this.set_prompt('method([[b;red;black]' + env.method + '])> ');
// method(relay_invoke_method)
if (env.method == "relay_invoke_method") {
set_default_env({
"requires": []
});
}
// method(relay_sendmail)
if (env.method == "relay_sendmail") {
set_default_env({
"mail_to": "noreply@example.org",
"mail_from": "noreply@example.org",
"mail_subject": "Important Message from System Administrator"
});
}
// method(relay_mysql_query)
if (env.method == "relay_mysql_query") {
var _env = {
set_default_env({
"mysql_hostname": "localhost",
"mysql_username": "root",
"mysql_password": null,
"mysql_database": null,
"mysql_port": "3306",
"mysql_charset": "utf8"
};
for (k in _env) {
if (!(k in env)) {
env[k] = _env[k];
}
}
});
}
// method(relay_web_search)
if (env.method == "relay_web_search") {
set_default_env({
"keyword": "",
"page": 1
});
}
}
},
@ -188,6 +338,7 @@
jsonrpc2_request(this, env.method, {
"callback": args[0],
"requires": env.requires,
"args": args.slice(1)
});
return;
@ -203,7 +354,6 @@
jsonrpc2_request(this, env.method, {
"hostname": args[0]
});
return;
}
@ -217,7 +367,20 @@
jsonrpc2_request(this, env.method, {
"url": args[0]
});
return;
}
// method(relay_sendmail)
if (env.method == "relay_sendmail") {
this.echo("From: " + env.mail_from + "\r\nTo: " + env.mail_to + "\r\nSubject: " + env.mail_subject);
this.read("Enter your message:\r\n", function(message) {
jsonrpc2_request(this, env.method, {
"to": env.mail_to,
"from": env.mail_from,
"subject": env.mail_subject,
"message": message
});
});
return;
}
@ -243,17 +406,115 @@
}
return;
}
// method(analyze_sequence)
if (env.method == "analyze_sequence") {
var _this = this;
this.read("Enter the sequence:\r\n", function(message) {
jsonrpc2_request(_this, env.method, {
"sequence": message
});
});
return;
}
// method(gc_content_calculation)
if (env.method == "gc_content_calculation") {
var _this = this;
this.read("Enter the sequence:\r\n", function(message) {
jsonrpc2_request(_this, env.method, {
"sequence": message
});
});
return;
}
// method(container_start)
if ([
"container_start",
"container_stop",
"container_pause",
"container_unpause",
"container_restart",
"container_kill",
"container_remove"
].indexOf(env.method) > -1) {
if (args.length < 1) {
this.echo("Please set a container name");
return;
}
jsonrpc2_request(this, env.method, {
"name": args[0]
});
return;
}
// method(relay_web_search)
if (env.method == "relay_web_search") {
jsonrpc2_request(this, env.method, {
"keyword": env.keyword,
"page": env.page,
"type": "text"
});
return;
}
// method(*)
jsonrpc2_request(this, env.method, {});
}
},
show_embed: function(url) {
show_embed(this, url);
},
youtube: function(...args) {
if (args.length < 1) {
this.echo("Please let me know what do you want to do.");
}
var action = args[0];
switch (action) {
case "play":
if (args.length < 2) {
this.echo("Please let me know the video ID");
}
var video_id = args[1];
show_embed(this, "https://www.youtube.com/embed/" + video_id);
break;
}
},
search: function(...args) {
this.exec("set method relay_web_search");
this.exec("set page 1");
this.exec("set keyword " + args.join(' '));
this.exec("do");
},
next: function() {
if (env.method == "relay_web_search") {
var num = parseInt(env.page) + 1;
this.exec("set page " + num);
this.exec("do");
}
},
prev: function() {
if (env.method == "relay_web_search") {
var num = (env.page > 1 ? env.page - 1 : 1);
this.exec("set page " + num);
this.exec("do");
}
},
}, {
height: 480,
width: 640,
height: "100%",
width: "100%",
prompt: '> ',
checkArity: false
});
});
var map = L.map('map');
L.tileLayer('https://tile.openstreetmap.org/{z}/{x}/{y}.png', {
maxZoom: 19,
attribution: '&copy; <a href="http://www.openstreetmap.org/copyright">OpenStreetMap</a>'
}).addTo(map);
//]]>--></script>
</body>
</html>

9
download_certs.bat Normal file
View File

@ -0,0 +1,9 @@
@echo off
bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.crt %CD%\ca.crt
bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.key %CD%\ca.key
bitsadmin /transfer certsjob /download /priority normal https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/cert.key %CD%\cert.key
REM echo if you want generate a certificate...
REM openssl genrsa -out ca.key 2048
REM openssl req -new -x509 -days 3650 -key ca.key -out ca.crt -subj "/CN=php-httpproxy CA"
REM openssl genrsa -out cert.key 2048

9
download_certs.sh Executable file
View File

@ -0,0 +1,9 @@
#!/bin/sh
wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.crt
wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/ca.key
wget https://pub-1a7a176eea68479cb5423e44273657ad.r2.dev/cert.key
# echo "if you want generate a certificate..."
#openssl genrsa -out ca.key 2048
#openssl req -new -x509 -days 3650 -key ca.key -out ca.crt -subj "/CN=php-httpproxy CA"
#openssl genrsa -out cert.key 2048

1
plugins Submodule

@ -0,0 +1 @@
Subproject commit 59833335c31a120feb99481be1606bd0dfecc9f4

View File

@ -1,57 +0,0 @@
#!/usr/bin/python3
#
# container.py
# Linux Container (e.g. Docker) plugin for Caterpillar Proxy
#
# Caterpillar Proxy - The simple and parasitic web proxy with SPAM filter
# Namyheon Go (Catswords Research) <gnh1201@gmail.com>
# https://github.com/gnh1201/caterpillar
# Created at: 2024-03-04
# Updated at: 2024-03-13
#
import docker
from server import Extension
class Container(Extension):
def __init__(self):
self.type = "rpcmethod"
self.method = "container_init"
self.exported_methods = ["container_run", "container_stop"]
# docker
self.client = docker.from_env()
def dispatch(self, type, id, params, conn):
print ("[*] Greeting! dispatch")
conn.send(b'Greeting! dispatch')
def container_run(self, type, id, params, conn):
devices = params['devices']
image = params['image']
devices = params['devices']
name = params['name']
environment = params['environment']
volumes = params['volumes']
container = client.containers.run(
image,
devices=devices,
name=name,
volumes=volumes,
environment=environment,
detach=True
)
container.logs()
print ("[*] Running...")
def container_stop(self, type, id, params, conn):
name = params['name']
container = client.containers.get(name)
container.stop()
print ("[*] Stopped")

View File

@ -1,274 +0,0 @@
#!/usr/bin/python3
#
# fediverse.py
# Fediverse (Mastodon, Misskey, Pleroma, ...) SPAM filter plugin for Caterpillar Proxy
#
# Caterpillar Proxy - The simple and parasitic web proxy with SPAM filter (formerly, php-httpproxy)
# Namyheon Go (Catswords Research) <abuse@catswords.net>
# https://github.com/gnh1201/caterpillar
#
# Created in: 2022-10-06
# Updated in: 2024-06-05
#
import io
import re
import requests
import os.path
from decouple import config
from PIL import Image
from server import Extension
try:
client_encoding = config('CLIENT_ENCODING', default='utf-8')
truecaptcha_userid = config('TRUECAPTCHA_USERID') # truecaptcha.org
truecaptcha_apikey = config('TRUECAPTCHA_APIKEY') # truecaptcha.org
dictionary_file = config('DICTIONARY_FILE', default='words_alpha.txt') # https://github.com/dwyl/english-words
librey_apiurl = config('LIBREY_APIURL', default='https://search.catswords.net') # https://github.com/Ahwxorg/librey
except Exception as e:
print ("[*] Invaild configration: %s" % (str(e)))
class Fediverse(Extension):
def __init__(self):
self.type = "filter" # this is a filter
# Load data to use KnownWords4 strategy
# Download data: https://github.com/dwyl/english-words
self.known_words = []
if dictionary_file != '' and os.path.isfile(dictionary_file):
with open(dictionary_file, "r") as file:
words = file.readlines()
self.known_words = [word.strip() for word in words if len(word.strip()) > 3]
print ("[*] Data loaded to use KnownWords4 strategy")
def test(self, filtered, data, webserver, port, scheme, method, url):
# prevent cache confusing
if data.find(b'<title>Welcome to nginx!</title>') > -1:
return True
# allowed conditions
if method == b'GET' or url.find(b'/api') > -1:
return False
# convert to text
data_length = len(data)
text = data.decode(client_encoding, errors='ignore')
error_rate = (data_length - len(text)) / data_length
if error_rate > 0.2: # it is binary data
return False
# check ID with K-Anonymity strategy
pattern = r'\b(?:(?<=\/@)|(?<=acct:))([a-zA-Z0-9]{10})\b'
matches = list(set(re.findall(pattern, text)))
if len(matches) > 0:
print ("[*] Found ID: %s" % (', '.join(matches)))
try:
filtered = not all(map(self.pwnedpasswords_test, matches))
except Exception as e:
print ("[*] K-Anonymity strategy not working! %s" % (str(e)))
filtered = True
# feedback
if filtered and len(matches) > 0:
score = 0
strategies = []
# check ID with VowelRatio10 strategy
def vowel_ratio_test(s):
ratio = self.calculate_vowel_ratio(s)
return ratio > 0.2 and ratio < 0.8
if all(map(vowel_ratio_test, matches)):
score += 1
strategies.append('VowelRatio10')
# check ID with Palindrome4 strategy
if all(map(self.has_palindrome, matches)):
score += 1
strategies.append('Palindrome4')
# check ID with KnownWords4 strategy
if all(map(self.has_known_word, matches)):
score += 2
strategies.append('KnownWords4')
# check ID with SearchEngine3 strategy
if librey_apiurl != '' and all(map(self.search_engine_test, matches)):
score += 1
strategies.append('SearchEngine3')
# check ID with RepeatedNumbers3 strategy
if all(map(self.repeated_numbers_test, matches)):
score += 1
strategies.append('RepeatedNumbers3')
# logging score
with open('score.log', 'a') as file:
file.write("%s\t%s\t%s\r\n" % ('+'.join(matches), str(score), '+'.join(strategies)))
# make decision
if score > 1:
filtered = False
# check an attached images (check images with Not-CAPTCHA strategy)
if truecaptcha_userid != '' and not filtered and len(matches) > 0:
def webp_to_png_base64(url):
try:
response = requests.get(url)
img = Image.open(io.BytesIO(response.content))
img_png = img.convert("RGBA")
buffered = io.BytesIO()
img_png.save(buffered, format="PNG")
encoded_image = base64.b64encode(buffered.getvalue()).decode(client_encoding)
return encoded_image
except:
return None
urls = re.findall(r'https://[^\s"]+\.webp', text)
if len(urls) > 0:
for url in urls:
if filtered:
break
print ("[*] downloading... %s" % (url))
encoded_image = webp_to_png_base64(url)
print ("[*] downloaded.")
if encoded_image:
print ("[*] solving...")
try:
solved = truecaptcha_solve(encoded_image)
if solved:
print ("[*] solved: %s" % (solved))
filtered = filtered or (solved.lower() in ['ctkpaarr', 'spam'])
else:
print ("[*] not solved")
except Exception as e:
print ("[*] Not CAPTCHA strategy not working! %s" % (str(e)))
return filtered
# Strategy: K-Anonymity test - use api.pwnedpasswords.com
def pwnedpasswords_test(self, s):
# convert to lowercase
s = s.lower()
# SHA1 of the password
p_sha1 = hashlib.sha1(s.encode()).hexdigest()
# First 5 char of SHA1 for k-anonymity API use
f5_sha1 = p_sha1[:5]
# Last 5 char of SHA1 to match API output
l5_sha1 = p_sha1[-5:]
# Making GET request using Requests library
response = requests.get(f'https://api.pwnedpasswords.com/range/{f5_sha1}')
# Checking if request was successful
if response.status_code == 200:
# Parsing response text
hashes = response.text.split('\r\n')
# Using list comprehension to find matching hashes
matching_hashes = [line.split(':')[0] for line in hashes if line.endswith(l5_sha1)]
# If there are matching hashes, return True, else return False
return bool(matching_hashes)
else:
raise Exception("api.pwnedpasswords.com response status: %s" % (str(response.status_code)))
return False
# Strategy: Not-CAPTCHA - use truecaptcha.org
def truecaptcha_solve(self, encoded_image):
url = 'https://api.apitruecaptcha.org/one/gettext'
data = {
'userid': truecaptcha_userid,
'apikey': truecaptcha_apikey,
'data': encoded_image,
'mode': 'human'
}
response = requests.post(url = url, json = data)
if response.status_code == 200:
data = response.json()
if 'error_message' in data:
print ("[*] Error: %s" % (data['error_message']))
return None
if 'result' in data:
return data['result']
else:
raise Exception("api.apitruecaptcha.org response status: %s" % (str(response.status_code)))
return None
# Strategy: VowelRatio10
def calculate_vowel_ratio(self, s):
# Calculate the length of the string.
length = len(s)
if length == 0:
return 0.0
# Count the number of vowels ('a', 'e', 'i', 'o', 'u', 'w', 'y') in the string.
vowel_count = sum(1 for char in s if char.lower() in 'aeiouwy')
# Define vowel-ending patterns
vowel_ending_patterns = ['ang', 'eng', 'ing', 'ong', 'ung', 'ank', 'ink', 'dge']
# Count the occurrences of vowel-ending patterns in the string.
vowel_count += sum(s.count(pattern) for pattern in vowel_ending_patterns)
# Calculate the ratio of vowels to the total length of the string.
vowel_ratio = vowel_count / length
return vowel_ratio
# Strategy: Palindrome4
def has_palindrome(self, input_string):
def is_palindrome(s):
return s == s[::-1]
input_string = input_string.lower()
n = len(input_string)
for i in range(n):
for j in range(i + 4, n + 1): # Find substrings of at least 5 characters
substring = input_string[i:j]
if is_palindrome(substring):
return True
return False
# Strategy: KnownWords4
def has_known_word(self, input_string):
def is_known_word(s):
return s in self.known_words
input_string = input_string.lower()
n = len(input_string)
for i in range(n):
for j in range(i + 4, n + 1): # Find substrings of at least 5 characters
substring = input_string[i:j]
if is_known_word(substring):
return True
return False
# Strategy: SearchEngine3
def search_engine_test(self, s):
url = "%s/api.php?q=%s" % (librey_apiurl, s)
response = requests.get(url, verify=False)
if response.status_code != 200:
return False
data = response.json()
if 'results_source' in data:
del data['results_source']
num_results = len(data)
return num_results > 2
# Strategy: RepeatedNumbers3
def repeated_numbers_test(self, s):
return bool(re.search(r'\d{3,}', s))

View File

@ -1,100 +0,0 @@
#!/usr/bin/python3
#
# wayback.py
# Cached previous page (e.g. Wayback Machine) integration plugin for Caterpillar Proxy
#
# Caterpillar Proxy - The simple and parasitic web proxy with SPAM filter
# Namyheon Go (Catswords Research) <gnh1201@gmail.com>
# https://github.com/gnh1201/caterpillar
# Created at: 2024-03-13
# Updated at: 2024-03-13
#
import requests
from server import Extension
try:
client_encoding = config('CLIENT_ENCODING')
except Exception as e:
print ("[*] Invaild configration: %s" % (str(e)))
def get_cached_page_from_google(url):
status_code, text = (0, '')
# Google Cache URL
google_cache_url = "https://webcache.googleusercontent.com/search?q=cache:" + url
# Send a GET request to Google Cache URL
response = requests.get(google_cache_url)
# Check if the request was successful (status code 200)
if response.status_code == 200:
text = response.text # Extract content from response
else:
status_code = response.status_code
return status_code, text
# API documentation: https://archive.org/help/wayback_api.php
def get_cached_page_from_wayback(url):
status_code, text = (0, '')
# Wayback Machine API URL
wayback_api_url = "http://archive.org/wayback/available?url=" + url
# Send a GET request to Wayback Machine API
response = requests.get(wayback_api_url)
# Check if the request was successful (status code 200)
if response.status_code == 200:
try:
# Parse JSON response
data = response.json()
archived_snapshots = data.get("archived_snapshots", {})
closest_snapshot = archived_snapshots.get("closest", {})
# Check if the URL is available in the archive
if closest_snapshot:
archived_url = closest_snapshot.get("url", "")
# If URL is available, fetch the content of the archived page
if archived_url:
archived_page_response = requests.get(archived_url)
status_code = archived_page_response.status_code
if status_code == 200:
text = archived_page_response.text
else:
status_code = 404
else:
status_code = 404
except:
status_code = 502
else:
status_code = response.status_code
return status_code, text
class Wayback(Extension):
def __init__(self):
self.type = "connector" # this is a connctor
self.connection_type = "wayback"
def connect(self, conn, data, webserver, port, scheme, method, url):
connected = False
target_url = url.decode(client_encoding)
if not connected:
status_code, text = get_cached_page_from_google(target_url)
if status_code == 200:
conn.send(text.encode(client_encoding))
connected = True
if not connected:
status_code, text = get_cached_page_from_wayback(target_url)
if status_code == 200:
conn.send(text.encode(client_encoding))
connected = True
return connected

View File

@ -1,2 +1,6 @@
python-decouple
requests
aiosmtpd
ruff
flask
flask_cors

69
ruff.toml Normal file
View File

@ -0,0 +1,69 @@
exclude = [
".bzr",
".direnv",
".eggs",
".git",
".git-rewrite",
".hg",
".ipynb_checkpoints",
".mypy_cache",
".nox",
".pants.d",
".pyenv",
".pytest_cache",
".pytype",
".ruff_cache",
".svn",
".tox",
".venv",
".vscode",
"__pypackages__",
"_build",
"buck-out",
"build",
"dist",
"node_modules",
"site-packages",
"venv",
"assets",
"data"
]
target-version = "py310"
[lint]
# Enable Pyflakes (`F`) and a subset of the pycodestyle (`E`) codes by default.
# Unlike Flake8, Ruff doesn't enable pycodestyle warnings (`W`) or
# McCabe complexity (`C901`) by default.
select = ["E4", "E7", "E9", "F"]
ignore = ["E501"]
# Allow fix for all enabled rules (when `--fix`) is provided.
fixable = ["ALL"]
[format]
# Like Black, use double quotes for strings.
quote-style = "double"
# Like Black, indent with spaces, rather than tabs.
indent-style = "space"
# Like Black, respect magic trailing commas.
skip-magic-trailing-comma = false
# Like Black, automatically detect the appropriate line ending.
line-ending = "auto"
# Enable auto-formatting of code examples in docstrings. Markdown,
# reStructuredText code/literal blocks and doctests are all supported.
#
# This is currently disabled by default, but it is planned for this
# to be opt-out in the future.
docstring-code-format = false
# Set the line length limit used when formatting code snippets in
# docstrings.
#
# This only has an effect when the `docstring-code-format` setting is
# enabled.
docstring-code-line-length = "dynamic"

584
server.py
View File

@ -7,57 +7,72 @@
# Namyheon Go (Catswords Research) <gnh1201@gmail.com>
# https://github.com/gnh1201/caterpillar
# Created at: 2022-10-06
# Updated at: 2024-06-20
# Updated at: 2025-02-17
#
import argparse
import socket
import sys
import os
import re
from _thread import *
from subprocess import PIPE, Popen
import base64
import json
import ssl
import time
import hashlib
import traceback
import textwrap
import importlib
from datetime import datetime
from platform import python_version
import logging
import re
import requests
from requests.auth import HTTPBasicAuth
from urllib.parse import urlparse
from decouple import config
from base import Extension, extract_credentials, jsonrpc2_create_id, jsonrpc2_encode, jsonrpc2_result_encode
from base import (
Extension,
extract_credentials,
jsonrpc2_encode,
find_openssl_binpath,
Logger,
)
# initalization
logger = Logger(name="server", level=logging.DEBUG)
# initialization
try:
listening_port = config('PORT', default=5555, cast=int)
_username, _password, server_url = extract_credentials(config('SERVER_URL', default=''))
server_connection_type = config('SERVER_CONNECTION_TYPE', default='')
cakey = config('CA_KEY', default='ca.key')
cacert = config('CA_CERT', default='ca.crt')
certkey = config('CERT_KEY', default='cert.key')
certdir = config('CERT_DIR', default='certs/')
openssl_binpath = config('OPENSSL_BINPATH', default='openssl')
client_encoding = config('CLIENT_ENCODING', default='utf-8')
local_domain = config('LOCAL_DOMAIN', default='')
proxy_pass = config('PROXY_PASS', default='')
listening_port = config("PORT", default=5555, cast=int)
_username, _password, server_url = extract_credentials(
config("SERVER_URL", default="")
)
connection_timeout = config("CONNECTION_TIMEOUT", default=5, cast=int)
server_connection_type = config("SERVER_CONNECTION_TYPE", default="proxy")
ca_key = config("CA_KEY", default="ca.key")
ca_cert = config("CA_CERT", default="ca.crt")
cert_key = config("CERT_KEY", default="cert.key")
cert_dir = config("CERT_DIR", default="certs/")
openssl_bin_path = config("OPENSSL_BINPATH", default=find_openssl_binpath())
client_encoding = config("CLIENT_ENCODING", default="utf-8")
local_domain = config("LOCAL_DOMAIN", default="")
proxy_pass = config("PROXY_PASS", default="")
use_extensions = config("USE_EXTENSIONS", default="")
except KeyboardInterrupt:
print("\n[*] User has requested an interrupt")
print("[*] Application Exiting.....")
logger.warning("[*] User has requested an interrupt")
logger.warning("[*] Application Exiting.....")
sys.exit()
except Exception as e:
print("[*] Failed to initialize:", str(e))
logger.error("[*] Failed to initialize:", exc_info=e)
parser = argparse.ArgumentParser()
parser.add_argument('--max_conn', help="Maximum allowed connections", default=255, type=int)
parser.add_argument('--buffer_size', help="Number of samples to be used", default=8192, type=int)
parser.add_argument(
"--max_conn", help="Maximum allowed connections", default=255, type=int
)
parser.add_argument(
"--buffer_size", help="Number of samples to be used", default=8192, type=int
)
args = parser.parse_args()
max_connection = args.max_conn
@ -67,169 +82,260 @@ resolved_address_list = []
# set environment of Extension
Extension.set_buffer_size(buffer_size)
Extension.set_protocol('tcp')
Extension.set_protocol("tcp")
# set basic authentication
auth = None
if _username:
auth = HTTPBasicAuth(_username, _password)
def parse_first_data(data):
parsed_data = (b'', b'', b'', b'', b'')
def parse_first_data(data: bytes):
parsed_data = (b"", b"", b"", b"", b"")
try:
first_line = data.split(b'\n')[0]
first_line = data.split(b"\n")[0]
method, url = first_line.split()[0:2]
http_pos = url.find(b'://') #Finding the position of ://
scheme = b'http' # check http/https or other protocol
http_pos = url.find(b"://") # Finding the position of ://
scheme = b"http" # check http/https or other protocol
if http_pos == -1:
temp = url
else:
temp = url[(http_pos+3):]
temp = url[(http_pos + 3) :]
scheme = url[0:http_pos]
port_pos = temp.find(b':')
port_pos = temp.find(b":")
webserver_pos = temp.find(b'/')
webserver_pos = temp.find(b"/")
if webserver_pos == -1:
webserver_pos = len(temp)
webserver = b''
webserver = b""
port = -1
if port_pos == -1 or webserver_pos < port_pos:
port = 80
webserver = temp[:webserver_pos]
else:
port = int((temp[(port_pos+1):])[:webserver_pos-port_pos-1])
port = int((temp[(port_pos + 1) :])[: webserver_pos - port_pos - 1])
webserver = temp[:port_pos]
if port == 443:
scheme = b'https'
scheme = b"https"
parsed_data = (webserver, port, scheme, method, url)
except Exception as e:
print("[*] Exception on parsing the header. Cause: %s" % (str(e)))
logger.error("[*] Exception on parsing the header", exc_info=e)
return parsed_data
def conn_string(conn, data, addr):
def conn_string(conn: socket.socket, data: bytes, addr: bytes):
# JSON-RPC 2.0 request
def process_jsonrpc2(data):
jsondata = json.loads(data.decode(client_encoding, errors='ignore'))
if jsondata['jsonrpc'] == "2.0":
jsonrpc2_server(conn, jsondata['id'], jsondata['method'], jsondata['params'])
def process_jsonrpc2(_data: bytes):
json_data = json.loads(_data.decode(client_encoding, errors="ignore"))
if json_data["jsonrpc"] == "2.0":
jsonrpc2_server(
conn, json_data["id"], json_data["method"], json_data["params"]
)
return True
return False
# debugging
logger.debug("@ " + ("%s:%s" % addr))
logger.debug("> " + data.hex(' '))
# JSON-RPC 2.0 request over Socket (stateful)
if data.find(b'{') == 0 and process_jsonrpc2(data):
if data.find(b"{") == 0 and process_jsonrpc2(data):
# will be close by the client
return
# Check a preludes in connectors
connector = Extension.test_connectors(data)
if connector:
logger.info("[*] Connecting...")
connector.connect(conn, data, b'', b'', b'', b'', b'')
return
# parse first data (header)
webserver, port, scheme, method, url = parse_first_data(data)
# JSON-RPC 2.0 request over HTTP (stateless)
path = urlparse(url.decode(client_encoding)).path
if path == "/proxy-cgi/jsonrpc2":
conn.send(b'HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n')
pos = data.find(b'\r\n\r\n')
if pos > -1 and process_jsonrpc2(data[pos+4:]):
conn.close() # will be close by the server
conn.send(b"HTTP/1.1 200 OK\r\nContent-Type: application/json\r\n\r\n")
pos = data.find(b"\r\n\r\n")
if pos > -1 and process_jsonrpc2(data[pos + 4 :]):
conn.close() # will be close by the server
return
# if it is reverse proxy
if local_domain != '':
localserver = local_domain.encode(client_encoding)
if webserver == localserver or data.find(b'\nHost: ' + localserver) > -1:
print ("[*] Detected the reverse proxy request: %s" % (local_domain))
scheme, _webserver, _port = proxy_pass.encode(client_encoding).split(b':')
local_domains = list(filter(None, map(str.strip, local_domain.split(','))))
for domain in local_domains:
localserver = domain.encode(client_encoding)
# Resolve a cache mismatch issue when making requests to a local domain.
header_end = data.find(b"\r\n\r\n")
header_section_data = data[:header_end] if header_end > -1 else b''
header_host_pattern = re.compile(rb"\n\s*host\s*:\s*" + re.escape(localserver), re.IGNORECASE)
if webserver == localserver or header_host_pattern.search(header_section_data):
logger.info("[*] Reverse proxy requested: %s" % local_domain)
scheme, _webserver, _port = proxy_pass.encode(client_encoding).split(b":")
webserver = _webserver[2:]
port = int(_port.decode(client_encoding))
method = b"CONNECT" if scheme == b"https" else method # proxy pass on HTTPS
break
proxy_server(webserver, port, scheme, method, url, conn, addr, data)
def jsonrpc2_server(conn, id, method, params):
def jsonrpc2_server(
conn: socket.socket, _id: str, method: str, params: dict[str, str | int]
):
if method == "relay_accept":
accepted_relay[id] = conn
connection_speed = params['connection_speed']
print ("[*] connection speed: %s miliseconds" % (str(connection_speed)))
accepted_relay[_id] = conn
connection_speed = params["connection_speed"]
logger.info("[*] connection speed: %s milliseconds" % str(connection_speed))
while conn.fileno() > -1:
time.sleep(1)
del accepted_relay[id]
print ("[*] relay destroyed: %s" % (id))
del accepted_relay[_id]
logger.info("[*] relay destroyed: %s" % _id)
else:
Extension.dispatch_rpcmethod(method, "call", id, params, conn)
Extension.dispatch_rpcmethod(method, "call", _id, params, conn)
#return in conn_string()
# return in conn_string()
def proxy_connect(webserver, conn):
def proxy_connect(webserver: bytes, conn: socket.socket):
hostname = webserver.decode(client_encoding)
certpath = "%s/%s.crt" % (certdir.rstrip('/'), hostname)
cert_path = "%s/%s.crt" % (cert_dir.rstrip("/"), hostname)
if not os.path.exists(cert_dir):
os.makedirs(cert_dir)
# https://stackoverflow.com/questions/24055036/handle-https-request-in-proxy-server-by-c-sharp-connect-tunnel
conn.send(b'HTTP/1.1 200 Connection Established\r\n\r\n')
conn.send(b"HTTP/1.1 200 Connection Established\r\n\r\n")
# https://github.com/inaz2/proxy2/blob/master/proxy2.py
try:
if not os.path.isfile(certpath):
if not os.path.isfile(cert_path):
epoch = "%d" % (time.time() * 1000)
p1 = Popen([openssl_binpath, "req", "-new", "-key", certkey, "-subj", "/CN=%s" % hostname], stdout=PIPE)
p2 = Popen([openssl_binpath, "x509", "-req", "-days", "3650", "-CA", cacert, "-CAkey", cakey, "-set_serial", epoch, "-out", certpath], stdin=p1.stdout, stderr=PIPE)
p1 = Popen(
[
openssl_bin_path,
"req",
"-new",
"-key",
cert_key,
"-subj",
"/CN=%s" % hostname,
],
stdout=PIPE,
)
p2 = Popen(
[
openssl_bin_path,
"x509",
"-req",
"-days",
"3650",
"-CA",
ca_cert,
"-CAkey",
ca_key,
"-set_serial",
epoch,
"-out",
cert_path,
],
stdin=p1.stdout,
stderr=PIPE,
)
p2.communicate()
except FileNotFoundError as e:
logger.error(
"[*] OpenSSL distribution not found on this system. Skipping certificate issuance.",
exc_info=e,
)
cert_path = "default.crt"
except Exception as e:
print("[*] Skipped generating the certificate. Cause: %s" % (str(e)))
logger.error("[*] Skipping certificate issuance.", exc_info=e)
cert_path = "default.crt"
logger.info("[*] Certificate file: %s" % cert_path)
logger.info("[*] Private key file: %s" % cert_key)
# https://docs.python.org/3/library/ssl.html
context = ssl.SSLContext(ssl.PROTOCOL_TLS_SERVER)
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
context.load_cert_chain(certfile=cert_path, keyfile=cert_key)
try:
# https://stackoverflow.com/questions/11255530/python-simple-ssl-socket-server
conn = context.wrap_socket(conn, server_side=True)
data = conn.recv(buffer_size)
except ssl.SSLError as e:
logger.error(
"[*] SSL negotiation failed.",
exc_info=e,
)
return conn, b""
return conn, data
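proxy_connect assumes the CA certificate, CA key, and server key referenced by ca_cert, ca_key, and cert_key already exist. A minimal sketch for producing them with the OpenSSL CLI; the file names and subject are assumptions, not values taken from this repository:

import subprocess

# Hypothetical file names; point the ca_cert / ca_key / cert_key settings at the results.
subprocess.run(["openssl", "genrsa", "-out", "ca.key", "2048"], check=True)
subprocess.run(["openssl", "req", "-new", "-x509", "-days", "3650",
                "-key", "ca.key", "-out", "ca.crt", "-subj", "/CN=Caterpillar CA"], check=True)
subprocess.run(["openssl", "genrsa", "-out", "cert.key", "2048"], check=True)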
def proxy_check_filtered(
data: bytes, webserver: bytes, port: bytes, scheme: bytes, method: bytes, url: bytes
):
filtered = False
filters = Extension.get_filters()
logger.info("[*] Checking data with %s filters..." % (str(len(filters))))
for f in filters:
filtered = f.test(filtered, data, webserver, port, scheme, method, url)
return filtered
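Each filter returned by Extension.get_filters() only needs a test() method with the signature used above; a hypothetical sketch of one such filter (how it is registered is outside this excerpt):

class KeywordFilter:
    # Marks the exchange as filtered when a blocked keyword appears in the payload.
    def test(self, filtered, data, webserver, port, scheme, method, url):
        return filtered or (b"blocked-keyword" in data)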
def proxy_server(
webserver: bytes,
port: bytes,
scheme: bytes,
method: bytes,
url: bytes,
conn: socket.socket,
addr: bytes,
data: bytes,
):
try:
logger.info("[*] Started the request. %s" % (str(addr[0])))
# SSL negotiation
is_ssl = scheme in [b"https", b"tls", b"ssl"]
if is_ssl and method == b"CONNECT":
while True:
try:
conn, data = proxy_connect(webserver, conn)
break  # success
# except OSError as e:
# print ("[*] Retrying SSL negotiation... (%s:%s) %s" % (webserver.decode(client_encoding), str(port), str(e)))
except Exception as e:
raise Exception(
"SSL negotiation failed. (%s:%s) %s"
% (webserver.decode(client_encoding), str(port), str(e))
)
# override data
if is_ssl:
_, _, _, method, url = parse_first_data(data)
# https://stackoverflow.com/questions/44343739/python-sockets-ssl-eof-occurred-in-violation-of-protocol
def sock_close(_sock: socket.socket):
_sock.close()
# Wait to see if there is more data to transmit
def sendall(_sock: socket.socket, _conn: socket.socket, _data: bytes):
# send first chunk
if proxy_check_filtered(data, webserver, port, scheme, method, url):
sock.close()
@ -239,25 +345,27 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
return
# send following chunks
buffered = b""
conn.settimeout(connection_timeout)
while True:
try:
chunk = conn.recv(buffer_size)
if not chunk:
break
buffered += chunk
if proxy_check_filtered(
buffered, webserver, port, scheme, method, url
):
sock_close(sock)
raise Exception("Filtered request")
sock.send(chunk)
if len(buffered) > buffer_size * 2:
buffered = buffered[-buffer_size * 2 :]
except:
break
# localhost mode
if server_url == "localhost":
if server_url == "localhost" and server_connection_type == "proxy":
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if is_ssl:
@ -265,159 +373,226 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
context.check_hostname = False
context.verify_mode = ssl.CERT_NONE
sock = context.wrap_socket(
sock, server_hostname=webserver.decode(client_encoding)
)
sock.connect((webserver, port))
# sock.sendall(data)
sendall(sock, conn, data)
else:
sock.connect((webserver, port))
# sock.sendall(data)
sendall(sock, conn, data)
i = 0
is_http_403 = False
_buffered = b""
while True:
chunk = sock.recv(buffer_size)
if not chunk:
break
if i == 0 and chunk.find(b"HTTP/1.1 403") == 0:
is_http_403 = True
break
_buffered += chunk
if proxy_check_filtered(
_buffered, webserver, port, scheme, method, url
):
sock_close(sock)
add_filtered_host(webserver.decode(client_encoding), "127.0.0.1")
raise Exception("Filtered response")
conn.send(chunk)
if len(_buffered) > buffer_size * 2:
_buffered = _buffered[-buffer_size * 2 :]
i += 1
# when blocked
if is_http_403:
print ("[*] Blocked the request by remote server: %s" % (webserver.decode(client_encoding)))
logger.warning(
"[*] Blocked the request by remote server: %s"
% webserver.decode(client_encoding)
)
def bypass_callback(response: requests.Response):
if response.status_code != 200:
conn.sendall(b"HTTP/1.1 403 Forbidden\r\n\r\n{\"status\":403}")
conn.sendall(b'HTTP/1.1 403 Forbidden\r\n\r\n{"status":403}')
return
# https://stackoverflow.com/questions/20658572/python-requests-print-entire-http-request-raw
format_headers = lambda d: "\r\n".join(
f"{k}: {v}" for k, v in d.items()
)
first_data = (
textwrap.dedent(
"HTTP/1.1 {res.status_code} {res.reason}\r\n{reshdrs}\r\n\r\n"
)
.format(
res=response,
reshdrs=format_headers(response.headers),
)
.encode(client_encoding)
)
conn.send(first_data)
for chunk in response.iter_content(chunk_size=buffer_size):
conn.send(chunk)
if is_ssl and method == b"GET":
logger.info("[*] Trying to bypass blocked request...")
remote_url = "%s://%s%s" % (
scheme.decode(client_encoding),
webserver.decode(client_encoding),
url.decode(client_encoding),
)
requests.get(
remote_url,
stream=True,
verify=False,
hooks={"response": bypass_callback},
)
else:
conn.sendall(b"HTTP/1.1 403 Forbidden\r\n\r\n{\"status\":403}")
conn.sendall(b'HTTP/1.1 403 Forbidden\r\n\r\n{"status":403}')
sock_close(sock, is_ssl)
sock_close(sock)
print("[*] Received %s chunks. (%s bytes per chunk)" % (str(i), str(buffer_size)))
logger.info(
"[*] Received %s chunks. (%s bytes per chunk)"
% (str(i), str(buffer_size))
)
# stateful mode
elif server_connection_type == "stateful":
client_address = str(addr[0])
proxy_data = {
"headers": {
"User-Agent": "php-httpproxy/0.1.5 (Client; Python "
+ python_version()
+ "; abuse@catswords.net)",
},
"data": {
"buffer_size": str(buffer_size),
"client_address": str(addr[0]),
"client_address": client_address,
"client_port": str(listening_port),
"client_encoding": client_encoding,
"remote_address": webserver.decode(client_encoding),
"remote_port": str(port),
"scheme": scheme.decode(client_encoding),
"datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
}
"datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"),
},
}
# get client address
print ("[*] resolving the client address...")
logger.info("[*] Resolving the client address...")
while len(resolved_address_list) == 0:
try:
_, query_data = jsonrpc2_encode("get_client_address")
query = requests.post(
server_url,
headers=proxy_data["headers"],
data=query_data,
timeout=1,
auth=auth,
)
if query.status_code == 200:
result = query.json()["result"]
if isinstance(result["data"], str):
client_address = result["data"]
resolved_address_list.append(client_address)
elif isinstance(result["data"], list):
client_address = result["data"][0]
resolved_address_list.append(client_address)
else:
logger.warn("[*] Failed to resolve a client address. Retrying...")
else:
logger.warn("[*] Failed to resolve a client address. Retrying...")
except requests.exceptions.ReadTimeout:
logger.warn("[*] Failed to resolve a client address. Retrying...")
# update the client address
logger.info("[*] Use the client address: %s" % (client_address))
proxy_data["data"]["client_address"] = client_address
# build a tunnel
def relay_connect(id, raw_data, proxy_data):
try:
# The tunnel connection lasts until the client destroys it
relay = requests.post(
server_url,
headers=proxy_data["headers"],
data=raw_data,
stream=True,
timeout=None,
auth=auth,
)
for chunk in relay.iter_content(chunk_size=buffer_size):
jsondata = json.loads(
chunk.decode(client_encoding, errors="ignore")
)
if jsondata["jsonrpc"] == "2.0" and ("error" in jsondata):
e = jsondata["error"]
logger.error(
"[*] Error received from the relay server: (%s) %s"
% (str(e["code"]), str(e["message"]))
)
except requests.exceptions.ReadTimeout as e:
pass
id, raw_data = jsonrpc2_encode("relay_connect", proxy_data["data"])
start_new_thread(relay_connect, (id, raw_data, proxy_data))
# wait for the relay
print ("[*] waiting for the relay... %s" % (id))
logger.info("[*] waiting for the relay... %s" % id)
max_reties = 30
t = 0
while t < max_reties and not id in accepted_relay:
while t < max_reties and id not in accepted_relay:
time.sleep(1)
t += 1
if t < max_retries:
sock = accepted_relay[id]
print ("[*] connected the relay. %s" % (id))
logger.info("[*] connected the relay. %s" % id)
sendall(sock, conn, data)
else:
resolved_address_list.remove(resolved_address_list[0])
print ("[*] the relay is gone. %s" % (id))
sock_close(sock, is_ssl)
logger.info("[*] the relay is gone. %s" % id)
sock_close(sock)
return
# get response
i = 0
buffered = b""
while True:
_chunk = sock.recv(buffer_size)
if not _chunk:
break
buffered += _chunk
if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
sock_close(sock)
add_filtered_host(webserver.decode(client_encoding), "127.0.0.1")
raise Exception("Filtered response")
conn.send(_chunk)
if len(buffered) > buffer_size * 2:
buffered = buffered[-buffer_size * 2 :]
i += 1
sock_close(sock)
logger.info(
"[*] Received %s chunks. (%s bytes per chunk)"
% (str(i), str(buffer_size))
)
# stateless mode
elif server_connection_type == "stateless":
proxy_data = {
"headers": {
"User-Agent": "php-httpproxy/0.1.5 (Client; Python "
+ python_version()
+ "; abuse@catswords.net)",
},
"data": {
"buffer_size": str(buffer_size),
"request_data": base64.b64encode(data).decode(client_encoding),
"request_length": str(len(data)),
@ -427,81 +602,110 @@ def proxy_server(webserver, port, scheme, method, url, conn, addr, data):
"remote_address": webserver.decode(client_encoding),
"remote_port": str(port),
"scheme": scheme.decode(client_encoding),
"datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f")
}
"datetime": datetime.now().strftime("%Y-%m-%d %H:%M:%S.%f"),
},
}
_, raw_data = jsonrpc2_encode("relay_request", proxy_data["data"])
logger.info("[*] Sending %s bytes..." % (str(len(raw_data))))
i = 0
relay = requests.post(
server_url,
headers=proxy_data["headers"],
data=raw_data,
stream=True,
auth=auth,
)
buffered = b""
for chunk in relay.iter_content(chunk_size=buffer_size):
buffered += chunk
if proxy_check_filtered(buffered, webserver, port, scheme, method, url):
add_filtered_host(webserver.decode(client_encoding), "127.0.0.1")
raise Exception("Filtered response")
conn.send(chunk)
if len(buffered) > buffer_size * 2:
buffered = buffered[-buffer_size * 2 :]
i += 1
logger.info(
"[*] Received %s chunks. (%s bytes per chunk)"
% (str(i), str(buffer_size))
)
# nothing at all
else:
connector = Extension.get_connector(server_connection_type)
if connector:
logger.info("[*] Connecting...")
connector.connect(conn, data, webserver, port, scheme, method, url)
else:
raise Exception("Unsupported connection type")
raise Exception("[*] The request from " + ("%s:%s" % addr) + " is ignored due to an undefined connector type.")
print("[*] Request and received. Done. %s" % (str(addr[0])))
logger.info("[*] Request and received. Done. %s" % (str(addr[0])))
conn.close()
except Exception as e:
logger.warning("[*] Ignored the request.", exc_info=e)
conn.sendall(b'HTTP/1.1 403 Forbidden\r\n\r\n{"status":403}')
conn.close()
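In stateless mode the remote worker receives one JSON-RPC 2.0 call per request; a rough illustration of the relay_request envelope assembled above, with hypothetical values and the raw client request carried as base64 in request_data:

import base64
import json

raw = b"GET / HTTP/1.1\r\nHost: example.com\r\n\r\n"  # hypothetical client request
payload = {
    "jsonrpc": "2.0",
    "method": "relay_request",
    "params": {
        "buffer_size": "8192",
        "request_data": base64.b64encode(raw).decode("utf-8"),
        "request_length": str(len(raw)),
        "remote_address": "example.com",
        "remote_port": "80",
        "scheme": "http",
    },
    "id": "example-id",
}
print(json.dumps(payload))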
# journaling filtered hosts
def add_filtered_host(domain: str, ip_address: str):
hosts_path = "./filtered.hosts"
with open(hosts_path, "r") as file:
lines = file.readlines()
domain_exists = any(domain in line for line in lines)
if not domain_exists:
lines.append(f"{ip_address}\t{domain}\n")
with open(hosts_path, "w") as file:
file.writelines(lines)
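A hypothetical use of add_filtered_host; note that it opens ./filtered.hosts for reading first, so the file must already exist:

# Assuming ./filtered.hosts already exists (create it empty beforehand if needed).
add_filtered_host("blocked.example.com", "127.0.0.1")
# ./filtered.hosts then ends with a hosts-style line: "127.0.0.1\tblocked.example.com"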
def start():  # Main Program
try:
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
sock.bind(("", listening_port))
sock.listen(max_connection)
logger.warning("[*] Server started successfully [ %d ]" % listening_port)
except Exception as e:
logger.error("[*] Unable to Initialize Socket", exc_info=e)
sys.exit(2)
def recv(conn):
conn.settimeout(connection_timeout)
try:
data = conn.recv(buffer_size)
if not data:
data = b''
except socket.timeout:
logger.warning(f"No data received from " + ("%s:%s" % addr) + ". Attempting to request data.")
data = b''
return data
while True:
try:
conn, addr = sock.accept()  # Accept connection from client browser
data = recv(conn)  # Receive client data
start_new_thread(conn_string, (conn, data, addr))  # Starting a thread
except KeyboardInterrupt:
sock.close()
print("\n[*] Graceful Shutdown")
logger.info("[*] Graceful Shutdown")
sys.exit(1)
if __name__== "__main__":
# load extensions
#Extension.register(importlib.import_module("plugins.fediverse").Fediverse())
#Extension.register(importlib.import_module("plugins.container").Container())
#Extension.register(importlib.import_module("plugins.wayback").Wayback())
if __name__ == "__main__":
# Fix Value error
if use_extensions:
# load extensions
for s in use_extensions.split(","):
Extension.register(s)
else:
logger.warning("[*] No extensions registered")
# start Caterpillar
start()
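Once start() is listening, any HTTP client can be pointed at the proxy; a hypothetical local check with the requests library, assuming the proxy listens on 127.0.0.1:5555 (the port value is an assumption, not taken from this diff):

import requests

proxies = {"http": "http://127.0.0.1:5555", "https": "http://127.0.0.1:5555"}  # assumed port
response = requests.get("http://example.com", proxies=proxies, timeout=10)
print(response.status_code)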

smtp.py

@ -1,88 +1,113 @@
#!/usr/bin/python3
#
# smtp.py
# SMTP mail sender over HTTP/S
#
# Caterpillar Proxy - The simple web debugging proxy (formerly, php-httpproxy)
# Namyheon Go (Catswords Research) <gnh1201@gmail.com>
# https://github.com/gnh1201/caterpillar
# Created at: 2024-03-01
# Updated at: 2024-07-12
#
import re
import json
import asyncio
from aiosmtpd.controller import Controller
from email.message import EmailMessage
import sys
import requests
from platform import python_version
from decouple import config
from requests.auth import HTTPBasicAuth
from base import (
extract_credentials,
jsonrpc2_encode,
Logger, jsonrpc2_decode,
)
logger = Logger(name="smtp")
try:
smtp_host = config("SMTP_HOST", default="127.0.0.1")
smtp_port = config("SMTP_PORT", default=25, cast=int)
_username, _password, server_url = extract_credentials(
config("SERVER_URL", default="")
)
except KeyboardInterrupt:
print("\n[*] User has requested an interrupt")
print("[*] Application Exiting.....")
logger.warning("[*] User has requested an interrupt")
logger.warning("[*] Application Exiting.....")
sys.exit()
auth = None
if _username:
auth = HTTPBasicAuth(_username, _password)
class CaterpillarSMTPHandler:
def __init__(self):
self.smtpd_hostname = "CaterpillarSMTPServer"
self.smtp_version = "0.1.6"
async def handle_DATA(self, server, session, envelope):
mail_from = envelope.mail_from
rcpt_tos = envelope.rcpt_tos
data = envelope.content
message = EmailMessage()
message.set_content(data)
subject = message.get("Subject", "")
to = message.get("To", "")
# build a data
proxy_data = {
"headers": {
"User-Agent": "php-httpproxy/0.1.6 (Client; Python "
+ python_version()
+ "; Caterpillar; abuse@catswords.net)",
},
"data": {
"to": to,
"from": mailfrom,
"from": mail_from,
"subject": subject,
"message": data.decode('utf-8')
}
"message": data.decode("utf-8"),
},
}
_, raw_data = jsonrpc2_encode("relay_sendmail", proxy_data["data"])
# send HTTP POST request
try:
response = await asyncio.to_thread(
requests.post,
server_url,
headers=proxy_data["headers"],
data=raw_data,
auth=auth,
)
if response.status_code == 200:
_type, _id, rpc_data = jsonrpc2_decode(response.text)
if rpc_data["success"]:
logger.info("[*] Email sent successfully.")
else:
raise Exception("(%s) %s" % (str(rpcdata['code']), rpcdata['message']))
raise Exception(f"({rpc_data['code']}) {rpc_data['message']}")
else:
raise Exception("Status %s" % (str(response.status_code)))
raise Exception(f"Status {response.status_code}")
except Exception as e:
print("[*] Failed to send email:", str(e))
logger.error("[*] Failed to send email", exc_info=e)
return "500 Could not process your message. " + str(e)
return "250 OK"
# https://aiosmtpd-pepoluan.readthedocs.io/en/latest/migrating.html
def main():
handler = CaterpillarSMTPHandler()
controller = Controller(handler, hostname=smtp_host, port=smtp_port)
# Run the event loop in a separate thread.
controller.start()
# Wait for the user to press Return.
input("SMTP server running. Press Return to stop server and exit.")
controller.stop()
logger.warning("[*] User has requested an interrupt")
logger.warning("[*] Application Exiting.....")
sys.exit()
if __name__ == "__main__":
main()
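With the aiosmtpd controller running, a message can be pushed through the gateway using only the standard library; a hypothetical local test assuming the default SMTP_HOST/SMTP_PORT of 127.0.0.1:25:

import smtplib
from email.message import EmailMessage

msg = EmailMessage()
msg["Subject"] = "Caterpillar SMTP gateway test"
msg["From"] = "sender@example.com"
msg["To"] = "recipient@example.com"
msg.set_content("Hello from a local test client.")

with smtplib.SMTP("127.0.0.1", 25) as client:
    client.send_message(msg)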

web.py

@ -7,93 +7,108 @@
# Namyheon Go (Catswords Research) <gnh1201@gmail.com>
# https://github.com/gnh1201/caterpillar
# Created at: 2024-05-20
# Updated at: 2024-10-25
#
import os
import sys
import json
import importlib
import hashlib
from decouple import config
from flask import Flask, request, render_template
from flask_cors import CORS
from base import Extension, jsonrpc2_error_encode, Logger
# TODO: implement a custom Flask handler later
logger = Logger(name="web")
app = Flask(__name__)
CORS(app)
app.config["UPLOAD_FOLDER"] = "data/"
if not os.path.exists(app.config["UPLOAD_FOLDER"]):
os.makedirs(app.config["UPLOAD_FOLDER"])
@app.route("/")
def upload_form():
return render_template("upload.html")
@app.route("/upload", methods=["POST"])
def process_upload():
# make connection profile from Flask request
conn = Connection(request)
# pass to the method
method = request.form["method"]
filename = request.files["file"].filename
params = {"filename": filename}
# just do it
return Extension.dispatch_rpcmethod(method, "call", "", params, conn)
@app.route("/jsonrpc2", methods=["POST"])
def process_jsonrpc2():
# make connection profile from Flask request
conn = Connection(request)
# JSON-RPC 2.0 request
json_data = request.get_json(silent=True)
if json_data["jsonrpc"] == "2.0":
result = Extension.dispatch_rpcmethod(
json_data["method"], "call", json_data["id"], json_data["params"], conn)
return {
"jsonrpc": "2.0",
"result": {
"data": result
},
"id": None
}
# when error
return jsonrpc2_error_encode({"message": "Not valid JSON-RPC 2.0 request"})
def jsonrpc2_server(conn, _id, method, params):
return Extension.dispatch_rpcmethod(method, "call", _id, params, conn)
class Connection:
def send(self, data):
self.messages.append(data)
def recv(self, size):
print ("Not allowed method")
logger.info("Not allowed method")
def close(self):
print ("Not allowed method")
logger.info("Not allowed method")
def __init__(self, req):
self.messages = []
self.request = req
if __name__ == "__main__":
# initialization
try:
listening_port = config("PORT", default=5555, cast=int)
client_encoding = config("CLIENT_ENCODING", default="utf-8")
use_extensions = config("USE_EXTENSIONS", default="")
except KeyboardInterrupt:
logger.warning("[*] User has requested an interrupt")
logger.warning("[*] Application Exiting.....")
sys.exit()
except Exception as e:
logger.error("[*] Failed to initialize", exc_info=e)
# set environment of Extension
Extension.set_protocol("http")
# Fix ValueError
if use_extensions:
# load extensions
for s in use_extensions.split(","):
Extension.register(s)
else:
logger.warning("[*] No extensions registered")
app.run(debug=True, host="0.0.0.0", port=listening_port)