Skip to content

Commit db8d5b1

Browse files
authored
Merge pull request #8 from GhostTypes/sentinel-fix-ssrf-2155436756076202811
🛡️ Sentinel: [CRITICAL] Fix SSRF in proxy endpoint
2 parents b177828 + b0a709d commit db8d5b1

3 files changed

Lines changed: 130 additions & 7 deletions

File tree

.jules/sentinel.md

Lines changed: 4 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,4 @@
1+
## 2024-05-22 - SSRF in Proxy Endpoint
2+
**Vulnerability:** The `/api/proxy/<path:url>` endpoint accepted and requested any URL provided by the user, including `localhost` and private IP ranges. This allowed potential access to internal network services or the proxy server itself.
3+
**Learning:** Proxy applications are inherently vulnerable to SSRF. Relying on "it's just a local tool" is insufficient as it might be deployed in containers or environments with access to other services.
4+
**Prevention:** Implemented strict URL validation (`is_safe_url`) that resolves the hostname and checks whether the resolved IP belongs to private/loopback blocks before making the request. Also added request timeouts and generic error messages to prevent DoS and information leakage. Note that resolve-then-fetch checks remain subject to DNS-rebinding (TOCTOU) races; pinning the vetted IP for the outbound request would close that gap.

server.py

Lines changed: 48 additions & 7 deletions
Original file line numberDiff line numberDiff line change
@@ -1,7 +1,9 @@
11
import cloudscraper
22
import time
3+
import socket
4+
import ipaddress
35

4-
from urllib.parse import unquote
6+
from urllib.parse import unquote, urlparse
57
from flask import Flask, request, Response
68

79
scraper = cloudscraper.create_scraper(
@@ -41,13 +43,44 @@ def set_origin_and_ref(headers, origin, ref):
4143

4244
def generate_origin_and_ref(url, headers):
    """Derive Origin/Referer headers from the target URL's scheme and host.

    For a URL shaped like ``scheme://host/...`` the base ``scheme://host/``
    is applied as both the Origin and the Referer. A malformed URL (fewer
    than three '/'-separated parts) leaves the headers untouched.
    """
    parts = url.split('/')
    if len(parts) > 2:
        scheme, host = parts[0], parts[2]
        base = f"{scheme}//{host}/"
        headers = set_origin_and_ref(headers, base, base)
    return headers
4952

5053

54+
def is_safe_url(url):
    """
    Validate a proxy target URL to prevent SSRF attacks.

    Rejects non-HTTP(S) schemes and any hostname for which *any* resolved
    address (IPv4 or IPv6) is loopback, private, link-local, reserved,
    multicast or unspecified.

    Returns:
        tuple[bool, str | None]: ``(True, None)`` when the URL is allowed,
        otherwise ``(False, reason)`` with a user-safe error message.

    NOTE(review): resolve-then-fetch validation is still subject to a
    DNS-rebinding (TOCTOU) race — a fully robust fix would pin the vetted
    IP for the actual outbound request.
    """
    try:
        parsed = urlparse(url)
        if parsed.scheme not in ('http', 'https'):
            return False, "Only HTTP/HTTPS protocols are allowed"

        hostname = parsed.hostname
        if not hostname:
            return False, "Invalid hostname"

        try:
            # Check every address the hostname maps to (A *and* AAAA).
            # gethostbyname() only returned the first IPv4 record, so a
            # host with an extra private A record or a private AAAA record
            # could bypass the check when the HTTP client picked a
            # different address than the one we vetted.
            infos = socket.getaddrinfo(hostname, None)
            ips = {ipaddress.ip_address(info[4][0]) for info in infos}
        except (socket.gaierror, ValueError):
            # Fail closed: if we cannot resolve/parse the host, refuse to proxy.
            return False, "Could not resolve hostname or invalid IP"

        for ip in ips:
            if (ip.is_loopback or ip.is_private or ip.is_link_local
                    or ip.is_reserved or ip.is_multicast or ip.is_unspecified):
                return False, "Access to private/local network is forbidden"

        return True, None
    except Exception:
        # Any unexpected parsing failure is treated as unsafe, not a crash.
        return False, "Invalid URL format"
82+
83+
5184
app = Flask(__name__)
5285

5386
HOP_BY_HOP_HEADERS = {
@@ -133,11 +166,18 @@ def get_proxy_request_headers(req, url):
133166
def handle_proxy(url):
134167
if request.method == 'GET':
135168
full_url = get_proxy_request_url(request, url) # parse request url
169+
170+
# Sentinel: SSRF Protection
171+
is_safe, error_msg = is_safe_url(full_url)
172+
if not is_safe:
173+
return {'error': error_msg}, 400
174+
136175
headers = get_proxy_request_headers(request, url) # generate headers for the request
137176

138177
try:
139178
start = time.time()
140-
response = scraper.get(full_url, headers=headers)
179+
# Sentinel: Added timeout to prevent hanging
180+
response = scraper.get(full_url, headers=headers, timeout=30)
141181
end = time.time()
142182
elapsed = end - start
143183
print(f"Proxied request for {full_url.split('?')[0]} in {elapsed:.6f} seconds")
@@ -147,7 +187,8 @@ def handle_proxy(url):
147187

148188
except Exception as e:
149189
print(f"Proxy Request Error: {str(e)}")
150-
return {'error': str(e)}, 500
190+
# Sentinel: Don't leak stack traces or internal details
191+
return {'error': "Proxy request failed. Check server logs for details."}, 500
151192

152193

153194
if __name__ == "__main__":

tests/test_ssrf.py

Lines changed: 78 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,78 @@
1+
import subprocess
2+
import time
3+
import requests
4+
import sys
5+
import os
6+
7+
SERVER_PORT = 5000
8+
BASE_URL = f"http://localhost:{SERVER_PORT}"
9+
10+
def wait_for_server():
    """Poll the local server until it answers, for up to ~15 seconds.

    Any HTTP response at all (even a 404 from the root path) counts as
    "up"; only a refused connection means the server is not listening yet.
    Returns True once reachable, False after 30 failed attempts.
    """
    for _ in range(30):
        try:
            requests.get(BASE_URL)
            return True
        except requests.exceptions.ConnectionError:
            time.sleep(0.5)
    return False
21+
22+
def test_ssrf():
    """End-to-end SSRF check: boot server.py and probe the proxy endpoint."""
    print("Starting server...")
    # sys.executable guarantees the same interpreter that runs this script.
    server = subprocess.Popen(
        [sys.executable, "server.py"],
        stdout=subprocess.PIPE,
        stderr=subprocess.PIPE,
    )

    try:
        if not wait_for_server():
            print("Server failed to start")
            sys.exit(1)

        print("Server started. Running tests...")

        # Case 1: a normal external target. Sandboxes without internet may
        # make this fail at the network layer, but our validation must not
        # be the thing rejecting it (i.e. no 400 from is_safe_url).
        print("Testing valid external URL...")
        try:
            external = requests.get(f"{BASE_URL}/api/proxy/http://example.com")
            print(f"External URL Status: {external.status_code}")
        except Exception as e:
            print(f"External URL Request failed: {e}")

        # Case 2: point the proxy at its own host. The server has no root
        # route, so a successful (vulnerable) proxy round-trip surfaces the
        # internal 404 (or 200/500 if it tried and blew up); a 400/403
        # means the SSRF validation blocked the request before sending it.
        print("Testing SSRF to localhost...")
        target_url = f"http://127.0.0.1:{SERVER_PORT}/"
        proxy_url = f"{BASE_URL}/api/proxy/{target_url}"

        resp = requests.get(proxy_url)
        print(f"SSRF Status: {resp.status_code}")

        status = resp.status_code
        if status == 404:
            print("VULNERABILITY CONFIRMED: Proxied request to localhost (received 404 from internal).")
        elif status == 200:
            print("VULNERABILITY CONFIRMED: Proxied request to localhost (received 200).")
        elif status == 500:
            # 500 still means the server attempted the internal request.
            print("VULNERABILITY CONFIRMED: Proxied request to localhost (received 500).")
        elif status in (400, 403):
            print("SECURE: Request to localhost blocked.")
        else:
            print(f"Unexpected status: {status}")

    finally:
        print("Stopping server...")
        server.terminate()
        server.wait()
76+
77+
if __name__ == "__main__":
78+
test_ssrf()

0 commit comments

Comments
 (0)