Skip to content

Commit 2627173

Browse files
committed
Add adaptive timeout settings and optimize scan options to prevent timeouts
1 parent 7660430 commit 2627173

4 files changed

Lines changed: 140 additions & 8 deletions

File tree

config.yaml

Lines changed: 7 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -69,6 +69,13 @@ sqlmap:
6969
max_risk: 2
7070
max_threads: 20
7171
max_timeout: 600
72+
timeout_settings:
73+
initial_scan: 120
74+
follow_up_scan: 300
75+
data_extraction: 240
76+
complex_scan: 480
77+
adaptive_multiplier: 2.0
78+
max_adaptive_timeout: 600
7279
ui:
7380
confirm_dangerous_operations: true
7481
enable_colors: true

sqlmap_ai/ai_analyzer.py

Lines changed: 46 additions & 5 deletions
Original file line numberDiff line numberDiff line change
@@ -77,11 +77,36 @@ def ai_suggest_next_steps(report, scan_history=None, extracted_data=None):
7777
if any(opt.startswith('--data=') for opt in options) and "json" in ' '.join(options).lower() and not any(opt == '--json' for opt in options):
7878
options.append("--json")
7979

80-
# Filter out options that might cause issues
80+
# Filter out options that might cause issues and optimize for timeout prevention
8181
valid_options = []
82+
has_timeout_risk = False
83+
8284
for opt in options:
83-
if not opt.startswith('-d ') and not opt == '-d' and not opt == '--dump-all':
84-
valid_options.append(opt)
85+
# Skip potentially problematic options
86+
if opt.startswith('-d ') or opt == '-d' or opt == '--dump-all':
87+
continue
88+
89+
# Check for high-complexity options that might cause timeouts
90+
if any(high_risk in opt for high_risk in ['--level=4', '--level=5', '--risk=4', '--risk=5', '--dump-all']):
91+
has_timeout_risk = True
92+
93+
valid_options.append(opt)
94+
95+
# If we have timeout risk, suggest a more conservative approach
96+
if has_timeout_risk:
97+
print_warning("High-complexity options detected. Consider using more conservative settings to avoid timeouts.")
98+
# Replace high-risk options with safer alternatives
99+
safer_options = []
100+
for opt in valid_options:
101+
if opt == '--level=4' or opt == '--level=5':
102+
safer_options.append('--level=3')
103+
elif opt == '--risk=4' or opt == '--risk=5':
104+
safer_options.append('--risk=2')
105+
elif opt == '--dump-all':
106+
safer_options.append('--tables') # Start with table enumeration instead
107+
else:
108+
safer_options.append(opt)
109+
valid_options = safer_options
85110

86111
if not valid_options and structured_info.get("dbms", "").lower() == "sqlite":
87112
print_info("Using SQLite-specific options as fallback")
@@ -109,6 +134,13 @@ def create_advanced_prompt(report, structured_info, scan_history=None, extracted
109134
Look at the scan report, previous steps, and any data extracted to decide the most effective next steps.
110135
Analyze what has been discovered so far and what remains to be explored.
111136
137+
# IMPORTANT: TIMEOUT CONSIDERATIONS
138+
- Avoid suggesting overly aggressive options that might cause timeouts
139+
- Prefer incremental approaches over comprehensive scans
140+
- Start with lower levels (1-2) and risks (1-2) before escalating
141+
- Use specific techniques rather than broad enumeration when possible
142+
- Consider the target's response time and stability
143+
112144
# SCAN REPORT SUMMARY:
113145
DBMS: {dbms}
114146
Vulnerable Parameters: {vulnerable_params}
@@ -132,6 +164,14 @@ def create_advanced_prompt(report, structured_info, scan_history=None, extracted
132164
3. Dumping interesting tables when appropriate
133165
4. Using techniques that haven't been tried yet
134166
5. Avoiding techniques that have failed
167+
6. Using conservative settings to prevent timeouts
168+
169+
# OPTIMIZATION GUIDELINES:
170+
- Start with level 1-2 and risk 1-2 for initial scans
171+
- Use specific techniques (B, E, U, S, T) rather than all at once
172+
- Prefer targeted enumeration over broad scanning
173+
- Use --tables before --dump to avoid excessive data extraction
174+
- Consider using --threads=3-5 for better performance
135175
136176
# DBMS-SPECIFIC GUIDELINES:
137177
- For SQLite databases: Use '--tables' instead of '--dbs' as SQLite doesn't support database enumeration.
@@ -142,7 +182,7 @@ def create_advanced_prompt(report, structured_info, scan_history=None, extracted
142182
# SQL INJECTION SCENARIOS:
143183
- Classic GET Parameter: For URLs like 'http://target.com/page.php?id=1', use basic options like '--dbs'
144184
- URL Path Parameter: For URLs like 'http://target.com/page/1/', use asterisk as injection marker (e.g., 'page/1*') and '--dbs'
145-
- Multiple Parameters: For URLs with multiple parameters, specify which to test with '-p' or use '--level=3' to test all
185+
- Multiple Parameters: For URLs with multiple parameters, specify which to test with '-p' or use '--level=2' to test all
146186
- POST Parameter: Use '--data' or '--forms' to test POST parameters
147187
- Cookie-Based: Use '--cookie' to specify cookie values to test
148188
- Header-Based: Use '--headers' to test HTTP headers for injection
@@ -155,8 +195,9 @@ def create_advanced_prompt(report, structured_info, scan_history=None, extracted
155195
}}
156196
```
157197
158-
Each option should be a separate string in the array (e.g., "--level=3", "--risk=2").
198+
Each option should be a separate string in the array (e.g., "--level=2", "--risk=1").
159199
Be specific and concise. Don't include basic options like -u (URL) as these will be added automatically.
200+
Prefer conservative settings to avoid timeouts.
160201
"""
161202
report_lines = report.split('\n')
162203
report_excerpt = '\n'.join(report_lines[-30:]) if len(report_lines) > 30 else report

sqlmap_ai/config_manager.py

Lines changed: 70 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -434,3 +434,73 @@ def save_config() -> bool:
434434
def validate_config() -> List[str]:
435435
"""Validate current configuration"""
436436
return config_manager.validate_config()
437+
438+
439+
def get_timeout_settings():
    """Return the timeout configuration as a plain dict of six settings.

    Reads ``sqlmap.timeout_settings`` from the loaded config object.
    Any key missing from that section — or the entire section being
    absent — falls back to the defaults below, so callers always
    receive a complete dict.

    Returns:
        dict: keys ``initial_scan``, ``follow_up_scan``,
        ``data_extraction``, ``complex_scan`` (seconds, int),
        ``adaptive_multiplier`` (float) and ``max_adaptive_timeout``
        (seconds, int).
    """
    # Single source of truth for fallback values: used both as per-key
    # getattr defaults and as the whole-section fallback. (Previously the
    # values were duplicated in two places and could drift apart.)
    defaults = {
        'initial_scan': 120,
        'follow_up_scan': 300,
        'data_extraction': 240,
        'complex_scan': 480,
        'adaptive_multiplier': 2.0,
        'max_adaptive_timeout': 600,
    }

    config = get_config()
    try:
        # Only this attribute chain can raise; getattr-with-default never does.
        section = config.sqlmap.timeout_settings
    except AttributeError:
        # timeout_settings section not present in config.yaml — use defaults.
        return dict(defaults)

    return {key: getattr(section, key, default) for key, default in defaults.items()}
464+
465+
def calculate_adaptive_timeout(base_timeout, scan_options, scan_type="follow_up"):
    """Calculate an adaptive timeout (in seconds) from scan complexity.

    Starts from a per-scan-type multiplier, adds surcharges for options
    known to slow sqlmap down, then caps the result so it never exceeds
    the configured ``max_adaptive_timeout``.

    Args:
        base_timeout: Base timeout in seconds (typically user-supplied).
        scan_options: sqlmap options as a list of strings or a single
            string; ``None``/empty means no option surcharges.
        scan_type: One of ``"initial"``, ``"follow_up"``,
            ``"data_extraction"``, ``"complex"``; unknown values use a
            neutral 1.0 multiplier.

    Returns:
        int: the adjusted timeout in seconds.
    """
    timeout_settings = get_timeout_settings()

    # Base multiplier per scan type; unknown types keep the neutral 1.0.
    type_multipliers = {
        "initial": 1.0,
        "follow_up": timeout_settings['adaptive_multiplier'],
        "data_extraction": 0.8,  # more conservative for data extraction
        "complex": 2.0,
    }
    complexity_multiplier = type_multipliers.get(scan_type, 1.0)

    # Surcharges based on the options actually requested.
    if scan_options:
        options_str = ' '.join(scan_options) if isinstance(scan_options, list) else str(scan_options)

        # NOTE: these are substring checks, so '--dump-all' matches both the
        # '--dump' and '--dump-all' entries and accumulates both surcharges
        # (matching the original weighting).
        if any(flag in options_str for flag in ('--level=3', '--level=4', '--level=5')):
            complexity_multiplier += 0.5
        if any(flag in options_str for flag in ('--risk=3', '--risk=4', '--risk=5')):
            complexity_multiplier += 0.3
        for flag, surcharge in (
            ('--dump', 0.4),
            ('--tables', 0.2),
            ('--forms', 0.3),
            ('--technique=BEUST', 0.4),
            ('--dump-all', 0.6),
        ):
            if flag in options_str:
                complexity_multiplier += surcharge

    # Cap the multiplier so the final timeout never exceeds
    # max_adaptive_timeout. Guard against a non-positive base_timeout,
    # which previously raised ZeroDivisionError for base_timeout == 0.
    if base_timeout > 0:
        max_multiplier = timeout_settings['max_adaptive_timeout'] / base_timeout
        complexity_multiplier = min(complexity_multiplier, max_multiplier)

    return int(base_timeout * complexity_multiplier)

sqlmap_ai/main.py

Lines changed: 17 additions & 3 deletions
Original file line numberDiff line numberDiff line change
@@ -14,7 +14,7 @@
1414
confirm_save_report
1515
)
1616
from sqlmap_ai.enhanced_cli import create_cli, handle_cli_commands, EnhancedCLI
17-
from sqlmap_ai.config_manager import config_manager, get_config
17+
from sqlmap_ai.config_manager import config_manager, get_config, calculate_adaptive_timeout
1818
from sqlmap_ai.security_manager import security_manager, SecurityError
1919
from sqlmap_ai.runner import SQLMapRunner
2020
from sqlmap_ai.parser import display_report, save_report_to_file, extract_sqlmap_info, create_json_report
@@ -356,11 +356,17 @@ def run_standard_mode(runner, target_url, user_timeout, interactive_mode):
356356
user_options = get_user_choice(next_options)
357357
if user_options:
358358
print_info("Running follow-up scan...")
359-
second_timeout = int(user_timeout * 1.5)
359+
360+
# Calculate adaptive timeout based on scan complexity
361+
second_timeout = calculate_adaptive_timeout(user_timeout, user_options, "follow_up")
362+
363+
print_info(f"Using adaptive timeout: {second_timeout} seconds")
364+
360365
result = runner.run_sqlmap(target_url, user_options, timeout=second_timeout, interactive_mode=interactive_mode)
361366
if result and "TIMEOUT:" in result:
362367
print_warning("Follow-up scan timed out.")
363368
print_info("You may still get useful results from the partial scan data.")
369+
print_info("Consider using less aggressive options or increasing timeout for complex scans.")
364370
if result:
365371
print_success("Test completed successfully!")
366372
followup_info = extract_sqlmap_info(result)
@@ -377,10 +383,15 @@ def run_standard_mode(runner, target_url, user_timeout, interactive_mode):
377383
):
378384
print_info("Starting data extraction...")
379385
extraction_options = f"--dump -T {','.join(followup_info['tables'][:3])}"
386+
387+
# Use adaptive timeout for data extraction
388+
extraction_timeout = calculate_adaptive_timeout(user_timeout, extraction_options, "data_extraction")
389+
print_info(f"Using extraction timeout: {extraction_timeout} seconds")
390+
380391
extraction_result = runner.run_sqlmap(
381392
target_url,
382393
extraction_options,
383-
timeout=second_timeout,
394+
timeout=extraction_timeout,
384395
interactive_mode=interactive_mode
385396
)
386397
if extraction_result:
@@ -394,6 +405,9 @@ def run_standard_mode(runner, target_url, user_timeout, interactive_mode):
394405
if extraction_info.get("extracted"):
395406
extracted_data.update(extraction_info["extracted"])
396407
display_report(extraction_result)
408+
elif extraction_result and "TIMEOUT:" in extraction_result:
409+
print_warning("Data extraction timed out.")
410+
print_info("Partial data may be available in the report.")
397411
if confirm_save_report():
398412
print_info("Creating beautiful HTML report...")
399413
try:

0 commit comments

Comments
 (0)