refactor(scan): custom headers #90

Merged · 6 commits · Jun 2, 2024
43 changes: 33 additions & 10 deletions default_yaml_config.yaml
@@ -1,13 +1,18 @@
# Global vars for all tools
#
- # custom_header: 'Cookie: Test' # FFUF, Nuclei, Dalfox, CRL Fuzz, HTTP Crawl, Fetch URL
- # user_agent: '' # Dalfox only
- # enable_http_crawl: true # All tools
- # timeout: 10 # Subdomain discovery, Screenshot, Port scan, FFUF, Nuclei
- # threads: 30 # All tools
- # rate_limit: 150 # Port scan, FFUF, Nuclei
- # intensity: 'normal' # Screenshot (grab only the root endpoints of each subdomain), Nuclei (reduce number of endpoints to scan), OSINT (not implemented yet)
- # retries: 1 # Nuclei
+ # Custom header - FFUF, Nuclei, Dalfox, CRL Fuzz, HTTPx, Fetch URL (Hakrawler, Katana, Gospider)
+ # custom_header: {
+ # 'Cookie':'Test',
+ # 'User-Agent': 'Mozilla/5.0',
+ # 'Custom-Header': 'My custom header'
+ # }
+ # 'user_agent': '' # Dalfox only
+ # 'enable_http_crawl': true # All tools
+ # 'timeout': 10 # Subdomain discovery, Screenshot, Port scan, FFUF, Nuclei
+ # 'threads': 30 # All tools
+ # 'rate_limit': 150 # Port scan, FFUF, Nuclei
+ # 'intensity': 'normal' # Screenshot (grab only the root endpoints of each subdomain), Nuclei (reduce number of endpoints to scan), OSINT (not implemented yet)
+ # 'retries': 1 # Nuclei

subdomain_discovery: {
'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'], # amass-passive, amass-active, All
@@ -19,7 +24,10 @@ subdomain_discovery: {
# 'amass_wordlist': 'deepmagic.com-prefixes-top50000'
}
http_crawl: {
- # 'custom_header': 'Cookie: Test',
+ # 'custom_header': {
+ # 'Cookie':'Test',
+ # 'User-Agent': 'Mozilla/5.0'
+ # },
# 'threads': 30,
# 'follow_redirect': true
}
@@ -65,6 +73,11 @@ osint: {
'documents_limit': 50
}
dir_file_fuzz: {
# 'custom_header': {
# 'Cookie':'Test',
# 'User-Agent': 'Mozilla/5.0',
# 'Custom-Header': 'My custom header'
# },
'auto_calibration': true,
'enable_http_crawl': true,
'rate_limit': 150,
@@ -76,9 +89,14 @@ dir_file_fuzz: {
'stop_on_error': false,
'timeout': 5,
'threads': 30,
- 'wordlist_name': 'dicc'
+ 'wordlist_name': 'dicc',
}
fetch_url: {
# 'custom_header': {
# 'Cookie':'Test',
# 'User-Agent': 'Mozilla/5.0',
# 'Custom-Header': 'My custom header'
# },
'uses_tools': ['gospider', 'hakrawler', 'waybackurls', 'katana', 'gau'],
'remove_duplicate_endpoints': true,
'duplicate_fields': ['content_length', 'page_title'],
@@ -89,6 +107,11 @@ fetch_url: {
# 'exclude_subdomains': false
}
vulnerability_scan: {
# 'custom_header': {
# 'Cookie':'Test',
# 'User-Agent': 'Mozilla/5.0',
# 'Custom-Header': 'My custom header'
# },
'run_nuclei': true,
'run_dalfox': false,
'run_crlfuzz': false,
77 changes: 77 additions & 0 deletions web/reNgine/common_func.py
@@ -951,3 +951,80 @@ def extract_between(text, pattern):
    if match:
        return match.group(1).strip()
    return ""

def parse_custom_header(custom_header):
    """
    Parse the custom_header input to ensure it is a dictionary.

    Args:
        custom_header (dict or str): Dictionary or string containing the custom headers.

    Returns:
        dict: Parsed dictionary of custom headers.
    """

    if isinstance(custom_header, str):
        header_dict = {}
        headers = custom_header.split(',')
        for header in headers:
            parts = header.split(':', 1)
            if len(parts) == 2:
                key, value = parts
                header_dict[key.strip()] = value.strip()
            else:
                raise ValueError(f"Invalid header format: '{header}'")
        return header_dict
    elif isinstance(custom_header, dict):
        return custom_header
    else:
        raise ValueError("custom_header must be a dictionary or a string")

def generate_header_param(custom_header, tool_name=None):
    """
    Generate command-line parameters for a specific tool based on the custom header.

    Args:
        custom_header (dict or str): Dictionary or string containing the custom headers.
        tool_name (str, optional): Name of the tool. Defaults to None.

    Returns:
        str: Command-line parameter for the specified tool.
    """
    # Ensure the custom_header is a dictionary
    custom_header = parse_custom_header(custom_header)

    # Common formats
    common_headers = [f"{key}: {value}" for key, value in custom_header.items()]
    semi_colon_headers = ';;'.join(common_headers)
    colon_headers = [f"{key}:{value}" for key, value in custom_header.items()]

    # Define format mapping for each tool
    format_mapping = {
        'common': ' '.join([f' -H "{header}"' for header in common_headers]),
        'dalfox': ' '.join([f' -H "{header}"' for header in colon_headers]),
        'hakrawler': f' -h "{semi_colon_headers}"',
        'gospider': generate_gospider_params(custom_header),
    }

    # Return the corresponding parameter for the specified tool or default to common_headers format
    return format_mapping.get(tool_name, format_mapping.get('common'))

def generate_gospider_params(custom_header):
    """
    Generate command-line parameters for gospider based on the custom header.

    Args:
        custom_header (dict): Dictionary containing the custom headers.

    Returns:
        str: Command-line parameters for gospider.
    """
    params = []
    for key, value in custom_header.items():
        if key.lower() == 'user-agent':
            params.append(f' -u "{value}"')
        elif key.lower() == 'cookie':
            params.append(f' --cookie "{value}"')
        else:
            params.append(f' -H "{key}:{value}"')
    return ' '.join(params)
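
For reference, a quick illustrative sketch of what the new helpers produce, runnable outside a scan (the import path and the sample header values are assumptions for demonstration):

# Illustrative sketch only: exercises the helpers above with a made-up header dict.
from reNgine.common_func import parse_custom_header, generate_header_param

headers = {'Cookie': 'Test', 'User-Agent': 'Mozilla/5.0'}

# A comma-separated string normalizes to the same dict.
assert parse_custom_header('Cookie: Test, User-Agent: Mozilla/5.0') == headers

print(generate_header_param(headers))               #  -H "Cookie: Test"  -H "User-Agent: Mozilla/5.0"
print(generate_header_param(headers, 'dalfox'))     #  -H "Cookie:Test"  -H "User-Agent:Mozilla/5.0"
print(generate_header_param(headers, 'hakrawler'))  #  -h "Cookie: Test;;User-Agent: Mozilla/5.0"
print(generate_header_param(headers, 'gospider'))   #  --cookie "Test"  -u "Mozilla/5.0"

Note that an unknown or omitted tool_name falls back to the 'common' -H format, so every supported tool gets a sane default.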
49 changes: 28 additions & 21 deletions web/reNgine/tasks.py
@@ -1583,7 +1583,9 @@ def dir_file_fuzz(self, ctx={}, description=None):
# Config
cmd = 'ffuf'
config = self.yaml_configuration.get(DIR_FILE_FUZZ) or {}
- custom_header = self.yaml_configuration.get(CUSTOM_HEADER)
+ custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
+ if custom_header:
+     custom_header = generate_header_param(custom_header,'common')
auto_calibration = config.get(AUTO_CALIBRATION, True)
enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL)
rate_limit = config.get(RATE_LIMIT) or self.yaml_configuration.get(RATE_LIMIT, DEFAULT_RATE_LIMIT)
@@ -1619,7 +1621,7 @@ def dir_file_fuzz(self, ctx={}, description=None):
cmd += ' -fr' if follow_redirect else ''
cmd += ' -ac' if auto_calibration else ''
cmd += f' -mc {mc}' if mc else ''
- cmd += f' -H "{custom_header}"' if custom_header else ''
+ cmd += f' {custom_header}' if custom_header else ''

# Grab URLs to fuzz
urls = get_http_urls(
@@ -1767,7 +1769,9 @@ def fetch_url(self, urls=[], ctx={}, description=None):
tools = config.get(USES_TOOLS, ENDPOINT_SCAN_DEFAULT_TOOLS)
threads = config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS)
domain_request_headers = self.domain.request_headers if self.domain else None
- custom_header = domain_request_headers or self.yaml_configuration.get(CUSTOM_HEADER)
+ custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
+ if domain_request_headers or custom_header:
+     custom_header = domain_request_headers or custom_header
exclude_subdomains = config.get(EXCLUDED_SUBDOMAINS, False)

# Get URLs to scan and save to input file
@@ -1792,8 +1796,8 @@
'gau': f'gau',
'hakrawler': 'hakrawler -subs -u',
'waybackurls': 'waybackurls',
- 'gospider': f'gospider -S {input_path} --js -d 2 --sitemap --robots -w -r',
- 'katana': f'katana -list {input_path} -silent -jc -kf all -d 3 -fs rdn',
+ 'gospider': f'gospider --js -d 2 --sitemap --robots -w -r',
+ 'katana': f'katana -silent -jc -kf all -d 3 -fs rdn',
}
if proxy:
cmd_map['gau'] += f' --proxy "{proxy}"'
@@ -1805,14 +1809,9 @@
cmd_map['gospider'] += f' -t {threads}'
cmd_map['katana'] += f' -c {threads}'
if custom_header:
- header_string = ';;'.join([
-     f'{key}: {value}' for key, value in custom_header.items()
- ])
- cmd_map['hakrawler'] += f' -h {header_string}'
- cmd_map['katana'] += f' -H {header_string}'
- header_flags = [':'.join(h) for h in header_string.split(';;')]
- for flag in header_flags:
-     cmd_map['gospider'] += f' -H {flag}'
+ cmd_map['gospider'] += generate_header_param(custom_header, 'gospider')
+ cmd_map['hakrawler'] += generate_header_param(custom_header, 'hakrawler')
+ cmd_map['katana'] += generate_header_param(custom_header, 'common')
cat_input = f'cat {input_path}'
grep_output = f'grep -Eo {host_regex}'
cmd_map = {
@@ -2316,6 +2315,8 @@ def nuclei_scan(self, urls=[], ctx={}, description=None):
retries = config.get(RETRIES) or self.yaml_configuration.get(RETRIES, DEFAULT_RETRIES)
timeout = config.get(TIMEOUT) or self.yaml_configuration.get(TIMEOUT, DEFAULT_HTTP_TIMEOUT)
custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
if custom_header:
    custom_header = generate_header_param(custom_header, 'common')
should_fetch_gpt_report = config.get(FETCH_GPT_REPORT, DEFAULT_GET_GPT_REPORT)
proxy = get_random_proxy()
nuclei_specific_config = config.get('nuclei', {})
@@ -2382,7 +2383,7 @@
cmd = 'nuclei -j'
cmd += ' -config /root/.config/nuclei/config.yaml' if use_nuclei_conf else ''
cmd += f' -irr'
- cmd += f' -H "{custom_header}"' if custom_header else ''
+ cmd += f' {custom_header}' if custom_header else ''
cmd += f' -l {input_path}'
cmd += f' -c {str(concurrency)}' if concurrency > 0 else ''
cmd += f' -proxy {proxy} ' if proxy else ''
@@ -2433,6 +2434,8 @@ def dalfox_xss_scan(self, urls=[], ctx={}, description=None):
should_fetch_gpt_report = vuln_config.get(FETCH_GPT_REPORT, DEFAULT_GET_GPT_REPORT)
dalfox_config = vuln_config.get(DALFOX) or {}
custom_header = dalfox_config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
if custom_header:
    custom_header = generate_header_param(custom_header, 'dalfox')
proxy = get_random_proxy()
is_waf_evasion = dalfox_config.get(WAF_EVASION, False)
blind_xss_server = dalfox_config.get(BLIND_XSS_SERVER)
@@ -2468,7 +2471,7 @@
cmd += f' --delay {delay}' if delay else ''
cmd += f' --timeout {timeout}' if timeout else ''
cmd += f' --user-agent {user_agent}' if user_agent else ''
- cmd += f' --header {custom_header}' if custom_header else ''
+ cmd += f' {custom_header}' if custom_header else ''
cmd += f' --worker {threads}' if threads else ''
cmd += f' --format json'

@@ -2558,6 +2561,8 @@ def crlfuzz_scan(self, urls=[], ctx={}, description=None):
vuln_config = self.yaml_configuration.get(VULNERABILITY_SCAN) or {}
should_fetch_gpt_report = vuln_config.get(FETCH_GPT_REPORT, DEFAULT_GET_GPT_REPORT)
custom_header = vuln_config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
if custom_header:
    custom_header = generate_header_param(custom_header, 'common')
proxy = get_random_proxy()
user_agent = vuln_config.get(USER_AGENT) or self.yaml_configuration.get(USER_AGENT)
threads = vuln_config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS)
@@ -2582,7 +2587,7 @@
cmd = 'crlfuzz -s'
cmd += f' -l {input_path}'
cmd += f' -x {proxy}' if proxy else ''
- cmd += f' --H {custom_header}' if custom_header else ''
+ cmd += f' {custom_header}' if custom_header else ''
cmd += f' -o {output_path}'

run_command(
@@ -2731,10 +2736,12 @@ def http_crawl(
if is_ran_from_subdomain_scan:
logger.info('Running From Subdomain Scan...')
cmd = '/go/bin/httpx'
- cfg = self.yaml_configuration.get(HTTP_CRAWL) or {}
- custom_header = cfg.get(CUSTOM_HEADER, '')
- threads = cfg.get(THREADS, DEFAULT_THREADS)
- follow_redirect = cfg.get(FOLLOW_REDIRECT, True)
+ config = self.yaml_configuration.get(HTTP_CRAWL) or {}
+ custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
+ if custom_header:
+     custom_header = generate_header_param(custom_header, 'common')
+ threads = config.get(THREADS, DEFAULT_THREADS)
+ follow_redirect = config.get(FOLLOW_REDIRECT, True)
self.output_path = None
input_path = f'{self.results_dir}/httpx_input.txt'
history_file = f'{self.results_dir}/commands.txt'
@@ -2767,7 +2774,7 @@
cmd += f' -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent'
cmd += f' -t {threads}' if threads > 0 else ''
cmd += f' --http-proxy {proxy}' if proxy else ''
- cmd += f' -H "{custom_header}"' if custom_header else ''
+ cmd += f' {custom_header}' if custom_header else ''
cmd += f' -json'
cmd += f' -u {urls[0]}' if len(urls) == 1 else f' -l {input_path}'
cmd += f' -x {method}' if method else ''
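Each task above applies the same lookup pattern: the per-task config value takes precedence over the global YAML value, and the header is rendered exactly once via generate_header_param before being spliced into the command string. A condensed, illustrative version of that pattern (plain string keys stand in for the constants used in tasks.py):

# Condensed sketch of the per-task pattern; not an actual task body.
from reNgine.common_func import generate_header_param

yaml_configuration = {
    'custom_header': {'Cookie': 'Test'},  # global value
    'vulnerability_scan': {},             # no per-task override in this example
}

config = yaml_configuration.get('vulnerability_scan') or {}
# Per-task setting wins; otherwise fall back to the global one.
custom_header = config.get('custom_header') or yaml_configuration.get('custom_header')

cmd = 'nuclei -j'
if custom_header:
    # 'common' renders -H "Key: Value" pairs, as nuclei expects.
    cmd += generate_header_param(custom_header, 'common')
print(cmd)  # nuclei -j -H "Cookie: Test"
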
@@ -122,7 +122,7 @@ <h5 class="text-secondary">reNgine YAML Documentation</h5>
<br>
<span class="text-uppercase">Please, do not modify the configuration unless you know what what you are doing.</span>
<br>
<span class="text-danger">If default YAML configuration doesn't automatically load, <a href="https://raw.githubusercontent.com/yogeshojha/rengine/master/default_yaml_config.yaml" target="_blank">download default configuration from here</a> and paste it. (Firefox may have issues loading default YAML configuration.)</span>
<span class="text-danger">If default YAML configuration doesn't automatically load, <a href="https://raw.githubusercontent.com/Security-Tools-Alliance/rengine/master/default_yaml_config.yaml" target="_blank">download default configuration from here</a> and paste it. (Firefox may have issues loading default YAML configuration.)</span>
</p>
{{form.yaml_configuration}}
</div>