From c9a2571e983f91205192c172dcaee5a40c5f6769 Mon Sep 17 00:00:00 2001
From: psyray
Date: Fri, 31 May 2024 14:45:37 +0200
Subject: [PATCH 1/6] fix(scan): rework of the custom header parameter
injection
---
web/reNgine/common_func.py | 57 ++++++++++++++++++++++++++++++++++++++
web/reNgine/tasks.py | 45 +++++++++++++++++-------------
2 files changed, 83 insertions(+), 19 deletions(-)
diff --git a/web/reNgine/common_func.py b/web/reNgine/common_func.py
index f7a98097..04b32efa 100644
--- a/web/reNgine/common_func.py
+++ b/web/reNgine/common_func.py
@@ -951,3 +951,60 @@ def extract_between(text, pattern):
if match:
return match.group(1).strip()
return ""
+
+def parse_custom_header(custom_header):
+ """
+ Parse the custom_header input to ensure it is a dictionary.
+
+ Args:
+ custom_header (dict or str): Dictionary or string containing the custom headers.
+
+ Returns:
+ dict: Parsed dictionary of custom headers.
+ """
+
+ if isinstance(custom_header, str):
+ header_dict = {}
+ headers = custom_header.split(',')
+ for header in headers:
+ parts = header.split(':', 1)
+ if len(parts) == 2:
+ key, value = parts
+ header_dict[key.strip()] = value.strip()
+ else:
+ raise ValueError(f"Invalid header format: '{header}'")
+ return header_dict
+ elif isinstance(custom_header, dict):
+ return custom_header
+ else:
+ raise ValueError("custom_header must be a dictionary or a string")
+
+def generate_header_param(custom_header, tool_name=None):
+ """
+ Generate command-line parameters for a specific tool based on the custom header.
+
+ Args:
+ custom_header (dict or str): Dictionary or string containing the custom headers.
+ tool_name (str, optional): Name of the tool. Defaults to None.
+
+ Returns:
+ str: Command-line parameter for the specified tool.
+ """
+ # Ensure the custom_header is a dictionary
+ custom_header = parse_custom_header(custom_header)
+
+ # Common formats
+ common_headers = [f"{key}: {value}" for key, value in custom_header.items()]
+ semi_colon_headers = ';;'.join(common_headers)
+ colon_headers = [f"{key}:{value}" for key, value in custom_header.items()]
+
+ # Define format mapping for each tool
+ format_mapping = {
+ 'common': ' '.join([f' -H "{header}"' for header in common_headers]),
+ 'hakrawler': f' -h "{semi_colon_headers}"',
+ 'katana': f' -H "{semi_colon_headers}"',
+ 'gospider': ' '.join([f' -H "{header}"' for header in colon_headers]),
+ }
+
+ # Return the corresponding parameter for the specified tool or default to common_headers format
+ return format_mapping.get(tool_name, format_mapping.get('common'))
\ No newline at end of file
diff --git a/web/reNgine/tasks.py b/web/reNgine/tasks.py
index c2b11728..7cc53e3e 100644
--- a/web/reNgine/tasks.py
+++ b/web/reNgine/tasks.py
@@ -1583,7 +1583,9 @@ def dir_file_fuzz(self, ctx={}, description=None):
# Config
cmd = 'ffuf'
config = self.yaml_configuration.get(DIR_FILE_FUZZ) or {}
- custom_header = self.yaml_configuration.get(CUSTOM_HEADER)
+ custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
+ if custom_header:
+ custom_header = generate_header_param(custom_header,'common')
auto_calibration = config.get(AUTO_CALIBRATION, True)
enable_http_crawl = config.get(ENABLE_HTTP_CRAWL, DEFAULT_ENABLE_HTTP_CRAWL)
rate_limit = config.get(RATE_LIMIT) or self.yaml_configuration.get(RATE_LIMIT, DEFAULT_RATE_LIMIT)
@@ -1619,7 +1621,7 @@ def dir_file_fuzz(self, ctx={}, description=None):
cmd += ' -fr' if follow_redirect else ''
cmd += ' -ac' if auto_calibration else ''
cmd += f' -mc {mc}' if mc else ''
- cmd += f' -H "{custom_header}"' if custom_header else ''
+ cmd += f' {custom_header}' if custom_header else ''
# Grab URLs to fuzz
urls = get_http_urls(
@@ -1767,7 +1769,9 @@ def fetch_url(self, urls=[], ctx={}, description=None):
tools = config.get(USES_TOOLS, ENDPOINT_SCAN_DEFAULT_TOOLS)
threads = config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS)
domain_request_headers = self.domain.request_headers if self.domain else None
- custom_header = domain_request_headers or self.yaml_configuration.get(CUSTOM_HEADER)
+ custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
+ if domain_request_headers or custom_header:
+ custom_header = domain_request_headers or custom_header
exclude_subdomains = config.get(EXCLUDED_SUBDOMAINS, False)
# Get URLs to scan and save to input file
@@ -1805,14 +1809,9 @@ def fetch_url(self, urls=[], ctx={}, description=None):
cmd_map['gospider'] += f' -t {threads}'
cmd_map['katana'] += f' -c {threads}'
if custom_header:
- header_string = ';;'.join([
- f'{key}: {value}' for key, value in custom_header.items()
- ])
- cmd_map['hakrawler'] += f' -h {header_string}'
- cmd_map['katana'] += f' -H {header_string}'
- header_flags = [':'.join(h) for h in header_string.split(';;')]
- for flag in header_flags:
- cmd_map['gospider'] += f' -H {flag}'
+ cmd_map['gospider'] += generate_header_param(custom_header, 'gospider')
+ cmd_map['hakrawler'] += generate_header_param(custom_header, 'hakrawler')
+ cmd_map['katana'] += generate_header_param(custom_header, 'katana')
cat_input = f'cat {input_path}'
grep_output = f'grep -Eo {host_regex}'
cmd_map = {
@@ -2316,6 +2315,8 @@ def nuclei_scan(self, urls=[], ctx={}, description=None):
retries = config.get(RETRIES) or self.yaml_configuration.get(RETRIES, DEFAULT_RETRIES)
timeout = config.get(TIMEOUT) or self.yaml_configuration.get(TIMEOUT, DEFAULT_HTTP_TIMEOUT)
custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
+ if custom_header:
+ custom_header = generate_header_param(custom_header, 'common')
should_fetch_gpt_report = config.get(FETCH_GPT_REPORT, DEFAULT_GET_GPT_REPORT)
proxy = get_random_proxy()
nuclei_specific_config = config.get('nuclei', {})
@@ -2382,7 +2383,7 @@ def nuclei_scan(self, urls=[], ctx={}, description=None):
cmd = 'nuclei -j'
cmd += ' -config /root/.config/nuclei/config.yaml' if use_nuclei_conf else ''
cmd += f' -irr'
- cmd += f' -H "{custom_header}"' if custom_header else ''
+ cmd += f' {custom_header}' if custom_header else ''
cmd += f' -l {input_path}'
cmd += f' -c {str(concurrency)}' if concurrency > 0 else ''
cmd += f' -proxy {proxy} ' if proxy else ''
@@ -2433,6 +2434,8 @@ def dalfox_xss_scan(self, urls=[], ctx={}, description=None):
should_fetch_gpt_report = vuln_config.get(FETCH_GPT_REPORT, DEFAULT_GET_GPT_REPORT)
dalfox_config = vuln_config.get(DALFOX) or {}
custom_header = dalfox_config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
+ if custom_header:
+ custom_header = generate_header_param(custom_header, 'common')
proxy = get_random_proxy()
is_waf_evasion = dalfox_config.get(WAF_EVASION, False)
blind_xss_server = dalfox_config.get(BLIND_XSS_SERVER)
@@ -2468,7 +2471,7 @@ def dalfox_xss_scan(self, urls=[], ctx={}, description=None):
cmd += f' --delay {delay}' if delay else ''
cmd += f' --timeout {timeout}' if timeout else ''
cmd += f' --user-agent {user_agent}' if user_agent else ''
- cmd += f' --header {custom_header}' if custom_header else ''
+ cmd += f' {custom_header}' if custom_header else ''
cmd += f' --worker {threads}' if threads else ''
cmd += f' --format json'
@@ -2558,6 +2561,8 @@ def crlfuzz_scan(self, urls=[], ctx={}, description=None):
vuln_config = self.yaml_configuration.get(VULNERABILITY_SCAN) or {}
should_fetch_gpt_report = vuln_config.get(FETCH_GPT_REPORT, DEFAULT_GET_GPT_REPORT)
custom_header = vuln_config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
+ if custom_header:
+ custom_header = generate_header_param(custom_header, 'common')
proxy = get_random_proxy()
user_agent = vuln_config.get(USER_AGENT) or self.yaml_configuration.get(USER_AGENT)
threads = vuln_config.get(THREADS) or self.yaml_configuration.get(THREADS, DEFAULT_THREADS)
@@ -2582,7 +2587,7 @@ def crlfuzz_scan(self, urls=[], ctx={}, description=None):
cmd = 'crlfuzz -s'
cmd += f' -l {input_path}'
cmd += f' -x {proxy}' if proxy else ''
- cmd += f' --H {custom_header}' if custom_header else ''
+ cmd += f' {custom_header}' if custom_header else ''
cmd += f' -o {output_path}'
run_command(
@@ -2731,10 +2736,12 @@ def http_crawl(
if is_ran_from_subdomain_scan:
logger.info('Running From Subdomain Scan...')
cmd = '/go/bin/httpx'
- cfg = self.yaml_configuration.get(HTTP_CRAWL) or {}
- custom_header = cfg.get(CUSTOM_HEADER, '')
- threads = cfg.get(THREADS, DEFAULT_THREADS)
- follow_redirect = cfg.get(FOLLOW_REDIRECT, True)
+ config = self.yaml_configuration.get(HTTP_CRAWL) or {}
+ custom_header = config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
+ if custom_header:
+ custom_header = generate_header_param(custom_header, 'common')
+ threads = config.get(THREADS, DEFAULT_THREADS)
+ follow_redirect = config.get(FOLLOW_REDIRECT, True)
self.output_path = None
input_path = f'{self.results_dir}/httpx_input.txt'
history_file = f'{self.results_dir}/commands.txt'
@@ -2767,7 +2774,7 @@ def http_crawl(
cmd += f' -cl -ct -rt -location -td -websocket -cname -asn -cdn -probe -random-agent'
cmd += f' -t {threads}' if threads > 0 else ''
cmd += f' --http-proxy {proxy}' if proxy else ''
- cmd += f' -H "{custom_header}"' if custom_header else ''
+ cmd += f' {custom_header}' if custom_header else ''
cmd += f' -json'
cmd += f' -u {urls[0]}' if len(urls) == 1 else f' -l {input_path}'
cmd += f' -x {method}' if method else ''
From a12b2f6e0304b30727e79df8b7b56ad677a9b646 Mon Sep 17 00:00:00 2001
From: psyray
Date: Fri, 31 May 2024 14:46:06 +0200
Subject: [PATCH 2/6] fix(scan): update default yaml config for custom header
---
default_yaml_config.yaml | 39 ++++++++++++++-----
.../scanEngine/_items/form_engine.html | 2 +-
2 files changed, 30 insertions(+), 11 deletions(-)
diff --git a/default_yaml_config.yaml b/default_yaml_config.yaml
index b5d125ab..5f032690 100644
--- a/default_yaml_config.yaml
+++ b/default_yaml_config.yaml
@@ -1,13 +1,17 @@
# Global vars for all tools
#
-# custom_header: 'Cookie: Test' # FFUF, Nuclei, Dalfox, CRL Fuzz, HTTP Crawl, Fetch URL
-# user_agent: '' # Dalfox only
-# enable_http_crawl: true # All tools
-# timeout: 10 # Subdomain discovery, Screenshot, Port scan, FFUF, Nuclei
-# threads: 30 # All tools
-# rate_limit: 150 # Port scan, FFUF, Nuclei
-# intensity: 'normal' # Screenshot (grab only the root endpoints of each subdomain), Nuclei (reduce number of endpoints to scan), OSINT (not implemented yet)
-# retries: 1 # Nuclei
+# Custom header - FFUF, Nuclei, Dalfox, CRL Fuzz, HTTPx, Fetch URL (Hakrawler, Katana, Gospider)
+# custom_header: {
+# 'Cookie':'Test',
+# 'User-Agent': 'Mozilla/5.0'
+# }
+# 'user_agent': '' # Dalfox only
+# 'enable_http_crawl': true # All tools
+# 'timeout': 10 # Subdomain discovery, Screenshot, Port scan, FFUF, Nuclei
+# 'threads': 30 # All tools
+# 'rate_limit': 150 # Port scan, FFUF, Nuclei
+# 'intensity': 'normal' # Screenshot (grab only the root endpoints of each subdomain), Nuclei (reduce number of endpoints to scan), OSINT (not implemented yet)
+# 'retries': 1 # Nuclei
subdomain_discovery: {
'uses_tools': ['subfinder', 'ctfr', 'sublist3r', 'tlsx', 'oneforall', 'netlas'], # amass-passive, amass-active, All
@@ -19,7 +23,10 @@ subdomain_discovery: {
# 'amass_wordlist': 'deepmagic.com-prefixes-top50000'
}
http_crawl: {
- # 'custom_header': 'Cookie: Test',
+ # 'custom_header': {
+ # 'Cookie':'Test',
+ # 'User-Agent': 'Mozilla/5.0'
+ # },
# 'threads': 30,
# 'follow_redirect': true
}
@@ -65,6 +72,10 @@ osint: {
'documents_limit': 50
}
dir_file_fuzz: {
+ # 'custom_header': {
+ # 'Cookie':'Test',
+ # 'User-Agent': 'Mozilla/5.0'
+ # },
'auto_calibration': true,
'enable_http_crawl': true,
'rate_limit': 150,
@@ -76,9 +87,13 @@ dir_file_fuzz: {
'stop_on_error': false,
'timeout': 5,
'threads': 30,
- 'wordlist_name': 'dicc'
+ 'wordlist_name': 'dicc',
}
fetch_url: {
+ # 'custom_header': {
+ # 'Cookie':'Test',
+ # 'User-Agent': 'Mozilla/5.0'
+ # },
'uses_tools': ['gospider', 'hakrawler', 'waybackurls', 'katana', 'gau'],
'remove_duplicate_endpoints': true,
'duplicate_fields': ['content_length', 'page_title'],
@@ -89,6 +104,10 @@ fetch_url: {
# 'exclude_subdomains': false
}
vulnerability_scan: {
+ # 'custom_header': {
+ # 'Cookie':'Test',
+ # 'User-Agent': 'Mozilla/5.0'
+ # },
'run_nuclei': true,
'run_dalfox': false,
'run_crlfuzz': false,
diff --git a/web/scanEngine/templates/scanEngine/_items/form_engine.html b/web/scanEngine/templates/scanEngine/_items/form_engine.html
index 0ae0a656..805415e5 100644
--- a/web/scanEngine/templates/scanEngine/_items/form_engine.html
+++ b/web/scanEngine/templates/scanEngine/_items/form_engine.html
@@ -122,7 +122,7 @@ reNgine YAML Documentation
Please, do not modify the configuration unless you know what what you are doing.
- If default YAML configuration doesn't automatically load, download default configuration from here and paste it. (Firefox may have issues loading default YAML configuration.)
+ If default YAML configuration doesn't automatically load, download default configuration from here and paste it. (Firefox may have issues loading default YAML configuration.)
{{form.yaml_configuration}}
From 0369d04d92fb3e8330eb8f2e6f9df72c1be0bced Mon Sep 17 00:00:00 2001
From: psyray
Date: Fri, 31 May 2024 15:46:02 +0200
Subject: [PATCH 3/6] fix(scan): fix dalfox error at startup with custom
headers
---
web/reNgine/common_func.py | 1 +
web/reNgine/tasks.py | 2 +-
2 files changed, 2 insertions(+), 1 deletion(-)
diff --git a/web/reNgine/common_func.py b/web/reNgine/common_func.py
index 04b32efa..7f3178f0 100644
--- a/web/reNgine/common_func.py
+++ b/web/reNgine/common_func.py
@@ -1001,6 +1001,7 @@ def generate_header_param(custom_header, tool_name=None):
# Define format mapping for each tool
format_mapping = {
'common': ' '.join([f' -H "{header}"' for header in common_headers]),
+ 'dalfox': ' '.join([f' -H "{header}"' for header in colon_headers]),
'hakrawler': f' -h "{semi_colon_headers}"',
'katana': f' -H "{semi_colon_headers}"',
'gospider': ' '.join([f' -H "{header}"' for header in colon_headers]),
diff --git a/web/reNgine/tasks.py b/web/reNgine/tasks.py
index 7cc53e3e..1ea2aa42 100644
--- a/web/reNgine/tasks.py
+++ b/web/reNgine/tasks.py
@@ -2435,7 +2435,7 @@ def dalfox_xss_scan(self, urls=[], ctx={}, description=None):
dalfox_config = vuln_config.get(DALFOX) or {}
custom_header = dalfox_config.get(CUSTOM_HEADER) or self.yaml_configuration.get(CUSTOM_HEADER)
if custom_header:
- custom_header = generate_header_param(custom_header, 'common')
+ custom_header = generate_header_param(custom_header, 'dalfox')
proxy = get_random_proxy()
is_waf_evasion = dalfox_config.get(WAF_EVASION, False)
blind_xss_server = dalfox_config.get(BLIND_XSS_SERVER)
From f0ef74a125e5301dde6524d1b920a2a777f25267 Mon Sep 17 00:00:00 2001
From: psyray
Date: Fri, 31 May 2024 18:13:56 +0200
Subject: [PATCH 4/6] fix(scan): fix katana and gospider headers format
---
web/reNgine/common_func.py | 24 ++++++++++++++++++++++--
web/reNgine/tasks.py | 6 +++---
2 files changed, 25 insertions(+), 5 deletions(-)
diff --git a/web/reNgine/common_func.py b/web/reNgine/common_func.py
index 7f3178f0..dcbd497c 100644
--- a/web/reNgine/common_func.py
+++ b/web/reNgine/common_func.py
@@ -1004,8 +1004,28 @@ def generate_header_param(custom_header, tool_name=None):
'dalfox': ' '.join([f' -H "{header}"' for header in colon_headers]),
'hakrawler': f' -h "{semi_colon_headers}"',
'katana': f' -H "{semi_colon_headers}"',
- 'gospider': ' '.join([f' -H "{header}"' for header in colon_headers]),
+ 'gospider': generate_gospider_params(custom_header),
}
# Return the corresponding parameter for the specified tool or default to common_headers format
- return format_mapping.get(tool_name, format_mapping.get('common'))
\ No newline at end of file
+ return format_mapping.get(tool_name, format_mapping.get('common'))
+
+def generate_gospider_params(custom_header):
+ """
+ Generate command-line parameters for gospider based on the custom header.
+
+ Args:
+ custom_header (dict): Dictionary containing the custom headers.
+
+ Returns:
+ str: Command-line parameters for gospider.
+ """
+ params = []
+ for key, value in custom_header.items():
+ if key.lower() == 'user-agent':
+ params.append(f' -u "{value}"')
+ elif key.lower() == 'cookie':
+ params.append(f' --cookie "{value}"')
+ else:
+ params.append(f' -H "{key}:{value}"')
+ return ' '.join(params)
\ No newline at end of file
diff --git a/web/reNgine/tasks.py b/web/reNgine/tasks.py
index 1ea2aa42..ddaa54a3 100644
--- a/web/reNgine/tasks.py
+++ b/web/reNgine/tasks.py
@@ -1796,8 +1796,8 @@ def fetch_url(self, urls=[], ctx={}, description=None):
'gau': f'gau',
'hakrawler': 'hakrawler -subs -u',
'waybackurls': 'waybackurls',
- 'gospider': f'gospider -S {input_path} --js -d 2 --sitemap --robots -w -r',
- 'katana': f'katana -list {input_path} -silent -jc -kf all -d 3 -fs rdn',
+ 'gospider': f'gospider --js -d 2 --sitemap --robots -w -r',
+ 'katana': f'katana -silent -jc -kf all -d 3 -fs rdn',
}
if proxy:
cmd_map['gau'] += f' --proxy "{proxy}"'
@@ -1811,7 +1811,7 @@ def fetch_url(self, urls=[], ctx={}, description=None):
if custom_header:
cmd_map['gospider'] += generate_header_param(custom_header, 'gospider')
cmd_map['hakrawler'] += generate_header_param(custom_header, 'hakrawler')
- cmd_map['katana'] += generate_header_param(custom_header, 'katana')
+ cmd_map['katana'] += generate_header_param(custom_header, 'common')
cat_input = f'cat {input_path}'
grep_output = f'grep -Eo {host_regex}'
cmd_map = {
From 0bfd0b35c63251e15d45641cc6a46072024ac2da Mon Sep 17 00:00:00 2001
From: psyray
Date: Fri, 31 May 2024 18:33:37 +0200
Subject: [PATCH 5/6] feat(scan): remove useless katana header format
---
web/reNgine/common_func.py | 1 -
1 file changed, 1 deletion(-)
diff --git a/web/reNgine/common_func.py b/web/reNgine/common_func.py
index dcbd497c..82aefdc6 100644
--- a/web/reNgine/common_func.py
+++ b/web/reNgine/common_func.py
@@ -1003,7 +1003,6 @@ def generate_header_param(custom_header, tool_name=None):
'common': ' '.join([f' -H "{header}"' for header in common_headers]),
'dalfox': ' '.join([f' -H "{header}"' for header in colon_headers]),
'hakrawler': f' -h "{semi_colon_headers}"',
- 'katana': f' -H "{semi_colon_headers}"',
'gospider': generate_gospider_params(custom_header),
}
From 5493db1862d24bd2be466820ec93219b8250ce49 Mon Sep 17 00:00:00 2001
From: psyray
Date: Fri, 31 May 2024 18:41:28 +0200
Subject: [PATCH 6/6] feat(scan): update default yaml config with custom header
---
default_yaml_config.yaml | 12 ++++++++----
1 file changed, 8 insertions(+), 4 deletions(-)
diff --git a/default_yaml_config.yaml b/default_yaml_config.yaml
index 5f032690..7f8ee078 100644
--- a/default_yaml_config.yaml
+++ b/default_yaml_config.yaml
@@ -3,7 +3,8 @@
# Custom header - FFUF, Nuclei, Dalfox, CRL Fuzz, HTTPx, Fetch URL (Hakrawler, Katana, Gospider)
# custom_header: {
# 'Cookie':'Test',
-# 'User-Agent': 'Mozilla/5.0'
+# 'User-Agent': 'Mozilla/5.0',
+# 'Custom-Header': 'My custom header'
# }
# 'user_agent': '' # Dalfox only
# 'enable_http_crawl': true # All tools
@@ -74,7 +75,8 @@ osint: {
dir_file_fuzz: {
# 'custom_header': {
# 'Cookie':'Test',
- # 'User-Agent': 'Mozilla/5.0'
+ # 'User-Agent': 'Mozilla/5.0',
+ # 'Custom-Header': 'My custom header'
# },
'auto_calibration': true,
'enable_http_crawl': true,
@@ -92,7 +94,8 @@ dir_file_fuzz: {
fetch_url: {
# 'custom_header': {
# 'Cookie':'Test',
- # 'User-Agent': 'Mozilla/5.0'
+ # 'User-Agent': 'Mozilla/5.0',
+ # 'Custom-Header': 'My custom header'
# },
'uses_tools': ['gospider', 'hakrawler', 'waybackurls', 'katana', 'gau'],
'remove_duplicate_endpoints': true,
@@ -106,7 +109,8 @@ fetch_url: {
vulnerability_scan: {
# 'custom_header': {
# 'Cookie':'Test',
- # 'User-Agent': 'Mozilla/5.0'
+ # 'User-Agent': 'Mozilla/5.0',
+ # 'Custom-Header': 'My custom header'
# },
'run_nuclei': true,
'run_dalfox': false,