2.2.7 build
s0md3v committed Nov 3, 2024
1 parent 93c6c96 commit 1b11c35
Showing 15 changed files with 139 additions and 26 deletions.
6 changes: 6 additions & 0 deletions CHANGELOG.md
@@ -1,3 +1,9 @@
#### 2.2.7
- Added `--casing` option for casing style enforcement
- Added `--ratelimit` option for explicitly defining requests/second
- Fixed "decrease chunk size/use --stable" type errors in some cases
- Fixed a bug in anomaly detection

#### 2.2.6
- Fixed Arjun getting infinitely stuck on some webpages

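As a quick illustration of the two new switches together (hypothetical target URL and rate value): `arjun -u https://example.com --casing likeThis --ratelimit 10` rewrites every wordlist entry into camelCase and caps Arjun at roughly ten requests per second.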
2 changes: 1 addition & 1 deletion arjun/__init__.py
@@ -1 +1 @@
__version__ = '2.2.6'
__version__ = '2.2.7'
26 changes: 17 additions & 9 deletions arjun/__main__.py
@@ -13,6 +13,7 @@
from arjun.core.utils import fetch_params, stable_request, random_str, slicer, confirm, populate, reader, nullify, prepare_requests, compatible_path

from arjun.plugins.heuristic import heuristic
from arjun.plugins.wl import detect_casing, covert_to_case

arjun_dir = compatible_path(mem.__file__.replace(compatible_path('/core/config.py'), ''))

@@ -25,7 +26,7 @@
parser.add_argument('-d', help='Delay between requests in seconds. (default: 0)', dest='delay', type=float, default=0)
parser.add_argument('-t', help='Number of concurrent threads. (default: 5)', dest='threads', type=int, default=5)
parser.add_argument('-w', help='Wordlist file path. (default: {arjundir}/db/large.txt)', dest='wordlist', default=arjun_dir+'/db/large.txt')
parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON/HEADERS. (default: GET)', dest='method', default='GET')
parser.add_argument('-m', help='Request method to use: GET/POST/XML/JSON. (default: GET)', dest='method', default='GET')
parser.add_argument('-i', help='Import target URLs from file.', dest='import_file', nargs='?', const=True)
parser.add_argument('-T', help='HTTP request timeout in seconds. (default: 15)', dest='timeout', type=float, default=15)
parser.add_argument('-c', help='Chunk size. The number of parameters to be sent at once', type=int, dest='chunks', default=250)
@@ -36,6 +37,7 @@
parser.add_argument('--stable', help='Prefer stability over speed.', dest='stable', action='store_true')
parser.add_argument('--include', help='Include this data in every request.', dest='include', default={})
parser.add_argument('--disable-redirects', help='disable redirects', dest='disable_redirects', action='store_true')
parser.add_argument('--casing', help='casing style for params e.g. like_this, likeThis, likethis', dest='casing')
args = parser.parse_args() # arguments to be parsed

if args.quiet:
@@ -77,7 +79,11 @@
passive_params = fetch_params(host)
wordlist.update(passive_params)
print('%s Collected %s parameters, added to the wordlist' % (info, len(passive_params)))
wordlist = list(wordlist)
if args.casing:
delimiter, casing = detect_casing(args.casing)
wordlist = [covert_to_case(word, delimiter, casing) for word in wordlist]
else:
wordlist = list(wordlist)
except FileNotFoundError:
exit('%s The specified file for parameters doesn\'t exist' % bad)

@@ -118,11 +124,15 @@ def initialize(request, wordlist, single_url=False):
return 'skipped'
print('%s Probing the target for stability' % run)
request['url'] = stable_request(url, request['headers'])
mem.var['healthy_url'] = True
if not request['url']:
return 'skipped'
else:
fuzz = "z" + random_str(6)
response_1 = requester(request, {fuzz[:-1]: fuzz[::-1][:-1]})
mem.var['healthy_url'] = response_1.status_code not in (400, 413, 418, 429, 503)
if not mem.var['healthy_url']:
print('%s Target returned HTTP %i, this may cause problems.' % (bad, response_1.status_code))
if single_url:
print('%s Analysing HTTP response for anomalies' % run)
response_2 = requester(request, {fuzz[:-1]: fuzz[::-1][:-1]})
@@ -139,16 +149,14 @@ def initialize(request, wordlist, single_url=False):
reason = compare(response_3, factors, {zzuf[:-1]: zzuf[::-1][:-1]})[2]
if not reason:
break
factors[reason] = False
if single_url:
print('%s Analysing HTTP response for potential parameter names' % run)
factors[reason] = None
if found:
num = len(found)
if words_exist:
print('%s Heuristic scanner found %i parameters' % (good, num))
print('%s Extracted %i parameters from response for testing' % (good, num))
else:
s = 's' if num > 1 else ''
print('%s Heuristic scanner found %i parameter%s: %s' % (good, num, s, ', '.join(found)))
print('%s Extracted %i parameter%s from response for testing: %s' % (good, num, s, ', '.join(found)))
if single_url:
print('%s Logicforcing the URL endpoint' % run)
populated = populate(wordlist)
@@ -192,11 +200,11 @@ def main():
count = 0
for request in requests:
url = request['url']
print('%s Scanning %d/%d: %s' % (run, count, len(requests), url))
these_params = initialize(request, wordlist, single_url=is_single)
count += 1
mem.var['kill'] = False
mem.var['bad_req_count'] = 0
print('%s Scanning %d/%d: %s' % (run, count, len(requests), url))
if these_params == 'skipped':
print('%s Skipped %s due to errors' % (bad, url))
elif these_params:
@@ -205,7 +213,7 @@ def main():
final_result[url]['method'] = request['method']
final_result[url]['headers'] = request['headers']
exporter(final_result)
print('%s Parameters found: %s\n' % (good, ', '.join(final_result[url]['params'])))
print('%s Parameters found: %-4s\n' % (good, ', '.join(final_result[url]['params'])))
if not mem.var['json_file']:
final_result = {}
continue
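To make the new `--casing` flow above concrete: the style sample passed on the command line is analysed once by `detect_casing`, and the resulting (delimiter, casing) pair is applied to every wordlist entry. A minimal sketch using the helpers added in `arjun/plugins/wl.py` later in this commit (the three wordlist entries are made-up examples):

    from arjun.plugins.wl import detect_casing, covert_to_case

    words = ['content_type', 'sessionId', 'user-agent']  # hypothetical wordlist entries
    delimiter, casing = detect_casing('likeThis')        # ('', 'c'): no delimiter, camelCase
    print([covert_to_case(word, delimiter, casing) for word in words])
    # ['contentType', 'sessionId', 'userAgent']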
2 changes: 1 addition & 1 deletion arjun/core/anomaly.py
@@ -44,7 +44,7 @@ def define(response_1, response_2, param, value, wordlist):
elif remove_tags(body_1) == remove_tags(body_2):
factors['same_plaintext'] = remove_tags(body_1)
elif body_1 and body_2 and body_1.count('\n') == body_2.count('\n'):
factors['lines_diff'] = diff_map(body_1, body_2)
factors['lines_diff'] = diff_map(body_1, body_2)
if param not in response_2.text:
factors['param_missing'] = [word for word in wordlist if word in response_2.text]
if value not in response_2.text:
4 changes: 4 additions & 0 deletions arjun/core/error_handler.py
@@ -4,6 +4,7 @@

from arjun.core.colors import bad


def connection_refused():
"""
checks if a request should be retried if the server refused connection
@@ -17,6 +18,7 @@ def error_handler(response, factors):
print('%s Target has rate limiting in place, please use --stable switch' % bad)
return 'kill'


def error_handler(response, factors):
"""
decides what to do after performing a HTTP request
@@ -26,6 +28,8 @@
returns str
"""
if type(response) != str and response.status_code in (400, 413, 418, 429, 503):
if not mem.var['healthy_url']:
return 'ok'
if response.status_code == 503:
mem.var['kill'] = True
print('%s Target is unable to process requests, try --stable switch' % bad)
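The two lines added here interact with the `healthy_url` flag set during the probe in `__main__.py`: if the target answered the very first request with one of these error codes, seeing the same codes mid-scan carries no extra signal, so the handler no longer reacts to them. A condensed sketch of just the branches visible in this diff (the real function handles more cases):

    ERROR_CODES = (400, 413, 418, 429, 503)

    def sketch_error_handler(status_code, healthy_url):
        if status_code in ERROR_CODES:
            if not healthy_url:
                return 'ok'    # target was unhealthy from the first probe; carry on
            if status_code == 503:
                return 'kill'  # target can't process requests; --stable is suggested
        return 'ok'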
4 changes: 4 additions & 0 deletions arjun/core/exporter.py
@@ -6,13 +6,15 @@

from arjun.core.utils import create_query_string


def json_export(result):
"""
exports result to a file in JSON format
"""
with open(mem.var['json_file'], 'w+', encoding='utf8') as json_output:
json.dump(result, json_output, sort_keys=True, indent=4)


def burp_export(result):
"""
exports results to Burp Suite by sending request to Burp proxy
@@ -30,6 +32,7 @@ def burp_export(result):
elif data['method'] == 'JSON':
requests.post(url, json=populate(data['params']), headers=data['headers'], proxies=proxies, verify=False)


def text_export(result):
"""
exports results to a text file, one url per line
@@ -48,6 +51,7 @@ def text_export(result):
elif data['method'] == 'POST':
text_file.write(clean_url + '\t' + query_string + '\n')


def exporter(result):
"""
main exporter function that calls other export functions
27 changes: 15 additions & 12 deletions arjun/core/importer.py
@@ -1,5 +1,18 @@
import re

burp_regex = re.compile(r'''(?m)^ <url><!\[CDATA\[(.+?)\]\]></url>
<host ip="[^"]*">[^<]+</host>
<port>[^<]*</port>
<protocol>[^<]*</protocol>
<method><!\[CDATA\[(.+?)\]\]></method>
<path>.*</path>
<extension>(.*)</extension>
<request base64="(?:false|true)"><!\[CDATA\[([\s\S]+?)]]></request>
<status>([^<]*)</status>
<responselength>([^<]*)</responselength>
<mimetype>([^<]*)</mimetype>''')


def reader(path, mode='string'):
"""
reads a file
@@ -11,6 +24,7 @@ def reader(path, mode='string'):
else:
return ''.join([line for line in file])


def parse_request(string):
"""
parses http request
@@ -25,6 +39,7 @@ def parse_request(string):
result['data'] = match.group(4)
return result


def parse_headers(string):
"""
parses headers
@@ -37,18 +52,6 @@ def parse_headers(string):
result[splitted[0]] = ':'.join(splitted[1:]).strip()
return result

burp_regex = re.compile(r'''(?m)^ <url><!\[CDATA\[(.+?)\]\]></url>
<host ip="[^"]*">[^<]+</host>
<port>[^<]*</port>
<protocol>[^<]*</protocol>
<method><!\[CDATA\[(.+?)\]\]></method>
<path>.*</path>
<extension>(.*)</extension>
<request base64="(?:false|true)"><!\[CDATA\[([\s\S]+?)]]></request>
<status>([^<]*)</status>
<responselength>([^<]*)</responselength>
<mimetype>([^<]*)</mimetype>''')


def burp_import(path):
"""
1 change: 1 addition & 0 deletions arjun/core/prompt.py
@@ -1,6 +1,7 @@
import os
import tempfile


def prompt(default=None):
"""
lets user paste input by opening a temp file in a text editor
1 change: 1 addition & 0 deletions arjun/core/requester.py
@@ -11,6 +11,7 @@

warnings.filterwarnings('ignore') # Disable SSL related warnings


@sleep_and_retry
@limits(calls=mem.var['rate_limit'], period=1)
def requester(request, payload={}):
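For context, this is where the new `--ratelimit` option takes effect: `mem.var['rate_limit']` is handed to the `ratelimit` package's `limits` decorator, and `sleep_and_retry` makes the wrapped function block until the one-second window resets rather than raise. A standalone sketch of the same pattern (the limit of 5 and the function body are arbitrary examples):

    from ratelimit import limits, sleep_and_retry

    @sleep_and_retry             # on overflow, sleep until the window resets
    @limits(calls=5, period=1)   # allow at most 5 calls per 1-second window
    def fetch(url):
        ...                      # perform the actual HTTP request here

Since decorator arguments are evaluated at import time, `mem.var['rate_limit']` must be populated before `arjun.core.requester` is first imported.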
5 changes: 3 additions & 2 deletions arjun/core/utils.py
@@ -153,7 +153,7 @@ def create_query_string(params):
pair = param + '=' + random_str(4) + '&'
query_string += pair
if query_string.endswith('&'):
query_string = query_string[:-1]
query_string = query_string[:-1]
return '?' + query_string


@@ -180,6 +180,7 @@ def extract_js(response):
scripts.append(actual_parts[0])
return scripts


def parse_headers(string):
"""
parses headers
@@ -274,7 +275,7 @@ def prepare_requests(args):
'headers': headers,
'include': params
}
)
)
elif args.import_file:
result = importer(args.import_file, mem.var['method'], headers, args.include)
return result
3 changes: 2 additions & 1 deletion arjun/plugins/commoncrawl.py
@@ -2,9 +2,10 @@

from urllib.parse import urlparse


def commoncrawl(host, page=0):
these_params = set()
response = requests.get('http://index.commoncrawl.org/CC-MAIN-2020-29-index?url=*.%s&fl=url&page=%s&limit=10000' % (host, page), verify=False).text
response = requests.get('http://index.commoncrawl.org/CC-MAIN-2024-42-index?url=*.%s&fl=url&page=%s&limit=10000' % (host, page), verify=False).text
if response.startswith('<!DOCTYPE html>'):
return ([], False, 'commoncrawl')
urls = response.split('\n')
2 changes: 2 additions & 0 deletions arjun/plugins/heuristic.py
@@ -11,9 +11,11 @@
re_empty_vars = re.compile(r'''(?:[;\n]|\bvar|\blet)(\w+)\s*=\s*(?:['"`]{1,2}|true|false|null)''')
re_map_keys = re.compile(r'''['"](\w+?)['"]\s*:\s*['"`]''')


def is_not_junk(param):
return (re_not_junk.match(param) is not None)


def heuristic(raw_response, wordlist):
words_exist = False
potential_params = []
1 change: 1 addition & 0 deletions arjun/plugins/otx.py
@@ -2,6 +2,7 @@

from urllib.parse import urlparse


def otx(host, page):
these_params = set()
data = requests.get('https://otx.alienvault.com/api/v1/indicators/hostname/%s/url_list?limit=50&page=%d' % (host, page+1), verify=False).json()
1 change: 1 addition & 0 deletions arjun/plugins/wayback.py
@@ -2,6 +2,7 @@

from urllib.parse import urlparse


def wayback(host, page):
payload = {
'url': host,
80 changes: 80 additions & 0 deletions arjun/plugins/wl.py
@@ -0,0 +1,80 @@
def detect_casing(string):
"""Detect the casing style and delimiter of given string."""
delimiter = ""
casing = ""

if string.islower():
casing = "l"
elif string.isupper():
casing = "u"
else:
casing = "c" if string[0].islower() else "p"

if "-" in string:
delimiter = "-"
elif "_" in string:
delimiter = "_"
elif "." in string:
delimiter = "."

return delimiter, casing


def transform(parts, delimiter, casing):
"""Combine list of strings to form a string with given casing style."""
if len(parts) == 1:
if casing == "l":
return parts[0].lower()
elif casing == "u":
return parts[0].upper()
return parts[0]

result = []
for i, part in enumerate(parts):
if casing == "l":
transformed = part.lower()
elif casing == "u":
transformed = part.upper()
elif casing == "c":
if i == 0:
transformed = part.lower()
else:
transformed = part.lower().title()
else: # casing == "p"
transformed = part.lower().title()

result.append(transformed)

return delimiter.join(result)


def handle(text):
"""Break down a string into array of 'words'."""
if "-" in text:
return text.split("-")
elif "_" in text:
return text.split("_")
elif "." in text:
return text.split(".")

if not text.islower() and not text.isupper():
parts = []
temp = ""
for char in text:
if not char.isupper():
temp += char
else:
if temp:
parts.append(temp)
temp = char
if temp:
parts.append(temp)
return parts

return [text]


def covert_to_case(string, delimiter, casing):
"""Process input stream and write transformed text to output stream."""
parts = handle(string)
return transform(parts, delimiter, casing)
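Taken together, the three helpers in this new file round-trip arbitrary parameter names: `handle` splits a name into words (on `-`, `_`, `.` or camelCase boundaries) and `transform` reassembles the words in the requested style. A short trace with made-up inputs:

    delimiter, casing = detect_casing('like_this')  # ('_', 'l'): underscore-delimited, lowercase
    covert_to_case('sessionId', delimiter, casing)  # handle() -> ['session', 'Id'] -> 'session_id'
    covert_to_case('user-agent', '', 'p')           # -> ['user', 'agent'] -> 'UserAgent'

One quirk worth noting: `detect_casing` reports only the first delimiter it finds, so a mixed sample such as `x-y_z` is treated as purely hyphen-delimited.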
