Skip to content

Commit 417e0a6

Browse files
committed
env and new docker added
1 parent 5703e42 commit 417e0a6

36 files changed

+658
-1247
lines changed

.env

Lines changed: 1 addition & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1 @@
1+
API_KEY = ''

.pre-commit-config.yaml

Lines changed: 41 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
# Borrowed initially from https://github.com/lyft/cartography
2+
default_language_version:
3+
# force all unspecified python hooks to run python3
4+
python: python3
5+
repos:
6+
- repo: https://github.com/pre-commit/pre-commit-hooks
7+
rev: v4.0.1
8+
hooks:
9+
- id: check-executables-have-shebangs
10+
- id: check-merge-conflict
11+
- id: check-vcs-permalinks
12+
# Disabling this as it is giving false positives for sam templates
13+
# - id: check-yaml
14+
# args: ['--unsafe'] # Just check the syntax
15+
- id: debug-statements
16+
- id: end-of-file-fixer
17+
- id: trailing-whitespace
18+
- repo: https://github.com/PyCQA/flake8
19+
rev: 3.9.2
20+
hooks:
21+
- id: flake8
22+
- repo: https://github.com/pre-commit/mirrors-autopep8
23+
rev: v1.5.7
24+
hooks:
25+
- id: autopep8
26+
# disable a few rewrites which will cause autopep8 to reflow
27+
args: [--in-place, '--ignore=E265,E501,W504']
28+
- repo: https://github.com/asottile/reorder_python_imports
29+
rev: v2.6.0
30+
hooks:
31+
- id: reorder-python-imports
32+
args: [--py3-plus]
33+
- repo: https://github.com/pre-commit/mirrors-mypy
34+
rev: v1.0.0
35+
hooks:
36+
- id: mypy
37+
exclude: ^pb/
38+
additional_dependencies:
39+
- types-requests
40+
- types-PyYAML
41+
- types-python-dateutil

.vscode/settings.json

Lines changed: 6 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,6 @@
1+
{
2+
"[python]": {
3+
"editor.defaultFormatter": "ms-python.autopep8"
4+
},
5+
"python.formatting.provider": "none"
6+
}

Dockerfile

Lines changed: 3 additions & 1 deletion
Original file line numberDiff line numberDiff line change
@@ -24,4 +24,6 @@ RUN apt update && apt upgrade -y
2424
RUN apt install nmap -y
2525
RUN pip install -r requirements.txt
2626

27-
CMD [ "python", "./app.py" ]
27+
ENV OPENAI_API_KEY=''
28+
29+
CMD [ "sh", "-c","python ./app.py ${OPENAI_API_KEY}" ]

Makefile

Lines changed: 2 additions & 0 deletions
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,2 @@
1+
lint:
2+
pre-commit run --all-files --show-diff-on-failure

README.md

Lines changed: 8 additions & 6 deletions
Original file line numberDiff line numberDiff line change
@@ -45,28 +45,29 @@ This is an implementation for our college PCL project which is still under development
4545
#### Auth and User management
4646

4747
```text
48-
GET /register/<int:user_id>/<string:password>
48+
GET /register/<int:user_id>/<string:password>/<string:unique_key>
4949
```
5050
| Parameter | Type | Description |
5151
| :-------- | :------- | :------------------------- |
5252
|`ID`|`Int`|user ID|
5353
|`Passwd`| `String`| User Passwd|
54+
|`Unique_Key`| `String`| User Unique_Key|
5455

5556
## Improvements
5657
Added GPT functionality with chunking module.
5758
The methodology is based on how `Langchain GPT embeddings` operate. Basically the operation goes like this:
5859

5960
```text
60-
Data -> Chunks_generator ─┐ ┌─> AI_Loop -> Data_Extraction -> Return_Data
61-
├─> Chunk1 ─┤
62-
├─> Chunk2 ─┤
61+
Data -> Chunks_generator ─┐ ┌─> AI_Loop -> Data_Extraction -> Return_Data
62+
(GPT3 - 1500 TOKENS) ├─> Chunk1 ─┤
63+
(GPT4 - 3500 TOKENS) ├─> Chunk2 ─┤
6364
├─> Chunk3 ─┤
6465
└─> Chunk N ─┘
6566
```
6667

6768
AI code:
6869
```python
69-
def AI(analize: str) -> dict[str, any]:
70+
def AI(analyze: str) -> dict[str, any]:
7071
# Prompt about what the query is all about
7172
prompt = f"""
7273
Do a vulnerability analysis report on the following JSON data and
@@ -113,4 +114,5 @@ def AI(analize: str) -> dict[str, any]:
113114
return ai_output
114115
```
115116

116-
**Default_Key**: **e43d4c3b79**
117+
#### Default User Keys
118+
**Default_Key**: **cff649285012c6caae4d**

__pycache__/app.cpython-310.pyc

601 Bytes
Binary file not shown.
4.65 KB
Binary file not shown.

app.py

Lines changed: 61 additions & 39 deletions
Original file line numberDiff line numberDiff line change
@@ -1,14 +1,23 @@
1-
import nmap
2-
import sqlite3
3-
import re
4-
import openai
5-
import os
61
import hashlib
72
import json
8-
from flask import Flask, render_template
9-
from flask_restful import Api, Resource
3+
import os
4+
import re
5+
import sqlite3
6+
from typing import Any
7+
from typing import Callable
8+
from typing import cast
109

11-
openai.api_key = "__API__KEY__"
10+
import nmap
11+
import openai
12+
13+
from dotenv import load_dotenv
14+
from flask import Flask
15+
from flask import render_template
16+
from flask_restful import Api
17+
from flask_restful import Resource
18+
19+
load_dotenv()
20+
openai.api_key = os.getenv('API_KEY')
1221
model_engine = "text-davinci-003"
1322

1423
app = Flask(__name__)
@@ -19,36 +28,44 @@
1928

2029
# Index and Docx page
2130
@app.route('/', methods=['GET'])
22-
def home() -> any:
31+
def home() -> Any:
2332
return render_template("index.html")
2433

2534

2635
@app.route('/doc', methods=['GET'])
27-
def doc() -> any:
36+
def doc() -> Any:
2837
return render_template("doc.html")
2938

3039

31-
@app.route('/register/<int:user_id>/<string:password>')
32-
def store_auth_key(user_id: int, password: str) -> str:
40+
@app.route('/register/<int:user_id>/<string:password>/<string:unique_key>')
41+
def store_auth_key(user_id: int, password: str, unique_key: str) -> str:
3342
sanitized_username = user_id
3443
sanitized_passwd = password
35-
# Hash the user's ID and password together
44+
sanitized_key = unique_key
45+
# Hash the user's ID, password, and unique key together
3646
hash = hashlib.sha256()
3747
hash.update(str(sanitized_username).encode('utf-8'))
3848
hash.update(sanitized_passwd.encode('utf-8'))
49+
hash.update(sanitized_key.encode('utf-8'))
3950
# Use the hash to generate the auth key
40-
auth_key = hash.hexdigest()[:20] # Get the first 10 characters
51+
auth_key = hash.hexdigest()[:20] # Get the first 20 characters
4152
db_file = 'auth_keys.db'
4253
need_create_table = not os.path.exists(db_file)
4354
conn = sqlite3.connect(db_file)
4455
cursor = conn.cursor()
4556
if need_create_table:
4657
cursor.execute('''CREATE TABLE auth_keys
4758
(user_id INT PRIMARY KEY NOT NULL,
48-
auth_key TEXT NOT NULL);''')
59+
auth_key TEXT NOT NULL,
60+
unique_key TEXT NOT NULL);''')
61+
query = (
62+
"INSERT INTO auth_keys "
63+
"(user_id, auth_key, unique_key) "
64+
"VALUES (?, ?, ?)"
65+
)
4966
cursor.execute(
50-
"INSERT INTO auth_keys (user_id, auth_key) VALUES (?, ?)",
51-
(sanitized_passwd, auth_key)
67+
query,
68+
(sanitized_username, auth_key, sanitized_key)
5269
)
5370

5471
conn.commit()
@@ -57,23 +74,25 @@ def store_auth_key(user_id: int, password: str) -> str:
5774
return auth_key
5875

5976

77+
def to_int(s: str) -> int:
78+
return int(s)
79+
80+
6081
def sanitize(input_string: str) -> str:
6182
if not re.match("^[a-zA-Z0-9]*$", input_string):
6283
raise ValueError("Invalid characters in string")
6384
else:
6485
return input_string
6586

6687

67-
def chunk_output(
68-
scan_output: str, max_token_size: int
69-
) -> list[dict[str, any]]:
70-
scan_output_dict = json.loads(scan_output)
88+
def chunk_output(scan_output: dict,
89+
max_token_size: int) -> list[dict[str, Any]]:
7190
output_chunks = []
7291
current_chunk = {}
7392
current_token_count = 0
7493

7594
# Convert JSON to AI usable chunks
76-
for ip, scan_data in scan_output_dict.items():
95+
for ip, scan_data in scan_output.items():
7796
new_data_token_count = len(json.dumps({ip: scan_data}).split())
7897

7998
if current_token_count + new_data_token_count <= max_token_size:
@@ -90,7 +109,7 @@ def chunk_output(
90109
return output_chunks
91110

92111

93-
def AI(analize: str) -> dict[str, any]:
112+
def AI(analize: str) -> dict[str, Any]:
94113
# Prompt about what the query is all about
95114
prompt = f"""
96115
Do a vulnerability analysis report on the following JSON data and
@@ -118,7 +137,7 @@ def AI(analize: str) -> dict[str, any]:
118137
n=1,
119138
stop=None,
120139
)
121-
response = completion.choices[0].text
140+
response = completion.choices[0]['text']
122141

123142
# Assuming extract_ai_output returns a dictionary
124143
extracted_data = extract_ai_output(response)
@@ -152,7 +171,7 @@ def authenticate(auth_key: str) -> bool:
152171
return False
153172

154173

155-
def extract_ai_output(ai_output: str) -> dict[str, any]:
174+
def extract_ai_output(ai_output: str) -> dict[str, Any]:
156175
result = {
157176
"open_ports": [],
158177
"closed_ports": [],
@@ -169,13 +188,16 @@ def extract_ai_output(ai_output: str) -> dict[str, any]:
169188
# If found, convert string of ports to list
170189
if open_ports_match:
171190
result["open_ports"] = list(
172-
map(int, open_ports_match.group(1).split(',')))
191+
map(cast(Callable[[Any], str], int),
192+
open_ports_match.group(1).split(',')))
173193
if closed_ports_match:
174194
result["closed_ports"] = list(
175-
map(int, closed_ports_match.group(1).split(',')))
195+
map(cast(Callable[[Any], str], int),
196+
closed_ports_match.group(1).split(',')))
176197
if filtered_ports_match:
177198
result["filtered_ports"] = list(
178-
map(int, filtered_ports_match.group(1).split(',')))
199+
map(cast(Callable[[Any], str], int),
200+
filtered_ports_match.group(1).split(',')))
179201

180202
# Match and extract criticality score
181203
criticality_score_match = re.search(
@@ -186,24 +208,23 @@ def extract_ai_output(ai_output: str) -> dict[str, any]:
186208
return result
187209

188210

189-
def profile(auth: str, url: str, argument: str) -> dict[str, any]:
211+
def profile(auth: str, url: str, argument: str) -> dict[str, Any]:
190212
ip = url
191213
# Nmap Execution command
192214
usernamecheck = authenticate(auth)
193215
if usernamecheck is False:
194-
return [{"error": "passwd or username error"}]
216+
return {"error": "passwd or username error"}
195217
else:
196218
nm.scan('{}'.format(ip), arguments='{}'.format(argument))
197219
scan_data = nm.analyse_nmap_xml_scan()
198-
analize = scan_data["scan"]
199-
# chunk_data = str(chunk_output(analize, 500))
200-
# all_outputs = []
201-
# for chunks in chunk_data:
202-
# string_chunks = str(chunks)
203-
# data = AI(string_chunks)
204-
# all_outputs.append(data)
205-
# return json.dumps(all_outputs)
206-
return analize
220+
analyze = scan_data["scan"]
221+
chunk_data = str(chunk_output(analyze, 500))
222+
all_outputs = []
223+
for chunks in chunk_data:
224+
string_chunks = str(chunks)
225+
data = AI(string_chunks)
226+
all_outputs.append(data)
227+
return json.dumps(all_outputs)
207228

208229

209230
# Effective Scan
@@ -257,5 +278,6 @@ def get(self, auth, url):
257278
api.add_resource(
258279
p5, "/api/p5/<string:auth>/<string:url>")
259280

281+
260282
if __name__ == '__main__':
261283
app.run(host="0.0.0.0", port="80")

auth_keys.db

0 Bytes
Binary file not shown.

0 commit comments

Comments
 (0)