This repository was archived by the owner on Jan 13, 2025. It is now read-only.

Commit 3f4e3fe

revert: Revert release 0.4.1 (#44) (#45)
Signed-off-by: hayk96 <hayko5999@gmail.com>
1 parent 16a5a82 commit 3f4e3fe

13 files changed, +24 -631 lines


CHANGELOG.md

-9 lines

@@ -1,14 +1,5 @@
 # Changelog
 
-## 0.4.1 / 2024-06-30
-
-* [ENHANCEMENT] Added a new web page for reports. This page allows exporting Prometheus data in various formats directly from the web UI. #43
-* [ENHANCEMENT] Added functionality to change the timestamp format while exporting data via the /export API. Previously, the default value was Unix timestamp. Now, you can choose from the following options: iso8601, rfc2822, rfc3339, friendly, and unix (default). #41
-* [ENHANCEMENT] Added a new feature that allows replacing Prometheus labels (fields) in the final dataset: CSV, JSON, etc. #39
-* [ENHANCEMENT] Added support for exporting files in multiple formats via the /export API. Supported formats include: CSV, YAML (or YML), JSON, and JSON Lines (or NDJSON). E.g., ?format=csv|yaml|yml|json|ndjson|jsonlines. #37
-* [ENHANCEMENT] Improved the functionality that generates CSV files to ensure they have unique names instead of static names, resolving issues with responses getting mixed up between users. #35
-* [BUGFIX] Fixed exception handling for replace_fields in the /export API. #43
-
 ## 0.4.0 / 2024-06-23
 
 * [ENHANCEMENT] Added a new API endpoint: `/export` for exporting data from Prometheus as a CSV file. This feature allows users to export data from Prometheus easily.
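
For orientation, the sketch below shows the shape of a 0.4.1-style /export request that combined the format, timestamp_format, and replace_fields options removed by this revert. It is illustrative only; the base URL and metric name are assumptions, not part of this commit.

# Illustrative sketch of a 0.4.1-style /export request (reverted by this commit).
# The base URL and metric name are assumptions.
import requests

payload = {
    "expr": "users_login_count{status='success'}",
    "start": "2024-01-30T00:00:00Z",
    "end": "2024-01-31T23:59:59Z",
    "step": "1h",
    # Options removed by this revert:
    "timestamp_format": "friendly",
    "replace_fields": {"__name__": "Name", "timestamp": "Time"},
}

# 0.4.1 accepted ?format=csv|yaml|yml|json|ndjson|jsonlines; after the revert
# the endpoint always produces a CSV file.
resp = requests.post("http://localhost:8080/export?format=csv", json=payload)
with open("export.csv", "wb") as f:
    f.write(resp.content)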

src/api/v1/endpoints/export.py

+8 -12 lines

@@ -64,15 +64,12 @@ async def export(
         Body(
             openapi_examples=ExportData._request_body_examples,
         )
-    ],
-    format: str = "csv"
+    ]
 ):
     data = data.dict()
+    filename = "data.csv"
     expr, start = data.get("expr"), data.get("start")
     end, step = data.get("end"), data.get("step")
-    file, file_format = None, format.lower()
-    custom_fields, timestamp_format = data.get(
-        "replace_fields"), data.get("timestamp_format")
     validation_status, response.status_code, sts, msg = exp.validate_request(
         "export.json", data)
     if validation_status:
@@ -82,10 +79,9 @@ async def export(
             query=expr, start=start,
             end=end, step=step)
         if resp_status:
-            labels, data_processed = exp.data_processor(
-                source_data=resp_data, custom_fields=custom_fields, timestamp_format=timestamp_format)
-            file_generator_status, sts, response.status_code, file, msg = exp.file_generator(
-                file_format=file_format, data=data_processed, fields=labels)
+            labels, data_processed = exp.data_processor(source_data=resp_data)
+            csv_generator_status, sts, msg = exp.csv_generator(
+                data=data_processed, fields=labels, filename=filename)
         else:
             sts, msg = resp_data.get("status"), resp_data.get("error")
 
@@ -95,8 +91,8 @@ async def export(
             "status": response.status_code,
             "query": expr,
             "method": request.method,
-            "request_path": f"{request.url.path}{'?' + request.url.query if request.url.query else ''}"})
+            "request_path": request.url.path})
     if sts == "success":
-        return FileResponse(path=file,
-                            background=BackgroundTask(exp.cleanup_files, file))
+        return FileResponse(path=filename,
+                            background=BackgroundTask(exp.cleanup_files, filename))
     return {"status": sts, "query": expr, "message": msg}

src/api/v1/endpoints/web.py

+1 -24 lines

@@ -10,7 +10,6 @@
 if arg_parser().get("web.enable_ui") == "true":
     rules_management = "ui/rules-management"
     metrics_management = "ui/metrics-management"
-    reports = "ui/reports"
     logger.info("Starting web management UI")
 
     @router.get("/", response_class=HTMLResponse,
@@ -42,7 +41,7 @@ async def rules_management_files(path, request: Request):
         return f"{sts} {msg}"
 
     @router.get("/metrics-management",
-                description="Renders metrics management HTML page of this application",
+                description="RRenders metrics management HTML page of this application",
                 include_in_schema=False)
     async def metrics_management_page():
         return FileResponse(f"{metrics_management}/index.html")
@@ -62,25 +61,3 @@ async def metrics_management_files(path, request: Request):
                 "method": request.method,
                 "request_path": request.url.path})
         return f"{sts} {msg}"
-
-    @router.get("/reports",
-                description="Renders Reports HTML page of this application",
-                include_in_schema=False)
-    async def reports_page():
-        return FileResponse(f"{reports}/index.html")
-
-    @router.get(
-        "/reports/{path}",
-        description="Returns JavaScript and CSS files of the Reports",
-        include_in_schema=False)
-    async def reports_files(path, request: Request):
-        if path in ["script.js", "style.css"]:
-            return FileResponse(f"{reports}/{path}")
-        sts, msg = "404", "Not Found"
-        logger.info(
-            msg=msg,
-            extra={
-                "status": sts,
-                "method": request.method,
-                "request_path": request.url.path})
-        return f"{sts} {msg}"

src/core/export.py

+13 -69 lines

@@ -1,11 +1,7 @@
 from jsonschema import validate, exceptions
 from src.utils.arguments import arg_parser
-from email.utils import formatdate
-from datetime import datetime
-from uuid import uuid4
 import requests
 import json
-import yaml
 import copy
 import csv
 import os
@@ -35,41 +31,7 @@ def prom_query(query, range_query=False, start="0", end="0",
     return True if r.status_code == 200 else False, r.status_code, r.json()
 
 
-def replace_fields(data, custom_fields) -> None:
-    """
-    This function replaces (renames) the
-    final Prometheus labels (fields) based
-    on the 'replace_fields' object.
-    """
-    for source_field, target_field in custom_fields.items():
-        try:
-            if isinstance(data, list):
-                data[data.index(source_field)] = target_field
-            elif isinstance(data, dict):
-                data[target_field] = data.pop(source_field)
-        except (ValueError, KeyError):
-            pass
-
-
-def format_timestamp(timestamp, fmt) -> str:
-    """
-    This function converts Unix timestamps
-    to several common time formats.
-    """
-    timestamp_formats = {
-        "unix": timestamp,
-        "rfc2822": formatdate(timestamp, localtime=True),
-        "iso8601": datetime.fromtimestamp(timestamp).isoformat(),
-        "rfc3339": datetime.fromtimestamp(timestamp).astimezone().isoformat(timespec='milliseconds'),
-        "friendly": datetime.fromtimestamp(timestamp).strftime('%A, %B %d, %Y %I:%M:%S %p')
-    }
-
-    return timestamp_formats[fmt]
-
-
-def data_processor(source_data: dict,
-                   custom_fields: dict,
-                   timestamp_format: str) -> tuple[list, list]:
+def data_processor(source_data: dict) -> tuple[list, list]:
     """
     This function preprocesses the results
     of the Prometheus query for future formatting.
@@ -85,10 +47,8 @@ def vector_processor():
             ts_labels = set(ts["metric"].keys())
             unique_labels.update(ts_labels)
             series = ts["metric"]
-            series["timestamp"] = format_timestamp(
-                ts["value"][0], timestamp_format)
+            series["timestamp"] = ts["value"][0]
             series["value"] = ts["value"][1]
-            replace_fields(series, custom_fields)
            data_processed.append(series)
 
     def matrix_processor():
@@ -98,10 +58,8 @@ def matrix_processor():
             series = ts["metric"]
             for idx in range(len(ts["values"])):
                 series_nested = copy.deepcopy(series)
-                series_nested["timestamp"] = format_timestamp(
-                    ts["values"][idx][0], timestamp_format)
+                series_nested["timestamp"] = ts["values"][idx][0]
                 series_nested["value"] = ts["values"][idx][1]
-                replace_fields(series_nested, custom_fields)
                 data_processed.append(series_nested)
                 del series_nested
 
@@ -112,7 +70,6 @@ def matrix_processor():
 
     unique_labels = sorted(unique_labels)
     unique_labels.extend(["timestamp", "value"])
-    replace_fields(unique_labels, custom_fields)
     return unique_labels, data_processed
 
 
@@ -145,31 +102,18 @@ def cleanup_files(file) -> tuple[True, str]:
     return True, "File has been removed successfully"
 
 
-def file_generator(file_format, data, fields):
+def csv_generator(data, fields, filename) -> tuple[bool, str, str]:
     """
-    This function generates a file depending
-    on the provided file format/extension
+    This function generates a CSV file
+    based on the provided objects.
     """
-
-    file_path = f"/tmp/{str(uuid4())}.{file_format}"
     try:
-        with open(file_path, 'w') as f:
-            if file_format == "csv":
-                writer = csv.DictWriter(
-                    f, fieldnames=fields, extrasaction='ignore')
-                writer.writeheader()
-                writer.writerows(data)
-            elif file_format in ["yml", "yaml"]:
-                f.write(yaml.dump(data))
-            elif file_format == "json":
-                f.write(json.dumps(data))
-            elif file_format in ["ndjson", "jsonlines"]:
-                for i in data:
-                    f.write(f"{json.dumps(i)}\n")
-            else:
-                cleanup_files(file_path)
-                return False, "error", 400, "", f"Unsupported file format '{file_format}'"
+        with open(filename, 'w') as csvfile:
+            writer = csv.DictWriter(
+                csvfile, fieldnames=fields, extrasaction='ignore')
+            writer.writeheader()
+            writer.writerows(data)
 except BaseException as e:
-        return False, "error", 500, "", str(e)
+        return False, "error", str(e)
     else:
-        return True, "success", 200, file_path, f"{file_format.upper()} file has been generated successfully"
+        return True, "success", "CSV file has been generated successfully"

src/models/export.py

+1 -28 lines

@@ -7,41 +7,14 @@ class ExportData(BaseModel, extra=Extra.allow):
     start: Optional[str] = None
     end: Optional[str] = None
     step: Optional[str] = None
-    timestamp_format: Optional[str] = "unix"
-    replace_fields: Optional[dict] = dict()
     _request_body_examples = {
-        "User logins per hour in a day": {
+        "Count of successful logins by users per hour in a day": {
             "description": "Count of successful logins by users per hour in a day",
             "value": {
                 "expr": "users_login_count{status='success'}",
                 "start": "2024-01-30T00:00:00Z",
                 "end": "2024-01-31T23:59:59Z",
                 "step": "1h"
             }
-        },
-        "User logins per hour in a day with a user-friendly time format": {
-            "description": "Count of successful user logins per hour in a day with a user-friendly time format",
-            "value": {
-                "expr": "users_login_count{status='success'}",
-                "start": "2024-01-30T00:00:00Z",
-                "end": "2024-01-31T23:59:59Z",
-                "step": "1h",
-                "timestamp_format": "friendly"
-            }
-        },
-        "User logins per hour with friendly time format and custom fields": {
-            "description": "Count of successful user logins per hour in a day "
-                           "with a user-friendly time format and custom fields",
-            "value": {
-                "expr": "users_login_count{status='success'}",
-                "start": "2024-01-30T00:00:00Z",
-                "end": "2024-01-31T23:59:59Z",
-                "step": "1h",
-                "timestamp_format": "friendly",
-                "replace_fields": {
-                    "__name__": "Name",
-                    "timestamp": "Time"
-                }
-            }
-        }
         }
     }
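
The _request_body_examples attribute kept above feeds FastAPI's openapi_examples in the /export endpoint (see the Body(...) call in export.py earlier in this diff). A minimal sketch of that wiring, assuming the Pydantic v1-style setup this project uses; the model and route names here are made up:

# Sketch of wiring a model's example dict into Body(openapi_examples=...).
# Assumes Pydantic v1 semantics, where underscore attrs stay plain class attributes.
from typing import Annotated, Optional
from fastapi import Body, FastAPI
from pydantic import BaseModel, Extra

class DemoExport(BaseModel, extra=Extra.allow):
    expr: str
    start: Optional[str] = None
    end: Optional[str] = None
    step: Optional[str] = None
    _request_body_examples = {
        "Hourly logins": {
            "description": "Count of successful logins per hour",
            "value": {"expr": "users_login_count{status='success'}", "step": "1h"},
        }
    }

app = FastAPI()

@app.post("/export-demo")
async def export_demo(
    data: Annotated[DemoExport,
                    Body(openapi_examples=DemoExport._request_body_examples)]
):
    # extra=Extra.allow keeps unknown keys, so .dict() still carries them.
    return data.dict()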

src/schemas/export.json

-7 lines

@@ -16,13 +16,6 @@
     "step": {
       "type": ["string", "null"],
       "pattern": "^((([0-9]+)y)?(([0-9]+)w)?(([0-9]+)d)?(([0-9]+)h)?(([0-9]+)m)?(([0-9]+)s)?(([0-9]+)ms)?|0)$"
-    },
-    "timestamp_format": {
-      "type": ["string"],
-      "pattern": "^(unix|iso8601|rfc2822|rfc3339|friendly)$"
-    },
-    "replace_fields": {
-      "type": "object"
     }
   },
   "required": ["expr"],

src/utils/openapi.py

+1 -1 lines

@@ -16,7 +16,7 @@ def openapi(app: FastAPI):
         "providing additional features and addressing its limitations. "
         "Running as a sidecar alongside the Prometheus server enables "
         "users to extend the capabilities of the API.",
-        version="0.4.1",
+        version="0.4.0",
         contact={
             "name": "Hayk Davtyan",
             "url": "https://hayk96.github.io",

ui/homepage/index.html

-5 lines

@@ -117,7 +117,6 @@
         color: #ffffff;
         background-image: linear-gradient(45deg, #f6d365, #fda085);
         box-shadow: 0 3px 6px rgba(0, 0, 0, 0.16);
-        width: 200px;
     }
     button:hover {
         animation: buttonPulse 0.5s ease;
@@ -171,7 +170,6 @@ <h1>The easiest Prometheus management interface</h1>
     <button id="openPrometheusButton">Open Prometheus</button>
     <button id="rulesManagementButton">Rules Management</button>
     <button id="metricsManagementButton">Metrics Management</button>
-    <button id="reportsButton">Reports</button>
 </div>
 <script>
     document.addEventListener('DOMContentLoaded', function() {
@@ -185,9 +183,6 @@ <h1>The easiest Prometheus management interface</h1>
         document.getElementById('metricsManagementButton').onclick = function() {
             window.location.href = window.location.origin + '/metrics-management';
         };
-        document.getElementById('reportsButton').onclick = function() {
-            window.location.href = window.location.origin + '/reports';
-        };
     });
 </script>
 </body>

ui/metrics-management/index.html

-4 lines
Large diffs are not rendered by default.
