Skip to content
This repository was archived by the owner on Jan 13, 2025. It is now read-only.

Commit 78c1909

Browse files
authored
feat(api): Add a new API for exporting data as a CSV (#33)
* feature(api): Add a new API for generating CSV files Signed-off-by: hayk96 <hayko5999@gmail.com> * chore(api): Bump app version #minor Signed-off-by: hayk96 <hayko5999@gmail.com> --------- Signed-off-by: hayk96 <hayko5999@gmail.com>
1 parent 8e30efb commit 78c1909

File tree

7 files changed

+286
-2
lines changed

7 files changed

+286
-2
lines changed

CHANGELOG.md

+5
Original file line numberDiff line numberDiff line change
@@ -1,5 +1,10 @@
11
# Changelog
22

3+
## 0.4.0 / 2024-06-23
4+
5+
* [ENHANCEMENT] Added a new API endpoint: `/export` for exporting data from Prometheus as a CSV file. This feature allows users to export data from Prometheus easily.
6+
It supports both instant queries and range queries. More details can be found in the [API documentation](https://hayk96.github.io/prometheus-api/). #33
7+
38
## 0.3.3 / 2024-06-16
49

510
* [ENHANCEMENT] Added a new endpoint: `/metrics-lifecycle-policies/trigger` for force-triggering all Metrics Lifecycle Policies. #29

src/api/v1/api.py

+2-1
Original file line numberDiff line numberDiff line change
@@ -1,8 +1,9 @@
1-
from .. v1.endpoints import reverse_proxy, rules, policies, web, health
1+
from .. v1.endpoints import reverse_proxy, rules, policies, web, health, export
22
from fastapi import APIRouter
33

44
api_router = APIRouter()
55
api_router.include_router(rules.router, prefix="/api/v1")
6+
api_router.include_router(export.router, prefix="/api/v1")
67
api_router.include_router(policies.router, prefix="/api/v1")
78
api_router.include_router(web.router, prefix="")
89
api_router.include_router(health.router, prefix="")

src/api/v1/endpoints/export.py

+98
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,98 @@
1+
from fastapi import APIRouter, Response, Request, Body, status
2+
from starlette.background import BackgroundTask
3+
from fastapi.responses import FileResponse
4+
from src.models.export import ExportData
5+
from src.core import export as exp
6+
from src.utils.log import logger
7+
from typing import Annotated
8+
9+
router = APIRouter()
10+
11+
12+
@router.post("/export",
13+
name="Export data from Prometheus",
14+
description="Exports data from Prometheus based on the provided PromQL",
15+
status_code=status.HTTP_200_OK,
16+
tags=["export"],
17+
responses={
18+
200: {
19+
"description": "OK",
20+
"content": {
21+
"text/csv; charset=utf-8": {
22+
"example": "__name__,instance,job,timestamp,value\n"
23+
"up,prometheus-api:5000,prometheus-api,1719131438.585,1\n"
24+
"up,localhost:9090,prometheus,1719131438.585,1"
25+
}
26+
}
27+
},
28+
400: {
29+
"description": "Bad Request",
30+
"content": {
31+
"application/json": {
32+
"example": [
33+
{
34+
"status": "error",
35+
"query": "sum by (instance) (prometheus_build_info",
36+
"message": "invalid parameter 'query': 1:41: parse error: unclosed left parenthesis"
37+
38+
}
39+
]
40+
}
41+
}
42+
},
43+
500: {
44+
"description": "Internal Server Error",
45+
"content": {
46+
"application/json": {
47+
"example": [
48+
{
49+
"status": "error",
50+
"query": "sum by (instance) (prometheus_build_info)",
51+
"message": "Prometheus query has failed. HTTPConnectionPool(host='localhost', port=9090)"
52+
}
53+
]
54+
}
55+
}
56+
}
57+
}
58+
)
59+
async def export(
60+
request: Request,
61+
response: FileResponse or Response,
62+
data: Annotated[
63+
ExportData,
64+
Body(
65+
openapi_examples=ExportData._request_body_examples,
66+
)
67+
]
68+
):
69+
data = data.dict()
70+
filename = "data.csv"
71+
expr, start = data.get("expr"), data.get("start")
72+
end, step = data.get("end"), data.get("step")
73+
validation_status, response.status_code, sts, msg = exp.validate_request(
74+
"export.json", data)
75+
if validation_status:
76+
range_query = True if all([start, end, step]) else False
77+
resp_status, response.status_code, resp_data = exp.prom_query(
78+
range_query=range_query,
79+
query=expr, start=start,
80+
end=end, step=step)
81+
if resp_status:
82+
labels, data_processed = exp.data_processor(source_data=resp_data)
83+
csv_generator_status, sts, msg = exp.csv_generator(
84+
data=data_processed, fields=labels, filename=filename)
85+
else:
86+
sts, msg = resp_data.get("status"), resp_data.get("error")
87+
88+
logger.info(
89+
msg=msg,
90+
extra={
91+
"status": response.status_code,
92+
"query": expr,
93+
"method": request.method,
94+
"request_path": request.url.path})
95+
if sts == "success":
96+
return FileResponse(path=filename,
97+
background=BackgroundTask(exp.cleanup_files, filename))
98+
return {"status": sts, "query": expr, "message": msg}

src/core/export.py

+119
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,119 @@
1+
from jsonschema import validate, exceptions
2+
from src.utils.arguments import arg_parser
3+
import requests
4+
import json
5+
import copy
6+
import csv
7+
import os
8+
9+
prom_addr = arg_parser().get("prom.addr")
10+
11+
12+
def prom_query(query, range_query=False, start="0", end="0",
               step="0", url=prom_addr) -> tuple[bool, int, dict]:
    """Execute a PromQL query against the Prometheus HTTP API.

    Args:
        query:       PromQL expression to evaluate.
        range_query: when True, POST to /api/v1/query_range, otherwise
                     to /api/v1/query (instant query).
        start/end/step: range-query parameters; ignored by Prometheus
                     for instant queries.
        url:         base address of the Prometheus server.

    Returns:
        (success, http_status, body) where success is True only for an
        HTTP 200 reply and body is the decoded JSON response (or a
        synthetic error object when the request itself failed).
    """
    endpoint = "query_range" if range_query else "query"
    try:
        # NOTE(review): no request timeout is set, so a hung Prometheus
        # blocks this call indefinitely — consider adding one.
        r = requests.post(f"{url}/api/v1/{endpoint}",
                          data={
                              "query": query,
                              "start": start,
                              "end": end,
                              "step": step},
                          headers={"Content-Type": "application/x-www-form-urlencoded"})
    # RequestException covers connection/HTTP-level failures without
    # swallowing KeyboardInterrupt/SystemExit as BaseException did.
    except requests.exceptions.RequestException as e:
        return False, 500, {"status": "error",
                            "error": f"Prometheus query has failed. {e}"}
    return r.status_code == 200, r.status_code, r.json()
32+
33+
34+
def data_processor(source_data: dict) -> tuple[list, list]:
    """Flatten a Prometheus query result into CSV-ready rows.

    Args:
        source_data: decoded JSON body of a successful Prometheus query
                     (instant "vector" or range "matrix" result).

    Returns:
        (fields, rows): fields is the sorted union of all label names
        seen across the series, followed by "timestamp" and "value";
        rows is one dict per sample, combining the series labels with
        that sample's timestamp and value.  The input is not mutated.
    """
    result = source_data["data"]["result"]
    result_type = source_data["data"]["resultType"]
    data_processed, unique_labels = [], set()

    if result_type == "vector":
        # Instant query: one [timestamp, value] sample per series.
        for series in result:
            unique_labels.update(series["metric"])
            timestamp, value = series["value"]
            data_processed.append(
                dict(series["metric"], timestamp=timestamp, value=value))
    elif result_type == "matrix":
        # Range query: each series carries a list of samples; emit one
        # row per sample, repeating the series labels.
        for series in result:
            unique_labels.update(series["metric"])
            for timestamp, value in series["values"]:
                data_processed.append(
                    dict(series["metric"], timestamp=timestamp, value=value))

    fields = sorted(unique_labels)
    fields.extend(["timestamp", "value"])
    return fields, data_processed
74+
75+
76+
def validate_request(schema_file, data) -> tuple[bool, int, str, str]:
    """Validate a request object against a JSON schema.

    Args:
        schema_file: schema file name, resolved under src/schemas/.
        data:        request body (dict) to validate.

    Returns:
        (is_valid, http_status, status_text, message) — (True, 200,
        "success", ...) on success, (False, 400, "error", <reason>) on a
        validation failure.

    It will be moved into the utils package in the future.
    """
    schema_path = f"src/schemas/{schema_file}"
    # Explicit encoding so the schema parses identically on every platform.
    with open(schema_path, encoding="utf-8") as f:
        schema = json.load(f)
    try:
        validate(instance=data, schema=schema)
    except exceptions.ValidationError as e:
        # args[0] is the human-readable validation message.
        return False, 400, "error", e.args[0]
    return True, 200, "success", "Request is valid"
90+
91+
92+
def cleanup_files(file) -> tuple[bool, str]:
    """Remove the generated file once the response has been sent.

    Args:
        file: path of the file to delete.

    Returns:
        (True, success message) when the file was removed, otherwise
        (False, error text).  Note: the original annotation
        ``tuple[True, str]`` was invalid — ``True`` is not a type.
    """
    try:
        os.remove(file)
    # OSError covers the realistic failures of os.remove (missing file,
    # permissions) without masking unrelated BaseExceptions.
    except OSError as e:
        return False, str(e)
    return True, "File has been removed successfully"
103+
104+
105+
def csv_generator(data, fields, filename) -> tuple[bool, str, str]:
    """Write query rows to a CSV file.

    Args:
        data:     iterable of row dicts (label -> value).
        fields:   ordered CSV header / column names; keys not listed
                  here are dropped (extrasaction="ignore").
        filename: output file path.

    Returns:
        (success, status_text, message).
    """
    try:
        # newline="" is required by the csv module so it controls line
        # endings itself (otherwise blank rows appear on Windows).
        with open(filename, "w", newline="", encoding="utf-8") as csvfile:
            writer = csv.DictWriter(
                csvfile, fieldnames=fields, extrasaction="ignore")
            writer.writeheader()
            writer.writerows(data)
    except (OSError, ValueError, csv.Error) as e:
        return False, "error", str(e)
    return True, "success", "CSV file has been generated successfully"

src/models/export.py

+20
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,20 @@
1+
from pydantic import BaseModel, Extra
2+
from typing import Optional
3+
4+
5+
class ExportData(BaseModel, extra=Extra.allow):
    """Request body of the `/export` endpoint.

    `expr` is the PromQL expression to export.  When `start`, `end` and
    `step` are all provided the endpoint runs a range query; when they
    are omitted it runs an instant query.  Extra fields are accepted by
    the model (`Extra.allow`); stricter validation is performed against
    the JSON schema by the endpoint itself.
    """
    # PromQL expression to evaluate.
    expr: str
    # Range start, e.g. "2024-01-30T00:00:00Z" (range queries only).
    start: Optional[str] = None
    # Range end (range queries only).
    end: Optional[str] = None
    # Query resolution step, e.g. "1h" (range queries only).
    step: Optional[str] = None
    # Named OpenAPI request-body examples shown in the interactive docs;
    # the leading underscore keeps pydantic from treating it as a field.
    _request_body_examples = {
        "Count of successful logins by users per hour in a day": {
            "description": "Count of successful logins by users per hour in a day",
            "value": {
                "expr": "users_login_count{status='success'}",
                "start": "2024-01-30T00:00:00Z",
                "end": "2024-01-31T23:59:59Z",
                "step": "1h"
            }
        }
    }

src/schemas/export.json

+41
Original file line numberDiff line numberDiff line change
@@ -0,0 +1,41 @@
1+
{
2+
"$schema": "http://json-schema.org/draft-07/schema#",
3+
"type": "object",
4+
"properties": {
5+
"expr": {
6+
"type": "string"
7+
},
8+
"start": {
9+
"type": ["string", "null"],
10+
"format": "date-time"
11+
},
12+
"end": {
13+
"type": ["string", "null"],
14+
"format": "date-time"
15+
},
16+
"step": {
17+
"type": ["string", "null"],
18+
"pattern": "^((([0-9]+)y)?(([0-9]+)w)?(([0-9]+)d)?(([0-9]+)h)?(([0-9]+)m)?(([0-9]+)s)?(([0-9]+)ms)?|0)$"
19+
}
20+
},
21+
"required": ["expr"],
22+
"additionalProperties": false,
23+
"oneOf": [
24+
{
25+
"properties": {
26+
"start": { "type": "string" },
27+
"end": { "type": "string" },
28+
"step": { "type": "string" }
29+
},
30+
"required": ["start", "end", "step"]
31+
},
32+
{
33+
"properties": {
34+
"start": { "type": "null" },
35+
"end": { "type": "null" },
36+
"step": { "type": "null" }
37+
}
38+
}
39+
],
40+
"title": "Export data from Prometheus"
41+
}

src/utils/openapi.py

+1-1
Original file line numberDiff line numberDiff line change
@@ -16,7 +16,7 @@ def openapi(app: FastAPI):
1616
"providing additional features and addressing its limitations. "
1717
"Running as a sidecar alongside the Prometheus server enables "
1818
"users to extend the capabilities of the API.",
19-
version="0.3.3",
19+
version="0.4.0",
2020
contact={
2121
"name": "Hayk Davtyan",
2222
"url": "https://hayk96.github.io",

0 commit comments

Comments
 (0)