Feature app history #151

Merged
12 commits merged on Feb 16, 2024
3 changes: 2 additions & 1 deletion pebblo/app/api/api.py
@@ -1,5 +1,6 @@
 from fastapi import APIRouter
-from pebblo.app.service.service import AppDiscover, AppLoaderDoc
+from pebblo.app.service.service import AppLoaderDoc
+from pebblo.app.service.discovery_service import AppDiscover


 class App:
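For context, a minimal sketch of how an APIRouter-based App class might wire the relocated AppDiscover service after this import change. The route path, handler name, and request handling are assumptions for illustration; the actual class body is collapsed in this diff.

# Hypothetical wiring sketch; route path and handler name are assumed,
# not taken from the collapsed body of pebblo/app/api/api.py.
from fastapi import APIRouter, Request

from pebblo.app.service.discovery_service import AppDiscover


class App:
    def __init__(self, prefix: str = ""):
        self.router = APIRouter(prefix=prefix)
        # Assumed route path for the discovery endpoint.
        self.router.add_api_route("/app/discover", self.discover, methods=["POST"])

    @staticmethod
    async def discover(request: Request):
        # Hand the parsed JSON body to the relocated discovery service.
        data = await request.json()
        return AppDiscover(data=data).process_request()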
1 change: 1 addition & 0 deletions pebblo/app/enums/enums.py
@@ -19,3 +19,4 @@ class CacheDir(Enum):
 class ReportConstants(Enum):
     snippets_limit = 100
     top_findings_limit = 5
+    loader_history_limit = 5
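The new loader_history_limit constant presumably caps how many earlier loads are surfaced in a report's load history, alongside the existing snippet and top-findings limits. A minimal sketch of how such a cap could be applied; the helper name and list shape are assumptions, not part of this PR.

# Hypothetical helper: trims a list of previous load entries to the configured limit.
from pebblo.app.enums.enums import ReportConstants


def trim_load_history(load_history: list) -> list:
    # Keep only the most recent entries, assuming the list is ordered oldest-first.
    limit = ReportConstants.loader_history_limit.value
    return load_history[-limit:]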
24 changes: 16 additions & 8 deletions pebblo/app/models/models.py
@@ -1,11 +1,12 @@
 from pydantic import BaseModel, Field
 from typing import Optional, List, Union
 from datetime import datetime
+from uuid import UUID


 class Metadata(BaseModel):
-    createdAt: datetime = datetime.now()
-    modifiedAt: datetime = datetime.now()
+    createdAt: datetime
+    modifiedAt: datetime

     class Config:
         arbitrary_types_allowed = True
@@ -17,7 +18,7 @@ class LoaderMetadata(BaseModel):
     sourceType: str
     sourceSize: int
     sourceFiles: Optional[list] = []
-    lastModified: Optional[datetime] = datetime.now()
+    lastModified: Optional[datetime]


 class AiDataModel(BaseModel):
@@ -39,7 +40,6 @@ class AiDocs(BaseModel):
     entities: dict
     topicCount: int
     topics: dict
-    policyViolations: Optional[List[dict]] = []


 class FrameworkInfo(BaseModel):
@@ -58,7 +58,7 @@ class InstanceDetails(BaseModel):
     platform: Optional[str]
     os: Optional[str]
     osVersion: Optional[str]
-    createdAt: datetime = datetime.now()
+    createdAt: datetime


 class AiApp(BaseModel):
@@ -77,10 +77,10 @@ class Summary(BaseModel):
     findingsEntities: int
     findingsTopics: int
     totalFiles: int
-    filesWithRestrictedData: int
+    filesWithFindings: int
     dataSources: int
     owner: str
-    createdAt: datetime = datetime.now()
+    createdAt: datetime


 class TopFindings(BaseModel):
@@ -110,12 +110,20 @@ class DataSource(BaseModel):
     # snippets: Optional[List[Snippets]]


+class LoadHistory(BaseModel):
+    loadId: UUID
+    reportName: str
+    findings: int
+    filesWithFindings: int
+    generatedOn: datetime
+
+
 class ReportModel(BaseModel):
     name: str
     description: Optional[str]
     framework: Optional[FrameworkInfo] = Field(default_factory=FrameworkInfo)
     reportSummary: Optional[Summary]
+    loadHistory: Optional[dict]
     topFindings: Optional[List[TopFindings]]
     instanceDetails: Optional[InstanceDetails]
     dataSources: Optional[List[DataSource]]
     lastModified: datetime
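The timestamp defaults removed above were evaluated once at class-definition time, so every model instance would have shared the same stale value; that is presumably why callers now pass timestamps explicitly, as discovery_service.py does below. A small illustrative sketch of the new LoadHistory model and the explicit Metadata timestamps; field values are placeholders, not from the PR.

# Illustrative only; values are placeholders.
from datetime import datetime
from uuid import uuid4

from pebblo.app.models.models import LoadHistory, Metadata

now = datetime.now()

# Metadata no longer auto-fills its timestamps, so callers supply them.
metadata = Metadata(createdAt=now, modifiedAt=now)

entry = LoadHistory(
    loadId=uuid4(),
    reportName="report_1.pdf",
    findings=12,
    filesWithFindings=3,
    generatedOn=now,
)
print(entry.dict())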
134 changes: 134 additions & 0 deletions pebblo/app/service/discovery_service.py
@@ -0,0 +1,134 @@
from datetime import datetime
from pebblo.app.enums.enums import CacheDir
from pebblo.app.utils.utils import write_json_to_file, read_json_file
from pebblo.app.libs.logger import logger
from pebblo.app.models.models import Metadata, AiApp, InstanceDetails
from pydantic import ValidationError
from fastapi import HTTPException


class AppDiscover:
    def __init__(self, data: dict):
        self.data = data
        self.load_id = data.get('load_id')
        self.application_name = self.data.get("name")

    def _create_ai_apps_model(self, instance_details):
        """
        Create an AI App Model and return the corresponding model object
        """
        logger.debug("Creating AI App model")
        # Initialize Variables
        last_used = datetime.now()
        metadata = Metadata(
            createdAt=datetime.now(),
            modifiedAt=datetime.now()
        )
        ai_apps_model = AiApp(
            metadata=metadata,
            name=self.data.get("name"),
            description=self.data.get("description", "-"),
            owner=self.data.get('owner'),
            pluginVersion=self.data.get("plugin_version"),
            instanceDetails=instance_details,
            framework=self.data.get("framework"),
            lastUsed=last_used
        )
        return ai_apps_model

    def _fetch_runtime_instance_details(self):
        """
        Retrieve instance details from input data and return its corresponding model object.
        """
        logger.debug("Retrieving instance details from input data")
        # Fetching runtime instance details
        runtime_dict = self.data.get("runtime", {})
        instance_details_model = InstanceDetails(
            language=runtime_dict.get("language"),
            languageVersion=runtime_dict.get("language_version"),
            host=runtime_dict.get("host"),
            ip=runtime_dict.get("ip"),
            path=runtime_dict.get("path"),
            runtime=runtime_dict.get("runtime"),
            type=runtime_dict.get("type"),
            platform=runtime_dict.get("platform"),
            os=runtime_dict.get("os"),
            osVersion=runtime_dict.get("os_version"),
            createdAt=datetime.now()
        )
        logger.debug(f"AI_APPS [{self.application_name}]: Instance Details: {instance_details_model.dict()}")
        return instance_details_model

    @staticmethod
    def _write_file_content_to_path(file_content, file_path):
        """
        Write content to the specified file path
        """
        logger.debug(f"Writing content to file path: {file_path}")
        # Writing file content to given file path
        write_json_to_file(file_content, file_path)

    @staticmethod
    def _read_file(file_path):
        """
        Retrieve the content of the specified file.
        """
        logger.debug(f"Reading content from file: {file_path}")
        file_content = read_json_file(file_path)
        return file_content

    def _upsert_app_metadata_file(self):
        """
        Update/Create app metadata file and write metadata for current run
        """
        # Read metadata file & get current app metadata
        app_metadata_file_path = (f"{CacheDir.home_dir.value}/"
                                  f"{self.application_name}/{CacheDir.metadata_file_path.value}")
        app_metadata = self._read_file(app_metadata_file_path)

        # Create app metadata if the file is not present yet
        if not app_metadata:
            # Writing app metadata to metadata file
            app_metadata = {"name": self.application_name, "load_ids": [self.load_id]}
        else:
            if "load_ids" in app_metadata.keys():
                # Metadata file is already present; append the current load_id
                app_metadata.get("load_ids").append(self.load_id)
            else:
                # Metadata file is present but load_ids is missing; add it for backward compatibility
                app_metadata["load_ids"] = [self.load_id]

        # Writing metadata file
        self._write_file_content_to_path(app_metadata, app_metadata_file_path)

    def process_request(self):
        """
        Process AI App discovery Request
        """
        try:
            logger.debug("AI App discovery request processing started")
            # Input Data
            logger.debug(f"AI_APP [{self.application_name}]: Input Data: {self.data}")

            # Upsert metadata file
            self._upsert_app_metadata_file()

            # Get instance details
            instance_details = self._fetch_runtime_instance_details()

            # Create AiApp model
            ai_apps = self._create_ai_apps_model(instance_details)

            # Write file to metadata location
            file_path = (f"{CacheDir.home_dir.value}/{self.application_name}/{self.load_id}"
                         f"/{CacheDir.metadata_file_path.value}")
            self._write_file_content_to_path(ai_apps.dict(), file_path)

            logger.debug("AiApp discovery request completed successfully")
            return {"message": "App Discover Request Processed Successfully"}
        except ValidationError as ex:
            logger.error(f"Error in process_request. Error:{ex}")
            raise HTTPException(status_code=400, detail=str(ex))
        except Exception as ex:
            logger.error(f"Error in process_request. Error:{ex}")
            raise HTTPException(status_code=500, detail="Internal Server Error")
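For reference, a sketch of how this service might be exercised directly. The payload keys mirror the .get() lookups above; all values are placeholders, and the framework sub-dict shape is an assumption since FrameworkInfo's fields are collapsed in this diff. On success, process_request appends the load_id to the app-level metadata file under CacheDir.home_dir/<app name> and writes the AiApp JSON under CacheDir.home_dir/<app name>/<load_id>/.

# Placeholder payload assembled from the fields AppDiscover reads; not from the PR.
from pebblo.app.service.discovery_service import AppDiscover

payload = {
    "name": "demo-rag-app",
    "load_id": "7f8e9d3a-0000-0000-0000-000000000000",
    "description": "Demo loader app",
    "owner": "data-team",
    "plugin_version": "0.1.0",
    "framework": {"name": "langchain", "version": "0.1.0"},  # shape assumed
    "runtime": {
        "language": "python",
        "language_version": "3.11.4",
        "host": "dev-host",
        "ip": "127.0.0.1",
        "path": "/home/user/demo-rag-app",
        "runtime": "local",
        "type": "desktop",
        "platform": "x86_64",
        "os": "linux",
        "os_version": "6.5.0",
    },
}

response = AppDiscover(data=payload).process_request()
print(response)  # {"message": "App Discover Request Processed Successfully"}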