Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account related emails.

Already on GitHub? Sign in to your account

Add request field to Domain and Vulnerability endpoints #742

Merged
merged 5 commits into from
Dec 18, 2024
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
127 changes: 105 additions & 22 deletions backend/src/xfd_django/xfd_api/api_methods/domain.py
Original file line number Diff line number Diff line change
Expand Up @@ -5,6 +5,7 @@

# Standard Python Libraries
import csv
import io

# Third-Party Libraries
from django.core.exceptions import ObjectDoesNotExist
Expand All @@ -14,7 +15,8 @@
from fastapi import HTTPException

from ..auth import get_org_memberships, get_user_organization_ids, is_global_view_admin
from ..helpers.filter_helpers import filter_domains, sort_direction
from ..helpers.filter_helpers import apply_domain_filters, sort_direction
from ..helpers.s3_client import S3Client
from ..models import Domain, Service
from ..schema_models.domain import DomainFilters, DomainSearch

Expand Down Expand Up @@ -95,49 +97,130 @@ def get_domain_by_id(domain_id: str):

def search_domains(domain_search: DomainSearch, current_user):
    """
    List domains matching a search filter, scoped to the caller's permissions.

    Arguments:
        domain_search: A DomainSearch object carrying sort, order, page,
            pageSize, and optional filters.
        current_user: The authenticated user; used to scope results to the
            organizations the user belongs to (unless a global view admin).

    Returns:
        tuple: (list of Domain objects, total result count). When
        ``pageSize == -1`` the full unpaginated result set is returned.

    Raises:
        HTTPException: 404 if the lookup raises Domain.DoesNotExist,
        500 for any other error.
    """
    try:
        # select_related/prefetch_related avoid N+1 queries when callers
        # serialize the organization, services, and vulnerabilities relations.
        domains = (
            Domain.objects.select_related("organization")
            .prefetch_related("services", "vulnerabilities")
            .order_by(sort_direction(domain_search.sort, domain_search.order))
        )

        # Apply global user permission filters.
        if not is_global_view_admin(current_user):
            orgs = get_org_memberships(current_user)
            if not orgs:
                # No organization memberships: return an empty result rather
                # than leaking domains the user cannot see.
                return [], 0
            domains = domains.filter(organization__id__in=orgs)

        # Restrict to FCEB domains, or non-FCEB domains discovered from CIDR.
        domains = domains.filter(Q(isFceb=True) | Q(isFceb=False, fromCidr=True))

        # Apply caller-supplied search filters, if any.
        if domain_search.filters:
            domains = apply_domain_filters(domains, domain_search.filters)

        # Handle pagination. pageSize == -1 is the "export everything" mode.
        page_size = domain_search.pageSize
        if page_size == -1:
            result = list(domains)
            return result, len(result)
        else:
            page_size = page_size or 15  # default page size if none provided
            paginator = Paginator(domains, page_size)
            page_obj = paginator.get_page(domain_search.page)
            return list(page_obj), paginator.count

    except Domain.DoesNotExist as e:
        raise HTTPException(status_code=404, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))


def export_domains(domain_search: DomainSearch, current_user):
    """
    Export domains matching a search filter as a CSV uploaded to S3.

    Arguments:
        domain_search: A DomainSearch object; its pageSize is forced to -1 so
            the full (unpaginated) result set is exported.
        current_user: The authenticated user; passed through to
            search_domains for permission scoping.

    Returns:
        dict: {"url": <presigned S3 URL of the generated CSV>}.

    Raises:
        HTTPException: 404 if the lookup raises Domain.DoesNotExist,
        500 for any other error.
    """
    # Column order for the generated CSV.
    csv_fields = [
        "name",
        "ip",
        "id",
        "ports",
        "products",
        "createdAt",
        "updatedAt",
        "organization",
    ]

    try:
        # Set pageSize to -1 to fetch all domains without pagination.
        domain_search.pageSize = -1

        # Reuse search_domains so export honors the same permission scoping
        # and filters as the list endpoint.
        domains, _count = search_domains(domain_search, current_user)

        if not domains:
            # No results: emit a header-only CSV so consumers still get a
            # well-formed file.
            csv_content = "name,ip,id,ports,products,createdAt,updatedAt,organization\n"
        else:
            # Flatten each domain: organization name, ports as a joined
            # string, products as a deduplicated "name version" string.
            processed_domains = []
            for domain in domains:
                organization_name = (
                    domain.organization.name if domain.organization else ""
                )
                ports = ", ".join(
                    [str(service.port) for service in domain.services.all()]
                )

                # Collect unique "name version" product entries across all
                # services; version is omitted when absent.
                products_set = set()
                for service in domain.services.all():
                    for product in service.products.all():
                        if product.name:
                            product_entry = (
                                f"{product.name} {product.version}"
                                if product.version
                                else product.name
                            )
                            products_set.add(product_entry)
                products = ", ".join(sorted(products_set))

                processed_domains.append(
                    {
                        "name": domain.name,
                        "ip": domain.ip,
                        "id": str(domain.id),
                        "ports": ports,
                        "products": products,
                        "createdAt": domain.createdAt.isoformat()
                        if domain.createdAt
                        else "",
                        "updatedAt": domain.updatedAt.isoformat()
                        if domain.updatedAt
                        else "",
                        "organization": organization_name,
                    }
                )

            # Generate the CSV in memory using the stdlib csv module so
            # quoting/escaping is handled correctly.
            output = io.StringIO()
            writer = csv.DictWriter(output, fieldnames=csv_fields)
            writer.writeheader()
            writer.writerows(processed_domains)
            csv_content = output.getvalue()

        # Upload the CSV to S3 and return the resulting URL.
        client = S3Client()
        url = client.save_csv(csv_content, "domains")
        return {"url": url}

    except Domain.DoesNotExist as e:
        raise HTTPException(status_code=404, detail=str(e))
    except Exception as e:
        raise HTTPException(status_code=500, detail=str(e))
Original file line number Diff line number Diff line change
Expand Up @@ -1084,6 +1084,7 @@ def search_organizations_task(search_body, current_user: User):
client = ESClient()

# Construct the Elasticsearch query

query_body: Dict[str, Any] = {"query": {"bool": {"must": [], "filter": []}}}

# Use match_all if searchTerm is empty
Expand Down
Loading
Loading