fix: merge conflicts resolved from preview
aaryan610 committed Nov 4, 2024
2 parents e2a2812 + f205d72 commit e560038
Showing 225 changed files with 5,809 additions and 3,622 deletions.
1 change: 1 addition & 0 deletions apiserver/Dockerfile.api
@@ -4,6 +4,7 @@ FROM python:3.12.5-alpine AS backend
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV INSTANCE_CHANGELOG_URL https://api.plane.so/api/public/anchor/8e1c2e4c7bc5493eb7731be3862f6960/pages/

WORKDIR /code

1 change: 1 addition & 0 deletions apiserver/Dockerfile.dev
@@ -4,6 +4,7 @@ FROM python:3.12.5-alpine AS backend
ENV PYTHONDONTWRITEBYTECODE 1
ENV PYTHONUNBUFFERED 1
ENV PIP_DISABLE_PIP_VERSION_CHECK=1
ENV INSTANCE_CHANGELOG_URL https://api.plane.so/api/public/anchor/8e1c2e4c7bc5493eb7731be3862f6960/pages/

RUN apk --no-cache add \
"bash~=5.2" \
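Both images now bake the changelog anchor URL into the build. A minimal sketch of how the variable might be consumed on the Django side, assuming it is simply read from the process environment (the settings location and fallback are illustrative, not taken from this diff):

import os

# Hosted "What's new" page surfaced to instance admins; the empty-string
# fallback keeps builds without the variable booting cleanly. (Illustrative.)
INSTANCE_CHANGELOG_URL = os.environ.get("INSTANCE_CHANGELOG_URL", "")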
82 changes: 44 additions & 38 deletions apiserver/plane/app/serializers/base.py
@@ -49,44 +49,47 @@ def _filter_fields(self, fields):
allowed.append(list(item.keys())[0])

for field in allowed:
from . import (
WorkspaceLiteSerializer,
ProjectLiteSerializer,
UserLiteSerializer,
StateLiteSerializer,
IssueSerializer,
LabelSerializer,
CycleIssueSerializer,
IssueLiteSerializer,
IssueRelationSerializer,
InboxIssueLiteSerializer,
IssueReactionLiteSerializer,
IssueLinkLiteSerializer,
)

# Expansion mapper
expansion = {
"user": UserLiteSerializer,
"workspace": WorkspaceLiteSerializer,
"project": ProjectLiteSerializer,
"default_assignee": UserLiteSerializer,
"project_lead": UserLiteSerializer,
"state": StateLiteSerializer,
"created_by": UserLiteSerializer,
"issue": IssueSerializer,
"actor": UserLiteSerializer,
"owned_by": UserLiteSerializer,
"members": UserLiteSerializer,
"assignees": UserLiteSerializer,
"labels": LabelSerializer,
"issue_cycle": CycleIssueSerializer,
"parent": IssueLiteSerializer,
"issue_relation": IssueRelationSerializer,
"issue_inbox": InboxIssueLiteSerializer,
"issue_reactions": IssueReactionLiteSerializer,
"issue_link": IssueLinkLiteSerializer,
"sub_issues": IssueLiteSerializer,
}
if field not in self.fields:
from . import (
WorkspaceLiteSerializer,
ProjectLiteSerializer,
UserLiteSerializer,
StateLiteSerializer,
IssueSerializer,
LabelSerializer,
CycleIssueSerializer,
IssueLiteSerializer,
IssueRelationSerializer,
InboxIssueLiteSerializer,
IssueReactionLiteSerializer,
IssueLinkLiteSerializer,
RelatedIssueSerializer,
)

# Expansion mapper
expansion = {
"user": UserLiteSerializer,
"workspace": WorkspaceLiteSerializer,
"project": ProjectLiteSerializer,
"default_assignee": UserLiteSerializer,
"project_lead": UserLiteSerializer,
"state": StateLiteSerializer,
"created_by": UserLiteSerializer,
"issue": IssueSerializer,
"actor": UserLiteSerializer,
"owned_by": UserLiteSerializer,
"members": UserLiteSerializer,
"assignees": UserLiteSerializer,
"labels": LabelSerializer,
"issue_cycle": CycleIssueSerializer,
"parent": IssueLiteSerializer,
"issue_relation": IssueRelationSerializer,
"issue_related": RelatedIssueSerializer,
"issue_inbox": InboxIssueLiteSerializer,
"issue_reactions": IssueReactionLiteSerializer,
"issue_link": IssueLinkLiteSerializer,
"sub_issues": IssueLiteSerializer,
}

if field not in self.fields and field in expansion:
self.fields[field] = expansion[field](
@@ -104,6 +107,7 @@ def _filter_fields(self, fields):
"issue_attachment",
"issue_link",
"sub_issues",
"issue_related",
]
else False
)
@@ -133,6 +137,7 @@ def to_representation(self, instance):
IssueReactionLiteSerializer,
IssueAttachmentLiteSerializer,
IssueLinkLiteSerializer,
RelatedIssueSerializer,
)

# Expansion mapper
@@ -153,6 +158,7 @@ def to_representation(self, instance):
"issue_cycle": CycleIssueSerializer,
"parent": IssueLiteSerializer,
"issue_relation": IssueRelationSerializer,
"issue_related": RelatedIssueSerializer,
"issue_inbox": InboxIssueLiteSerializer,
"issue_reactions": IssueReactionLiteSerializer,
"issue_attachment": IssueAttachmentLiteSerializer,
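Net effect of this serializer change: the heavyweight imports and the expansion mapper are now built only when a requested field is actually missing from self.fields, and "issue_related" becomes expandable through RelatedIssueSerializer in both _filter_fields and to_representation. A stripped-down sketch of the pattern, with a placeholder serializer and an assumed expansion dict standing in for the real ones:

from rest_framework import serializers

class ExampleExpandableSerializer(serializers.Serializer):
    # Relations that serialize as lists when expanded (subset, illustrative).
    MANY_FIELDS = {"members", "assignees", "labels", "issue_related"}

    def _expand_missing(self, requested_fields, expansion):
        # Instantiate an expansion serializer only for fields that are
        # requested but not already declared on this serializer.
        for field in requested_fields:
            if field not in self.fields and field in expansion:
                self.fields[field] = expansion[field](
                    many=field in self.MANY_FIELDS
                )

Clients opt in per request through the expand machinery the views pass down (expand=self.expand), e.g. a query such as ?expand=issue_related.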
13 changes: 13 additions & 0 deletions apiserver/plane/app/urls/issue.py
@@ -21,7 +21,9 @@
BulkArchiveIssuesEndpoint,
DeletedIssuesListViewSet,
IssuePaginatedViewSet,
IssueDetailEndpoint,
IssueAttachmentV2Endpoint,
IssueBulkUpdateDateEndpoint,
)

urlpatterns = [
@@ -40,6 +42,12 @@
),
name="project-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues-detail/",
IssueDetailEndpoint.as_view(),
name="project-issue-detail",
),
# updated v1 paginated issues
# updated v2 paginated issues
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/v2/issues/",
@@ -307,4 +315,9 @@
DeletedIssuesListViewSet.as_view(),
name="deleted-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issue-dates/",
IssueBulkUpdateDateEndpoint.as_view(),
name="project-issue-dates",
),
]
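Two routes land here: issues-detail, a filterable, paginated listing backed by IssueDetailEndpoint, and issue-dates, the bulk start/target date update backed by IssueBulkUpdateDateEndpoint. A hedged usage sketch using the requests library; the host, slug, IDs, and auth header are placeholders, not values from this diff:

import requests

BASE = "https://plane.example.com/api"      # placeholder host
AUTH = {"Cookie": "session=<session-id>"}   # auth mechanism is illustrative

# Paginated issue detail listing (default ordering mirrors the view)
resp = requests.get(
    f"{BASE}/workspaces/<slug>/projects/<project-uuid>/issues-detail/",
    params={"order_by": "-created_at"},
    headers=AUTH,
)

# Bulk update of start/target dates
resp = requests.post(
    f"{BASE}/workspaces/<slug>/projects/<project-uuid>/issue-dates/",
    json={"updates": [{"id": "<issue-uuid>", "target_date": "2024-11-30"}]},
    headers=AUTH,
)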
2 changes: 2 additions & 0 deletions apiserver/plane/app/views/__init__.py
@@ -130,6 +130,8 @@
BulkDeleteIssuesEndpoint,
DeletedIssuesListViewSet,
IssuePaginatedViewSet,
IssueDetailEndpoint,
IssueBulkUpdateDateEndpoint,
)

from .issue.activity import (
191 changes: 191 additions & 0 deletions apiserver/plane/app/views/issue/base.py
@@ -976,3 +976,194 @@ def list(self, request, slug, project_id):
)

return Response(paginated_data, status=status.HTTP_200_OK)


class IssueDetailEndpoint(BaseAPIView):
@allow_permission([ROLE.ADMIN, ROLE.MEMBER, ROLE.GUEST])
def get(self, request, slug, project_id):
filters = issue_filters(request.query_params, "GET")
issue = (
Issue.issue_objects.filter(
workspace__slug=slug, project_id=project_id
)
.select_related("workspace", "project", "state", "parent")
.prefetch_related("assignees", "labels", "issue_module__module")
.annotate(
cycle_id=Subquery(
CycleIssue.objects.filter(
issue=OuterRef("id"), deleted_at__isnull=True
).values("cycle_id")[:1]
)
)
.annotate(
label_ids=Coalesce(
ArrayAgg(
"labels__id",
distinct=True,
filter=Q(
~Q(labels__id__isnull=True)
& Q(label_issue__deleted_at__isnull=True),
),
),
Value([], output_field=ArrayField(UUIDField())),
),
assignee_ids=Coalesce(
ArrayAgg(
"assignees__id",
distinct=True,
filter=Q(
~Q(assignees__id__isnull=True)
& Q(assignees__member_project__is_active=True)
& Q(issue_assignee__deleted_at__isnull=True)
),
),
Value([], output_field=ArrayField(UUIDField())),
),
module_ids=Coalesce(
ArrayAgg(
"issue_module__module_id",
distinct=True,
filter=Q(
~Q(issue_module__module_id__isnull=True)
& Q(issue_module__module__archived_at__isnull=True)
& Q(issue_module__deleted_at__isnull=True)
),
),
Value([], output_field=ArrayField(UUIDField())),
),
)
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
attachment_count=FileAsset.objects.filter(
issue_id=OuterRef("id"),
entity_type=FileAsset.EntityTypeContext.ISSUE_ATTACHMENT,
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
sub_issues_count=Issue.issue_objects.filter(
parent=OuterRef("id")
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
)
issue = issue.filter(**filters)
order_by_param = request.GET.get("order_by", "-created_at")
# Issue queryset
issue, order_by_param = order_issue_queryset(
issue_queryset=issue,
order_by_param=order_by_param,
)
return self.paginate(
request=request,
order_by=order_by_param,
queryset=(issue),
on_results=lambda issue: IssueSerializer(
issue,
many=True,
fields=self.fields,
expand=self.expand,
).data,
)


class IssueBulkUpdateDateEndpoint(BaseAPIView):

def validate_dates(
self, current_start, current_target, new_start, new_target
):
"""
Validate that start date is before target date.
"""
start = new_start or current_start
target = new_target or current_target

if start and target and start > target:
return False
return True

@allow_permission([ROLE.ADMIN, ROLE.MEMBER])
def post(self, request, slug, project_id):

updates = request.data.get("updates", [])

issue_ids = [update["id"] for update in updates]
epoch = int(timezone.now().timestamp())

# Fetch all relevant issues in a single query
issues = list(Issue.objects.filter(id__in=issue_ids))
issues_dict = {str(issue.id): issue for issue in issues}
issues_to_update = []

for update in updates:
issue_id = update["id"]
issue = issues_dict.get(issue_id)

if not issue:
continue

start_date = update.get("start_date")
target_date = update.get("target_date")
validate_dates = self.validate_dates(
issue.start_date, issue.target_date, start_date, target_date
)
if not validate_dates:
return Response(
{
"message": "Start date cannot exceed target date",
},
status=status.HTTP_400_BAD_REQUEST,
)

if start_date:
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps(
{"start_date": update.get("start_date")}
),
current_instance=json.dumps(
{"start_date": str(issue.start_date)}
),
issue_id=str(issue_id),
actor_id=str(request.user.id),
project_id=str(project_id),
epoch=epoch,
)
issue.start_date = start_date
issues_to_update.append(issue)

if target_date:
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps(
{"target_date": update.get("target_date")}
),
current_instance=json.dumps(
{"target_date": str(issue.target_date)}
),
issue_id=str(issue_id),
actor_id=str(request.user.id),
project_id=str(project_id),
epoch=epoch,
)
issue.target_date = target_date
issues_to_update.append(issue)

# Bulk update issues
Issue.objects.bulk_update(
issues_to_update, ["start_date", "target_date"]
)

return Response(
{"message": "Issues updated successfully"},
status=status.HTTP_200_OK,
)
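
IssueDetailEndpoint leans on a standard Django/Postgres pattern: Coalesce(ArrayAgg(...), Value([])) collects related IDs into an array while guaranteeing an empty list rather than NULL when nothing matches. A minimal sketch of the pattern in isolation, assuming an Issue model with a labels many-to-many routed through a soft-deletable label_issue relation (as in the queryset above):

from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Q, UUIDField, Value
from django.db.models.functions import Coalesce

from plane.db.models import Issue  # import path assumed

# Annotate each issue with the IDs of its live (non-deleted) labels.
issues = Issue.objects.annotate(
    label_ids=Coalesce(
        ArrayAgg(
            "labels__id",
            distinct=True,
            # Skip NULL join rows and soft-deleted through rows.
            filter=Q(labels__id__isnull=False)
            & Q(label_issue__deleted_at__isnull=True),
        ),
        Value([], output_field=ArrayField(UUIDField())),
    )
)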
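The bulk endpoint's validate_dates gives the payload partial-update semantics: a date omitted from an update falls back to the issue's current value, and the first pair whose effective start exceeds the effective target aborts the whole batch with a 400 before bulk_update writes anything (though activity tasks already queued for earlier rows in the batch are not rolled back). An illustrative payload under those rules:

payload = {
    "updates": [
        # Only start_date given: compared against the issue's current target.
        {"id": "<issue-uuid>", "start_date": "2024-11-05"},
        # Both given: compared against each other directly.
        {
            "id": "<another-issue-uuid>",
            "start_date": "2024-11-01",
            "target_date": "2024-11-20",
        },
    ]
}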
