diff --git a/.eslintrc.js b/.eslintrc.js
index be1ad0f9da5..463c86901c0 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -1,10 +1,10 @@
module.exports = {
root: true,
- // This tells ESLint to load the config from the package `config`
- // extends: ["custom"],
+ // This tells ESLint to load the config from the package `eslint-config-custom`
+ extends: ["custom"],
settings: {
next: {
- rootDir: ["apps/*/"],
+ rootDir: ["apps/*"],
},
},
};
diff --git a/.gitignore b/.gitignore
index ad72521ff81..3562ab0b34c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -62,3 +62,11 @@ yarn-error.log
*.sln
package-lock.json
.vscode
+
+# Sentry
+.sentryclirc
+
+# lock files
+package-lock.json
+pnpm-lock.yaml
+pnpm-workspace.yaml
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000000..094d628e3bb
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,116 @@
+FROM node:18-alpine AS builder
+RUN apk add --no-cache libc6-compat
+RUN apk update
+# Set working directory
+WORKDIR /app
+
+RUN yarn global add turbo
+COPY . .
+
+RUN turbo prune --scope=app --docker
+
+# Add lockfile and package.json files of the isolated subworkspace
+FROM node:18-alpine AS installer
+
+
+RUN apk add --no-cache libc6-compat
+RUN apk update
+WORKDIR /app
+
+# First install the dependencies (as they change less often)
+COPY .gitignore .gitignore
+COPY --from=builder /app/out/json/ .
+COPY --from=builder /app/out/yarn.lock ./yarn.lock
+RUN yarn install
+
+# Build the project
+COPY --from=builder /app/out/full/ .
+COPY turbo.json turbo.json
+
+RUN yarn turbo run build --filter=app
+
+
+FROM python:3.11.1-alpine3.17 AS backend
+
+# set environment variables
+ENV PYTHONDONTWRITEBYTECODE 1
+ENV PYTHONUNBUFFERED 1
+ENV PIP_DISABLE_PIP_VERSION_CHECK=1
+
+WORKDIR /code
+
+RUN apk --update --no-cache add \
+ "libpq~=15" \
+ "libxslt~=1.1" \
+ "nodejs-current~=19" \
+ "xmlsec~=1.2" \
+ "nginx" \
+ "nodejs" \
+ "npm" \
+ "supervisor"
+
+COPY apiserver/requirements.txt ./
+COPY apiserver/requirements ./requirements
+RUN apk add libffi-dev
+RUN apk --update --no-cache --virtual .build-deps add \
+ "bash~=5.2" \
+ "g++~=12.2" \
+ "gcc~=12.2" \
+ "cargo~=1.64" \
+ "git~=2" \
+ "make~=4.3" \
+ "postgresql13-dev~=13" \
+ "libc-dev" \
+ "linux-headers" \
+ && \
+ pip install -r requirements.txt --compile --no-cache-dir \
+ && \
+ apk del .build-deps
+
+# Add in Django deps and generate Django's static files
+COPY apiserver/manage.py manage.py
+COPY apiserver/plane plane/
+COPY apiserver/templates templates/
+
+COPY apiserver/gunicorn.config.py ./
+RUN apk --update --no-cache add "bash~=5.2"
+COPY apiserver/bin ./bin/
+
+RUN chmod +x ./bin/takeoff ./bin/worker
+RUN chmod -R 777 /code
+
+# Expose container port and run entry point script
+EXPOSE 8000
+EXPOSE 3000
+EXPOSE 80
+
+
+
+WORKDIR /app
+
+# Don't run production as root
+RUN addgroup --system --gid 1001 plane
+RUN adduser --system --uid 1001 captain
+
+COPY --from=installer /app/apps/app/next.config.js .
+COPY --from=installer /app/apps/app/package.json .
+
+COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./
+
+COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static
+
+ENV NEXT_TELEMETRY_DISABLED 1
+
+# RUN rm /etc/nginx/conf.d/default.conf
+#######################################################################
+COPY nginx/nginx-single-docker-image.conf /etc/nginx/http.d/default.conf
+#######################################################################
+
+COPY nginx/supervisor.conf /code/supervisor.conf
+
+
+CMD ["supervisord","-c","/code/supervisor.conf"]
+
+
+
+
diff --git a/README.md b/README.md
index 0480ee4fd87..6af8396acdd 100644
--- a/README.md
+++ b/README.md
@@ -7,7 +7,7 @@
-
+
@@ -48,4 +48,4 @@ Our [Code of Conduct](https://github.com/makeplane/plane/blob/master/CODE_OF_CON
## Security
-If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports. Email security@plane.so to disclose any security vulnerabilities.
\ No newline at end of file
+If you believe you have found a security vulnerability in Plane, we encourage you to responsibly disclose this and not open a public issue. We will investigate all legitimate reports. Email security@plane.so to disclose any security vulnerabilities.
diff --git a/apiserver/.env.example b/apiserver/.env.example
index 0595770fa7a..9a6904b55b9 100644
--- a/apiserver/.env.example
+++ b/apiserver/.env.example
@@ -1,18 +1,22 @@
-# Backend
SECRET_KEY="<-- django secret -->"
+DJANGO_SETTINGS_MODULE="plane.settings.production"
+# Database
+DATABASE_URL=postgres://plane:plane@plane-db-1:5432/plane
+# Cache
+REDIS_URL=redis://redis:6379/
+# SMTP
EMAIL_HOST="<-- email smtp -->"
EMAIL_HOST_USER="<-- email host user -->"
EMAIL_HOST_PASSWORD="<-- email host password -->"
-
+# AWS
AWS_REGION="<-- aws region -->"
AWS_ACCESS_KEY_ID="<-- aws access key -->"
AWS_SECRET_ACCESS_KEY="<-- aws secret access key -->"
AWS_S3_BUCKET_NAME="<-- aws s3 bucket name -->"
-
-SENTRY_DSN="<-- sentry dsn -->"
-WEB_URL="<-- frontend web url -->"
-
+# FE
+WEB_URL="localhost/"
+# OAUTH
GITHUB_CLIENT_SECRET="<-- github secret -->"
-
+# Flags
DISABLE_COLLECTSTATIC=1
-DOCKERIZED=0 //True if running docker compose else 0
+DOCKERIZED=1
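
Note: the example now points at the compose services (`plane-db-1`, `redis`) and assumes the settings module parses single connection strings. A minimal sketch of how `plane.settings.production` could consume these values, assuming the `dj-database-url` and `django-redis` packages (the settings module itself is not part of this diff):

    import os
    import dj_database_url

    # Parses postgres://plane:plane@plane-db-1:5432/plane into Django's DATABASES form
    DATABASES = {"default": dj_database_url.parse(os.environ["DATABASE_URL"])}

    CACHES = {
        "default": {
            "BACKEND": "django_redis.cache.RedisCache",
            "LOCATION": os.environ.get("REDIS_URL", "redis://redis:6379/"),
        }
    }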
diff --git a/apiserver/Dockerfile.api b/apiserver/Dockerfile.api
index 6343c740e21..123544571e4 100644
--- a/apiserver/Dockerfile.api
+++ b/apiserver/Dockerfile.api
@@ -1,4 +1,4 @@
-FROM python:3.8.14-alpine3.16 AS backend
+FROM python:3.11.1-alpine3.17 AS backend
# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
@@ -8,19 +8,19 @@ ENV PIP_DISABLE_PIP_VERSION_CHECK=1
WORKDIR /code
RUN apk --update --no-cache add \
- "libpq~=14" \
+ "libpq~=15" \
"libxslt~=1.1" \
- "nodejs-current~=18" \
+ "nodejs-current~=19" \
"xmlsec~=1.2"
COPY requirements.txt ./
COPY requirements ./requirements
RUN apk add libffi-dev
RUN apk --update --no-cache --virtual .build-deps add \
- "bash~=5.1" \
- "g++~=11.2" \
- "gcc~=11.2" \
- "cargo~=1.60" \
+ "bash~=5.2" \
+ "g++~=12.2" \
+ "gcc~=12.2" \
+ "cargo~=1.64" \
"git~=2" \
"make~=4.3" \
"postgresql13-dev~=13" \
@@ -46,15 +46,16 @@ COPY templates templates/
COPY gunicorn.config.py ./
USER root
-RUN apk --update --no-cache add "bash~=5.1"
+RUN apk --update --no-cache add "bash~=5.2"
COPY ./bin ./bin/
RUN chmod +x ./bin/takeoff ./bin/worker
+RUN chmod -R 777 /code
USER captain
# Expose container port and run entry point script
EXPOSE 8000
-CMD [ "./bin/takeoff" ]
+# CMD [ "./bin/takeoff" ]
diff --git a/apiserver/back_migration.py b/apiserver/back_migration.py
index 57ded0ba46e..f716ea29fb2 100644
--- a/apiserver/back_migration.py
+++ b/apiserver/back_migration.py
@@ -1,11 +1,14 @@
# All the python scripts that are used for back migrations
+import uuid
+import random
+from django.contrib.auth.hashers import make_password
from plane.db.models import ProjectIdentifier
-from plane.db.models import Issue, IssueComment
+from plane.db.models import Issue, IssueComment, User
+
# Update description and description html values for old descriptions
def update_description():
try:
-
issues = Issue.objects.all()
updated_issues = []
@@ -25,7 +28,6 @@ def update_description():
def update_comments():
try:
-
issue_comments = IssueComment.objects.all()
updated_issue_comments = []
@@ -44,9 +46,11 @@ def update_comments():
def update_project_identifiers():
try:
- project_identifiers = ProjectIdentifier.objects.filter(workspace_id=None).select_related("project", "project__workspace")
+ project_identifiers = ProjectIdentifier.objects.filter(
+ workspace_id=None
+ ).select_related("project", "project__workspace")
updated_identifiers = []
-
+
for identifier in project_identifiers:
identifier.workspace_id = identifier.project.workspace_id
updated_identifiers.append(identifier)
@@ -58,3 +62,37 @@ def update_project_identifiers():
except Exception as e:
print(e)
print("Failed")
+
+
+def update_user_empty_password():
+ try:
+ users = User.objects.filter(password="")
+ updated_users = []
+
+ for user in users:
+ user.password = make_password(uuid.uuid4().hex)
+ user.is_password_autoset = True
+ updated_users.append(user)
+
+        User.objects.bulk_update(updated_users, ["password", "is_password_autoset"], batch_size=50)
+ print("Success")
+
+ except Exception as e:
+ print(e)
+ print("Failed")
+
+
+def updated_issue_sort_order():
+ try:
+ issues = Issue.objects.all()
+ updated_issues = []
+
+ for issue in issues:
+ issue.sort_order = issue.sequence_id * random.randint(100, 500)
+ updated_issues.append(issue)
+
+ Issue.objects.bulk_update(updated_issues, ["sort_order"], batch_size=100)
+ print("Success")
+ except Exception as e:
+ print(e)
+ print("Failed")
diff --git a/apiserver/bin/takeoff b/apiserver/bin/takeoff
index 8340f16c704..d22eceb6ee9 100755
--- a/apiserver/bin/takeoff
+++ b/apiserver/bin/takeoff
@@ -2,4 +2,8 @@
set -e
python manage.py wait_for_db
python manage.py migrate
+
+# Create a Default User
+python bin/user_script.py
+
exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --config gunicorn.config.py --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
diff --git a/apiserver/bin/user_script.py b/apiserver/bin/user_script.py
new file mode 100644
index 00000000000..b554d2c405e
--- /dev/null
+++ b/apiserver/bin/user_script.py
@@ -0,0 +1,28 @@
+import os, sys
+import uuid
+
+sys.path.append("/code")
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
+import django
+
+django.setup()
+
+from plane.db.models import User
+
+
+def populate():
+ default_email = os.environ.get("DEFAULT_EMAIL", "captain@plane.so")
+ default_password = os.environ.get("DEFAULT_PASSWORD", "password123")
+
+ if not User.objects.filter(email=default_email).exists():
+ user = User.objects.create(email=default_email, username=uuid.uuid4().hex)
+ user.set_password(default_password)
+ user.save()
+ print("User created")
+
+ print("Success")
+
+
+if __name__ == "__main__":
+ populate()
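
Note: because `takeoff` now runs this script on every container start, the `exists()` check is what keeps it idempotent. A quick verification sketch, assuming the default environment values were used:

    # python manage.py shell
    from plane.db.models import User

    assert User.objects.filter(email="captain@plane.so").exists()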
diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py
index 8d43d90ffed..183129939f2 100644
--- a/apiserver/plane/api/serializers/__init__.py
+++ b/apiserver/plane/api/serializers/__init__.py
@@ -40,4 +40,13 @@
from .module import ModuleWriteSerializer, ModuleSerializer, ModuleIssueSerializer
-from .api_token import APITokenSerializer
\ No newline at end of file
+from .api_token import APITokenSerializer
+
+from .integration import (
+ IntegrationSerializer,
+ WorkspaceIntegrationSerializer,
+ GithubIssueSyncSerializer,
+ GithubRepositorySerializer,
+ GithubRepositorySyncSerializer,
+ GithubCommentSyncSerializer,
+)
diff --git a/apiserver/plane/api/serializers/integration/__init__.py b/apiserver/plane/api/serializers/integration/__init__.py
new file mode 100644
index 00000000000..8aea68bd6d5
--- /dev/null
+++ b/apiserver/plane/api/serializers/integration/__init__.py
@@ -0,0 +1,7 @@
+from .base import IntegrationSerializer, WorkspaceIntegrationSerializer
+from .github import (
+ GithubRepositorySerializer,
+ GithubRepositorySyncSerializer,
+ GithubIssueSyncSerializer,
+ GithubCommentSyncSerializer,
+)
diff --git a/apiserver/plane/api/serializers/integration/base.py b/apiserver/plane/api/serializers/integration/base.py
new file mode 100644
index 00000000000..10ebd462012
--- /dev/null
+++ b/apiserver/plane/api/serializers/integration/base.py
@@ -0,0 +1,20 @@
+# Module imports
+from plane.api.serializers import BaseSerializer
+from plane.db.models import Integration, WorkspaceIntegration
+
+
+class IntegrationSerializer(BaseSerializer):
+ class Meta:
+ model = Integration
+ fields = "__all__"
+ read_only_fields = [
+ "verified",
+ ]
+
+
+class WorkspaceIntegrationSerializer(BaseSerializer):
+ integration_detail = IntegrationSerializer(read_only=True, source="integration")
+
+ class Meta:
+ model = WorkspaceIntegration
+ fields = "__all__"
diff --git a/apiserver/plane/api/serializers/integration/github.py b/apiserver/plane/api/serializers/integration/github.py
new file mode 100644
index 00000000000..8352dcee143
--- /dev/null
+++ b/apiserver/plane/api/serializers/integration/github.py
@@ -0,0 +1,45 @@
+# Module imports
+from plane.api.serializers import BaseSerializer
+from plane.db.models import (
+ GithubIssueSync,
+ GithubRepository,
+ GithubRepositorySync,
+ GithubCommentSync,
+)
+
+
+class GithubRepositorySerializer(BaseSerializer):
+ class Meta:
+ model = GithubRepository
+ fields = "__all__"
+
+
+class GithubRepositorySyncSerializer(BaseSerializer):
+ repo_detail = GithubRepositorySerializer(source="repository")
+
+ class Meta:
+ model = GithubRepositorySync
+ fields = "__all__"
+
+
+class GithubIssueSyncSerializer(BaseSerializer):
+ class Meta:
+ model = GithubIssueSync
+ fields = "__all__"
+ read_only_fields = [
+ "project",
+ "workspace",
+ "repository_sync",
+ ]
+
+
+class GithubCommentSyncSerializer(BaseSerializer):
+ class Meta:
+ model = GithubCommentSync
+ fields = "__all__"
+ read_only_fields = [
+ "project",
+ "workspace",
+ "repository_sync",
+ "issue_sync",
+ ]
diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py
index a148cbfb51a..6a3c06e2250 100644
--- a/apiserver/plane/api/serializers/issue.py
+++ b/apiserver/plane/api/serializers/issue.py
@@ -24,9 +24,15 @@
Cycle,
Module,
ModuleIssue,
+ IssueLink,
)
+class IssueLinkCreateSerializer(serializers.Serializer):
+ url = serializers.CharField(required=True)
+ title = serializers.CharField(required=False)
+
+
class IssueFlatSerializer(BaseSerializer):
## Contain only flat fields
@@ -40,24 +46,13 @@ class Meta:
"start_date",
"target_date",
"sequence_id",
+ "sort_order",
]
-# Issue Serializer with state details
-class IssueStateSerializer(BaseSerializer):
-
- state_detail = StateSerializer(read_only=True, source="state")
- project_detail = ProjectSerializer(read_only=True, source="project")
-
- class Meta:
- model = Issue
- fields = "__all__"
-
-
##TODO: Find a better way to write this serializer
## Find a better approach to save manytomany?
class IssueCreateSerializer(BaseSerializer):
-
state_detail = StateSerializer(read_only=True, source="state")
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
project_detail = ProjectSerializer(read_only=True, source="project")
@@ -87,6 +82,11 @@ class IssueCreateSerializer(BaseSerializer):
write_only=True,
required=False,
)
+ links_list = serializers.ListField(
+ child=IssueLinkCreateSerializer(),
+ write_only=True,
+ required=False,
+ )
class Meta:
model = Issue
@@ -105,6 +105,7 @@ def create(self, validated_data):
assignees = validated_data.pop("assignees_list", None)
labels = validated_data.pop("labels_list", None)
blocks = validated_data.pop("blocks_list", None)
+ links = validated_data.pop("links_list", None)
project = self.context["project"]
issue = Issue.objects.create(**validated_data, project=project)
@@ -173,14 +174,32 @@ def create(self, validated_data):
batch_size=10,
)
+ if links is not None:
+ IssueLink.objects.bulk_create(
+ [
+ IssueLink(
+ issue=issue,
+ project=project,
+ workspace=project.workspace,
+ created_by=issue.created_by,
+ updated_by=issue.updated_by,
+ title=link.get("title", None),
+ url=link.get("url", None),
+ )
+ for link in links
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
return issue
def update(self, instance, validated_data):
-
blockers = validated_data.pop("blockers_list", None)
assignees = validated_data.pop("assignees_list", None)
labels = validated_data.pop("labels_list", None)
blocks = validated_data.pop("blocks_list", None)
+ links = validated_data.pop("links_list", None)
if blockers is not None:
IssueBlocker.objects.filter(block=instance).delete()
@@ -250,11 +269,29 @@ def update(self, instance, validated_data):
batch_size=10,
)
+ if links is not None:
+ IssueLink.objects.filter(issue=instance).delete()
+ IssueLink.objects.bulk_create(
+ [
+ IssueLink(
+ issue=instance,
+ project=instance.project,
+ workspace=instance.project.workspace,
+ created_by=instance.created_by,
+ updated_by=instance.updated_by,
+ title=link.get("title", None),
+ url=link.get("url", None),
+ )
+ for link in links
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
return super().update(instance, validated_data)
class IssueActivitySerializer(BaseSerializer):
-
actor_detail = UserLiteSerializer(read_only=True, source="actor")
class Meta:
@@ -263,7 +300,6 @@ class Meta:
class IssueCommentSerializer(BaseSerializer):
-
actor_detail = UserLiteSerializer(read_only=True, source="actor")
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
project_detail = ProjectSerializer(read_only=True, source="project")
@@ -319,7 +355,6 @@ class Meta:
class IssueLabelSerializer(BaseSerializer):
-
# label_details = LabelSerializer(read_only=True, source="label")
class Meta:
@@ -332,7 +367,6 @@ class Meta:
class BlockedIssueSerializer(BaseSerializer):
-
blocked_issue_detail = IssueFlatSerializer(source="block", read_only=True)
class Meta:
@@ -341,7 +375,6 @@ class Meta:
class BlockerIssueSerializer(BaseSerializer):
-
blocker_issue_detail = IssueFlatSerializer(source="blocked_by", read_only=True)
class Meta:
@@ -350,7 +383,6 @@ class Meta:
class IssueAssigneeSerializer(BaseSerializer):
-
assignee_details = UserLiteSerializer(read_only=True, source="assignee")
class Meta:
@@ -373,7 +405,6 @@ class Meta:
class IssueCycleDetailSerializer(BaseSerializer):
-
cycle_detail = CycleBaseSerializer(read_only=True, source="cycle")
class Meta:
@@ -404,7 +435,6 @@ class Meta:
class IssueModuleDetailSerializer(BaseSerializer):
-
module_detail = ModuleBaseSerializer(read_only=True, source="module")
class Meta:
@@ -420,6 +450,26 @@ class Meta:
]
+class IssueLinkSerializer(BaseSerializer):
+ created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
+
+ class Meta:
+ model = IssueLink
+ fields = "__all__"
+
+
+# Issue Serializer with state details
+class IssueStateSerializer(BaseSerializer):
+ state_detail = StateSerializer(read_only=True, source="state")
+ project_detail = ProjectSerializer(read_only=True, source="project")
+ label_details = LabelSerializer(read_only=True, source="labels", many=True)
+ assignee_details = UserLiteSerializer(read_only=True, source="assignees", many=True)
+
+ class Meta:
+ model = Issue
+ fields = "__all__"
+
+
class IssueSerializer(BaseSerializer):
project_detail = ProjectSerializer(read_only=True, source="project")
state_detail = StateSerializer(read_only=True, source="state")
@@ -432,6 +482,7 @@ class IssueSerializer(BaseSerializer):
blocker_issues = BlockerIssueSerializer(read_only=True, many=True)
issue_cycle = IssueCycleDetailSerializer(read_only=True)
issue_module = IssueModuleDetailSerializer(read_only=True)
+ issue_link = IssueLinkSerializer(read_only=True, many=True)
sub_issues_count = serializers.IntegerField(read_only=True)
class Meta:
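
Note: `links_list` is write-only, so clients attach links in the same create/update call and read them back through `issue_link` on `IssueSerializer`. An abridged payload sketch (only the link-related fields are shown; field names come from `IssueLinkCreateSerializer` above):

    payload = {
        "name": "Fix login redirect",
        "links_list": [
            # "url" is required, "title" is optional
            {"url": "https://example.com/spec", "title": "Spec document"},
        ],
    }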
diff --git a/apiserver/plane/api/serializers/user.py b/apiserver/plane/api/serializers/user.py
index 808991ddc0b..14a33d9c313 100644
--- a/apiserver/plane/api/serializers/user.py
+++ b/apiserver/plane/api/serializers/user.py
@@ -21,6 +21,7 @@ class Meta:
"last_login_uagent",
"token_updated_at",
"is_onboarded",
+ "is_bot",
]
extra_kwargs = {"password": {"write_only": True}}
@@ -34,7 +35,9 @@ class Meta:
"last_name",
"email",
"avatar",
+ "is_bot",
]
read_only_fields = [
"id",
+ "is_bot",
]
diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls.py
index 98c2e87d286..f267ff16a7a 100644
--- a/apiserver/plane/api/urls.py
+++ b/apiserver/plane/api/urls.py
@@ -5,7 +5,6 @@
from plane.api.views import (
# Authentication
- SignUpEndpoint,
SignInEndpoint,
SignOutEndpoint,
MagicSignInEndpoint,
@@ -87,6 +86,14 @@
# Api Tokens
ApiTokenEndpoint,
## End Api Tokens
+ # Integrations
+ IntegrationViewSet,
+ WorkspaceIntegrationViewSet,
+ GithubRepositoriesEndpoint,
+ GithubRepositorySyncViewSet,
+ GithubIssueSyncViewSet,
+ GithubCommentSyncViewSet,
+ ## End Integrations
)
@@ -95,7 +102,6 @@
path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
# Auth
path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
- path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
# Magic Sign In/Up
path(
@@ -683,7 +689,118 @@
),
## End Modules
# API Tokens
- path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-token"),
- path("api-tokens//", ApiTokenEndpoint.as_view(), name="api-token"),
+ path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
+ path("api-tokens//", ApiTokenEndpoint.as_view(), name="api-tokens"),
## End API Tokens
+ # Integrations
+ path(
+ "integrations/",
+ IntegrationViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ name="integrations",
+ ),
+ path(
+ "integrations//",
+ IntegrationViewSet.as_view(
+ {
+ "get": "retrieve",
+ "patch": "partial_update",
+ "delete": "destroy",
+ }
+ ),
+ name="integrations",
+ ),
+ path(
+ "workspaces//workspace-integrations/",
+ WorkspaceIntegrationViewSet.as_view(
+ {
+ "get": "list",
+ }
+ ),
+ name="workspace-integrations",
+ ),
+ path(
+ "workspaces//workspace-integrations//",
+ WorkspaceIntegrationViewSet.as_view(
+ {
+ "post": "create",
+ }
+ ),
+ name="workspace-integrations",
+ ),
+ path(
+ "workspaces//workspace-integrations//provider/",
+ WorkspaceIntegrationViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ name="workspace-integrations",
+ ),
+ # Github Integrations
+ path(
+ "workspaces//workspace-integrations//github-repositories/",
+ GithubRepositoriesEndpoint.as_view(),
+ ),
+ path(
+ "workspaces//projects//workspace-integrations//github-repository-sync/",
+ GithubRepositorySyncViewSet.as_view(
+ {
+ "get": "list",
+ "post": "create",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//workspace-integrations//github-repository-sync//",
+ GithubRepositorySyncViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//github-issue-sync/",
+ GithubIssueSyncViewSet.as_view(
+ {
+ "post": "create",
+ "get": "list",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//github-issue-sync//",
+ GithubIssueSyncViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync/",
+ GithubCommentSyncViewSet.as_view(
+ {
+ "post": "create",
+ "get": "list",
+ }
+ ),
+ ),
+ path(
+ "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync//",
+ GithubCommentSyncViewSet.as_view(
+ {
+ "get": "retrieve",
+ "delete": "destroy",
+ }
+ ),
+ ),
+ ## End Github Integrations
+ ## End Integrations
]
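
Note: the GitHub routes nest each sync object under its parent, mirroring the `perform_create` kwargs (`repo_sync_id`, `issue_sync_id`) in the views. A hedged client sketch for listing a workspace's GitHub repositories (host, token, and ids are placeholders):

    import requests

    BASE = "https://plane.example.com/api"          # placeholder host
    HEADERS = {"Authorization": "Bearer TOKEN"}     # placeholder credential
    wi_id = "00000000-0000-0000-0000-000000000000"  # workspace integration id

    resp = requests.get(
        f"{BASE}/workspaces/my-workspace/workspace-integrations/{wi_id}/github-repositories/",
        headers=HEADERS,
    )
    resp.raise_for_status()
    repositories = resp.json()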
diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py
index 1212e0dca0d..275642c501c 100644
--- a/apiserver/plane/api/views/__init__.py
+++ b/apiserver/plane/api/views/__init__.py
@@ -64,7 +64,6 @@
from .authentication import (
- SignUpEndpoint,
SignInEndpoint,
SignOutEndpoint,
MagicSignInEndpoint,
@@ -73,4 +72,13 @@
from .module import ModuleViewSet, ModuleIssueViewSet
-from .api_token import ApiTokenEndpoint
\ No newline at end of file
+from .api_token import ApiTokenEndpoint
+
+from .integration import (
+ WorkspaceIntegrationViewSet,
+ IntegrationViewSet,
+ GithubIssueSyncViewSet,
+ GithubRepositorySyncViewSet,
+ GithubCommentSyncViewSet,
+ GithubRepositoriesEndpoint,
+)
diff --git a/apiserver/plane/api/views/api_token.py b/apiserver/plane/api/views/api_token.py
index 4ed3d9de028..2508b06acbc 100644
--- a/apiserver/plane/api/views/api_token.py
+++ b/apiserver/plane/api/views/api_token.py
@@ -15,12 +15,16 @@
class ApiTokenEndpoint(BaseAPIView):
def post(self, request):
try:
-
label = request.data.get("label", str(uuid4().hex))
+ workspace = request.data.get("workspace", False)
+
+ if not workspace:
+ return Response(
+ {"error": "Workspace is required"}, status=status.HTTP_200_OK
+ )
api_token = APIToken.objects.create(
- label=label,
- user=request.user,
+ label=label, user=request.user, workspace_id=workspace
)
serializer = APITokenSerializer(api_token)
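
Note: API tokens are now scoped to a workspace, so the `workspace` id becomes a required field in the request body. A hedged client sketch (host, token, and id are placeholders):

    import requests

    resp = requests.post(
        "https://plane.example.com/api/api-tokens/",  # placeholder host
        headers={"Authorization": "Bearer TOKEN"},    # placeholder credential
        json={"label": "ci-bot", "workspace": "<workspace-uuid>"},
    )
    api_token = resp.json()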
diff --git a/apiserver/plane/api/views/auth_extended.py b/apiserver/plane/api/views/auth_extended.py
index 487d10a2260..56dc091f489 100644
--- a/apiserver/plane/api/views/auth_extended.py
+++ b/apiserver/plane/api/views/auth_extended.py
@@ -84,7 +84,7 @@ def post(self, request):
)
return Response(
- {"messgae": "Check your email to reset your password"},
+ {"message": "Check your email to reset your password"},
status=status.HTTP_200_OK,
)
return Response(
diff --git a/apiserver/plane/api/views/authentication.py b/apiserver/plane/api/views/authentication.py
index c77bdd1606b..58d75a04952 100644
--- a/apiserver/plane/api/views/authentication.py
+++ b/apiserver/plane/api/views/authentication.py
@@ -9,6 +9,7 @@
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.conf import settings
+from django.contrib.auth.hashers import make_password
# Third party imports
from rest_framework.response import Response
@@ -34,13 +35,11 @@ def get_tokens_for_user(user):
)
-class SignUpEndpoint(BaseAPIView):
-
+class SignInEndpoint(BaseAPIView):
permission_classes = (AllowAny,)
def post(self, request):
try:
-
email = request.data.get("email", False)
password = request.data.get("password", False)
@@ -63,114 +62,67 @@ def post(self, request):
user = User.objects.filter(email=email).first()
- if user is not None:
- return Response(
- {"error": "Email ID is already taken"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- user = User.objects.create(email=email)
- user.set_password(password)
-
- # settings last actives for the user
- user.last_active = timezone.now()
- user.last_login_time = timezone.now()
- user.last_login_ip = request.META.get("REMOTE_ADDR")
- user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
- user.token_updated_at = timezone.now()
- user.save()
-
- serialized_user = UserSerializer(user).data
-
- access_token, refresh_token = get_tokens_for_user(user)
-
- data = {
- "access_token": access_token,
- "refresh_token": refresh_token,
- "user": serialized_user,
- }
-
- return Response(data, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
-
-
-class SignInEndpoint(BaseAPIView):
- permission_classes = (AllowAny,)
-
- def post(self, request):
- try:
- email = request.data.get("email", False)
- password = request.data.get("password", False)
+ # Sign up Process
+ if user is None:
+ user = User.objects.create(email=email, username=uuid.uuid4().hex)
+ user.set_password(password)
- ## Raise exception if any of the above are missing
- if not email or not password:
- return Response(
- {"error": "Both email and password are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ # set last active fields for the user
+ user.last_active = timezone.now()
+ user.last_login_time = timezone.now()
+ user.last_login_ip = request.META.get("REMOTE_ADDR")
+ user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
+ user.token_updated_at = timezone.now()
+ user.save()
- email = email.strip().lower()
+ serialized_user = UserSerializer(user).data
- try:
- validate_email(email)
- except ValidationError as e:
- return Response(
- {"error": "Please provide a valid email address."},
- status=status.HTTP_400_BAD_REQUEST,
- )
+ access_token, refresh_token = get_tokens_for_user(user)
- user = User.objects.get(email=email)
+ data = {
+ "access_token": access_token,
+ "refresh_token": refresh_token,
+ "user": serialized_user,
+ }
- if not user.check_password(password):
- return Response(
- {
- "error": "Sorry, we could not find a user with the provided credentials. Please try again."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
- if not user.is_active:
- return Response(
- {
- "error": "Your account has been deactivated. Please contact your site administrator."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
+ return Response(data, status=status.HTTP_200_OK)
+ # Sign in Process
+ else:
+ if not user.check_password(password):
+ return Response(
+ {
+ "error": "Sorry, we could not find a user with the provided credentials. Please try again."
+ },
+ status=status.HTTP_403_FORBIDDEN,
+ )
+ if not user.is_active:
+ return Response(
+ {
+ "error": "Your account has been deactivated. Please contact your site administrator."
+ },
+ status=status.HTTP_403_FORBIDDEN,
+ )
- serialized_user = UserSerializer(user).data
+ serialized_user = UserSerializer(user).data
- # settings last active for the user
- user.last_active = timezone.now()
- user.last_login_time = timezone.now()
- user.last_login_ip = request.META.get("REMOTE_ADDR")
- user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
- user.token_updated_at = timezone.now()
- user.save()
+ # set last active fields for the user
+ user.last_active = timezone.now()
+ user.last_login_time = timezone.now()
+ user.last_login_ip = request.META.get("REMOTE_ADDR")
+ user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
+ user.token_updated_at = timezone.now()
+ user.save()
- access_token, refresh_token = get_tokens_for_user(user)
+ access_token, refresh_token = get_tokens_for_user(user)
- data = {
- "access_token": access_token,
- "refresh_token": refresh_token,
- "user": serialized_user,
- }
+ data = {
+ "access_token": access_token,
+ "refresh_token": refresh_token,
+ "user": serialized_user,
+ }
- return Response(data, status=status.HTTP_200_OK)
+ return Response(data, status=status.HTTP_200_OK)
- except User.DoesNotExist:
- return Response(
- {
- "error": "Sorry, we could not find a user with the provided credentials. Please try again."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
except Exception as e:
capture_exception(e)
return Response(
@@ -216,14 +168,12 @@ def post(self, request):
class MagicSignInGenerateEndpoint(BaseAPIView):
-
permission_classes = [
AllowAny,
]
def post(self, request):
try:
-
email = request.data.get("email", False)
if not email:
@@ -269,7 +219,6 @@ def post(self, request):
ri.set(key, json.dumps(value), ex=expiry)
else:
-
value = {"current_attempt": 0, "email": email, "token": token}
expiry = 600
@@ -293,14 +242,12 @@ def post(self, request):
class MagicSignInEndpoint(BaseAPIView):
-
permission_classes = [
AllowAny,
]
def post(self, request):
try:
-
user_token = request.data.get("token", "").strip().lower()
key = request.data.get("key", False)
@@ -313,19 +260,20 @@ def post(self, request):
ri = redis_instance()
if ri.exists(key):
-
data = json.loads(ri.get(key))
token = data["token"]
email = data["email"]
if str(token) == str(user_token):
-
if User.objects.filter(email=email).exists():
user = User.objects.get(email=email)
else:
user = User.objects.create(
- email=email, username=uuid.uuid4().hex
+ email=email,
+ username=uuid.uuid4().hex,
+ password=make_password(uuid.uuid4().hex),
+ is_password_autoset=True,
)
user.last_active = timezone.now()
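
Note: sign-up and sign-in now collapse into one `SignInEndpoint`: an unknown email creates the account with the supplied password, a known one authenticates against it. A sketch of the implied client contract (host is a placeholder):

    import requests

    resp = requests.post(
        "https://plane.example.com/api/sign-in/",  # placeholder host
        json={"email": "captain@plane.so", "password": "password123"},
    )
    data = resp.json()
    # On success the payload carries both JWTs plus the serialized user
    access, refresh = data["access_token"], data["refresh_token"]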
diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py
index d1b291d9a2b..2b18aab96b3 100644
--- a/apiserver/plane/api/views/cycle.py
+++ b/apiserver/plane/api/views/cycle.py
@@ -1,5 +1,9 @@
+# Python imports
+import json
+
# Django imports
from django.db.models import OuterRef, Func, F
+from django.core import serializers
# Third party imports
from rest_framework.response import Response
@@ -11,10 +15,10 @@
from plane.api.serializers import CycleSerializer, CycleIssueSerializer
from plane.api.permissions import ProjectEntityPermission
from plane.db.models import Cycle, CycleIssue, Issue
+from plane.bgtasks.issue_activites_task import issue_activity
class CycleViewSet(BaseViewSet):
-
serializer_class = CycleSerializer
model = Cycle
permission_classes = [
@@ -41,7 +45,6 @@ def get_queryset(self):
class CycleIssueViewSet(BaseViewSet):
-
serializer_class = CycleIssueSerializer
model = CycleIssue
@@ -79,7 +82,6 @@ def get_queryset(self):
def create(self, request, slug, project_id, cycle_id):
try:
-
issues = request.data.get("issues", [])
if not len(issues):
@@ -91,29 +93,77 @@ def create(self, request, slug, project_id, cycle_id):
workspace__slug=slug, project_id=project_id, pk=cycle_id
)
- issues = Issue.objects.filter(
- pk__in=issues, workspace__slug=slug, project_id=project_id
- )
-
- # Delete old records in order to maintain the database integrity
- CycleIssue.objects.filter(issue_id__in=issues).delete()
+ # Get all CycleIssues already created
+ cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
+ records_to_update = []
+ update_cycle_issue_activity = []
+ record_to_create = []
+
+ for issue in issues:
+ cycle_issue = [
+ cycle_issue
+ for cycle_issue in cycle_issues
+ if str(cycle_issue.issue_id) in issues
+ ]
+ # Update only when cycle changes
+ if len(cycle_issue):
+ if cycle_issue[0].cycle_id != cycle_id:
+ update_cycle_issue_activity.append(
+ {
+ "old_cycle_id": str(cycle_issue[0].cycle_id),
+ "new_cycle_id": str(cycle_id),
+ "issue_id": str(cycle_issue[0].issue_id),
+ }
+ )
+ cycle_issue[0].cycle_id = cycle_id
+ records_to_update.append(cycle_issue[0])
+ else:
+ record_to_create.append(
+ CycleIssue(
+ project_id=project_id,
+ workspace=cycle.workspace,
+ created_by=request.user,
+ updated_by=request.user,
+ cycle=cycle,
+ issue_id=issue,
+ )
+ )
CycleIssue.objects.bulk_create(
- [
- CycleIssue(
- project_id=project_id,
- workspace=cycle.workspace,
- created_by=request.user,
- updated_by=request.user,
- cycle=cycle,
- issue=issue,
- )
- for issue in issues
- ],
+ record_to_create,
batch_size=10,
ignore_conflicts=True,
)
- return Response({"message": "Success"}, status=status.HTTP_200_OK)
+ CycleIssue.objects.bulk_update(
+ records_to_update,
+ ["cycle"],
+ batch_size=10,
+ )
+
+ # Capture Issue Activity
+ issue_activity.delay(
+ {
+ "type": "issue.activity",
+ "requested_data": json.dumps({"cycles_list": issues}),
+ "actor_id": str(self.request.user.id),
+ "issue_id": str(self.kwargs.get("pk", None)),
+ "project_id": str(self.kwargs.get("project_id", None)),
+ "current_instance": json.dumps(
+ {
+ "updated_cycle_issues": update_cycle_issue_activity,
+ "created_cycle_issues": serializers.serialize(
+ "json", record_to_create
+ ),
+ }
+ ),
+ },
+ )
+
+ # Return all Cycle Issues
+ return Response(
+ CycleIssueSerializer(self.get_queryset(), many=True).data,
+ status=status.HTTP_200_OK,
+ )
except Cycle.DoesNotExist:
return Response(
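
Note: the endpoint now reconciles rather than delete-and-recreates: existing `CycleIssue` rows are moved via `bulk_update`, missing ones are inserted via `bulk_create`, and the change set is handed to `issue_activity`. The request body is unchanged; a hedged call sketch (the route path is assumed from the existing cycle-issue URLs, which this diff does not touch):

    import requests

    resp = requests.post(
        "https://plane.example.com/api/workspaces/my-workspace/projects/"
        "<project-uuid>/cycles/<cycle-uuid>/cycle-issues/",  # placeholders
        headers={"Authorization": "Bearer TOKEN"},
        json={"issues": ["<issue-uuid>"]},
    )
    # Now returns the serialized CycleIssue list instead of {"message": "Success"}
    cycle_issues = resp.json()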
diff --git a/apiserver/plane/api/views/integration/__init__.py b/apiserver/plane/api/views/integration/__init__.py
new file mode 100644
index 00000000000..69320257368
--- /dev/null
+++ b/apiserver/plane/api/views/integration/__init__.py
@@ -0,0 +1,7 @@
+from .base import IntegrationViewSet, WorkspaceIntegrationViewSet
+from .github import (
+ GithubRepositorySyncViewSet,
+ GithubIssueSyncViewSet,
+ GithubCommentSyncViewSet,
+ GithubRepositoriesEndpoint,
+)
diff --git a/apiserver/plane/api/views/integration/base.py b/apiserver/plane/api/views/integration/base.py
new file mode 100644
index 00000000000..4f15c347f91
--- /dev/null
+++ b/apiserver/plane/api/views/integration/base.py
@@ -0,0 +1,198 @@
+# Python imports
+import uuid
+
+# Django imports
+from django.db import IntegrityError
+from django.contrib.auth.hashers import make_password
+
+# Third party imports
+from rest_framework.response import Response
+from rest_framework import status
+from sentry_sdk import capture_exception
+
+# Module imports
+from plane.api.views import BaseViewSet
+from plane.db.models import (
+ Integration,
+ WorkspaceIntegration,
+ Workspace,
+ User,
+ WorkspaceMember,
+ APIToken,
+)
+from plane.api.serializers import IntegrationSerializer, WorkspaceIntegrationSerializer
+from plane.utils.integrations.github import (
+ get_github_metadata,
+ delete_github_installation,
+)
+
+
+class IntegrationViewSet(BaseViewSet):
+ serializer_class = IntegrationSerializer
+ model = Integration
+
+ def create(self, request):
+ try:
+ serializer = IntegrationSerializer(data=request.data)
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_201_CREATED)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ def partial_update(self, request, pk):
+ try:
+ integration = Integration.objects.get(pk=pk)
+ if integration.verified:
+ return Response(
+ {"error": "Verified integrations cannot be updated"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ serializer = IntegrationSerializer(
+ integration, data=request.data, partial=True
+ )
+
+ if serializer.is_valid():
+ serializer.save()
+ return Response(serializer.data, status=status.HTTP_200_OK)
+ return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
+
+ except Integration.DoesNotExist:
+ return Response(
+ {"error": "Integration Does not exist"},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+
+class WorkspaceIntegrationViewSet(BaseViewSet):
+ serializer_class = WorkspaceIntegrationSerializer
+ model = WorkspaceIntegration
+
+ def get_queryset(self):
+ return (
+ super()
+ .get_queryset()
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .select_related("integration")
+ )
+
+ def create(self, request, slug, provider):
+ try:
+ installation_id = request.data.get("installation_id", None)
+
+ if not installation_id:
+ return Response(
+ {"error": "Installation ID is required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ workspace = Workspace.objects.get(slug=slug)
+ integration = Integration.objects.get(provider=provider)
+ config = {}
+ if provider == "github":
+ metadata = get_github_metadata(installation_id)
+ config = {"installation_id": installation_id}
+
+ # Create a bot user
+ bot_user = User.objects.create(
+ email=f"{uuid.uuid4().hex}@plane.so",
+ username=uuid.uuid4().hex,
+ password=make_password(uuid.uuid4().hex),
+ is_password_autoset=True,
+ is_bot=True,
+ first_name=integration.title,
+ avatar=integration.avatar_url
+ if integration.avatar_url is not None
+ else "",
+ )
+
+ # Create an API Token for the bot user
+ api_token = APIToken.objects.create(
+ user=bot_user,
+ user_type=1, # bot user
+ workspace=workspace,
+ )
+
+ workspace_integration = WorkspaceIntegration.objects.create(
+ workspace=workspace,
+ integration=integration,
+ actor=bot_user,
+ api_token=api_token,
+ metadata=metadata,
+ config=config,
+ )
+
+ # Add bot user as a member of workspace
+ _ = WorkspaceMember.objects.create(
+ workspace=workspace_integration.workspace,
+ member=bot_user,
+ role=20,
+ )
+ return Response(
+ WorkspaceIntegrationSerializer(workspace_integration).data,
+ status=status.HTTP_201_CREATED,
+ )
+ except IntegrityError as e:
+ if "already exists" in str(e):
+ return Response(
+ {"error": "Integration is already active in the workspace"},
+ status=status.HTTP_410_GONE,
+ )
+ else:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ except (Workspace.DoesNotExist, Integration.DoesNotExist) as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Workspace or Integration not found"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ def destroy(self, request, slug, pk):
+ try:
+ workspace_integration = WorkspaceIntegration.objects.get(
+ pk=pk, workspace__slug=slug
+ )
+
+ if workspace_integration.integration.provider == "github":
+ installation_id = workspace_integration.config.get(
+ "installation_id", False
+ )
+ if installation_id:
+ delete_github_installation(installation_id=installation_id)
+
+ workspace_integration.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+
+ except WorkspaceIntegration.DoesNotExist:
+ return Response(
+ {"error": "Workspace Integration Does not exists"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
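
Note: `get_github_metadata` and `delete_github_installation` live in `plane.utils.integrations.github` and are not shown in this diff. Conceptually the first resolves a GitHub App installation; a hedged sketch against GitHub's public installation API (the app JWT signing is omitted and assumed):

    import requests

    def get_github_metadata_sketch(installation_id, app_jwt):
        # GitHub App installation lookup; app_jwt must be signed with the
        # app's private key. The response includes access_tokens_url and
        # repositories_url, which the views above read from metadata.
        resp = requests.get(
            f"https://api.github.com/app/installations/{installation_id}",
            headers={
                "Authorization": f"Bearer {app_jwt}",
                "Accept": "application/vnd.github+json",
            },
        )
        return resp.json()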
diff --git a/apiserver/plane/api/views/integration/github.py b/apiserver/plane/api/views/integration/github.py
new file mode 100644
index 00000000000..df8f1d0c234
--- /dev/null
+++ b/apiserver/plane/api/views/integration/github.py
@@ -0,0 +1,165 @@
+# Third party imports
+from rest_framework import status
+from rest_framework.response import Response
+from sentry_sdk import capture_exception
+
+# Module imports
+from plane.api.views import BaseViewSet, BaseAPIView
+from plane.db.models import (
+ GithubIssueSync,
+ GithubRepositorySync,
+ GithubRepository,
+ WorkspaceIntegration,
+ ProjectMember,
+ Label,
+ GithubCommentSync,
+)
+from plane.api.serializers import (
+ GithubIssueSyncSerializer,
+ GithubRepositorySyncSerializer,
+ GithubCommentSyncSerializer,
+)
+from plane.utils.integrations.github import get_github_repos
+
+
+class GithubRepositoriesEndpoint(BaseAPIView):
+ def get(self, request, slug, workspace_integration_id):
+ try:
+ workspace_integration = WorkspaceIntegration.objects.get(
+ workspace__slug=slug, pk=workspace_integration_id
+ )
+ access_tokens_url = workspace_integration.metadata["access_tokens_url"]
+ repositories_url = workspace_integration.metadata["repositories_url"]
+ repositories = get_github_repos(access_tokens_url, repositories_url)
+ return Response(repositories, status=status.HTTP_200_OK)
+ except WorkspaceIntegration.DoesNotExist:
+ return Response(
+ {"error": "Workspace Integration Does not exists"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+
+class GithubRepositorySyncViewSet(BaseViewSet):
+ serializer_class = GithubRepositorySyncSerializer
+ model = GithubRepositorySync
+
+ def perform_create(self, serializer):
+ serializer.save(project_id=self.kwargs.get("project_id"))
+
+ def get_queryset(self):
+ return (
+ super()
+ .get_queryset()
+ .filter(workspace__slug=self.kwargs.get("slug"))
+ .filter(project_id=self.kwargs.get("project_id"))
+ )
+
+ def create(self, request, slug, project_id, workspace_integration_id):
+ try:
+ name = request.data.get("name", False)
+ url = request.data.get("url", False)
+ config = request.data.get("config", {})
+ repository_id = request.data.get("repository_id", False)
+ owner = request.data.get("owner", False)
+
+ if not name or not url or not repository_id or not owner:
+ return Response(
+ {"error": "Name, url, repository_id and owner are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ # Get the workspace integration
+ workspace_integration = WorkspaceIntegration.objects.get(
+ pk=workspace_integration_id
+ )
+
+ # Delete the old repository object
+ GithubRepositorySync.objects.filter(
+ project_id=project_id, workspace__slug=slug
+ ).delete()
+ GithubRepository.objects.filter(
+ project_id=project_id, workspace__slug=slug
+ ).delete()
+ # Project member delete
+ ProjectMember.objects.filter(
+ member=workspace_integration.actor, role=20, project_id=project_id
+ ).delete()
+
+ # Create repository
+ repo = GithubRepository.objects.create(
+ name=name,
+ url=url,
+ config=config,
+ repository_id=repository_id,
+ owner=owner,
+ project_id=project_id,
+ )
+
+ # Create a Label for github
+ label = Label.objects.filter(
+ name="GitHub",
+ project_id=project_id,
+ ).first()
+
+ if label is None:
+ label = Label.objects.create(
+ name="GitHub",
+ project_id=project_id,
+ description="Label to sync Plane issues with GitHub issues",
+ color="#003773",
+ )
+
+ # Create repo sync
+ repo_sync = GithubRepositorySync.objects.create(
+ repository=repo,
+ workspace_integration=workspace_integration,
+ actor=workspace_integration.actor,
+ credentials=request.data.get("credentials", {}),
+ project_id=project_id,
+ label=label,
+ )
+
+ # Add bot as a member in the project
+ _ = ProjectMember.objects.create(
+ member=workspace_integration.actor, role=20, project_id=project_id
+ )
+
+ # Return Response
+ return Response(
+ GithubRepositorySyncSerializer(repo_sync).data,
+ status=status.HTTP_201_CREATED,
+ )
+
+ except WorkspaceIntegration.DoesNotExist:
+ return Response(
+ {"error": "Workspace Integration does not exist"},
+ status=status.HTTP_404_NOT_FOUND,
+ )
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+
+class GithubIssueSyncViewSet(BaseViewSet):
+ serializer_class = GithubIssueSyncSerializer
+ model = GithubIssueSync
+
+ def perform_create(self, serializer):
+ serializer.save(
+ project_id=self.kwargs.get("project_id"),
+ repository_sync_id=self.kwargs.get("repo_sync_id"),
+ )
+
+
+class GithubCommentSyncViewSet(BaseViewSet):
+ serializer_class = GithubCommentSyncSerializer
+ model = GithubCommentSync
+
+ def perform_create(self, serializer):
+ serializer.save(
+ project_id=self.kwargs.get("project_id"),
+ issue_sync_id=self.kwargs.get("issue_sync_id"),
+ )
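
Note: repository sync creation is destructive by design: any previous sync, repository record, and bot project membership are deleted before the new ones are created. A hedged request-body sketch for the create endpoint (values are illustrative):

    payload = {
        "name": "plane",
        "url": "https://github.com/makeplane/plane",
        "repository_id": 123456789,  # GitHub's numeric repository id
        "owner": "makeplane",
        "config": {},                # optional
        "credentials": {},           # optional, stored on the sync row
    }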
diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py
index 37082e0ec8a..68797c2962c 100644
--- a/apiserver/plane/api/views/issue.py
+++ b/apiserver/plane/api/views/issue.py
@@ -3,7 +3,7 @@
from itertools import groupby, chain
# Django imports
-from django.db.models import Prefetch, OuterRef, Func, F
+from django.db.models import Prefetch, OuterRef, Func, F, Q
from django.core.serializers.json import DjangoJSONEncoder
# Third Party imports
@@ -22,6 +22,7 @@
LabelSerializer,
IssueSerializer,
LabelSerializer,
+ IssueFlatSerializer,
)
from plane.api.permissions import (
ProjectEntityPermission,
@@ -39,8 +40,10 @@
IssueBlocker,
CycleIssue,
ModuleIssue,
+ IssueLink,
)
from plane.bgtasks.issue_activites_task import issue_activity
+from plane.utils.grouper import group_results
class IssueViewSet(BaseViewSet):
@@ -75,10 +78,9 @@ def perform_update(self, serializer):
self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
)
if current_instance is not None:
-
issue_activity.delay(
{
- "type": "issue.activity",
+ "type": "issue.activity.updated",
"requested_data": requested_data,
"actor_id": str(self.request.user.id),
"issue_id": str(self.kwargs.get("pk", None)),
@@ -91,8 +93,28 @@ def perform_update(self, serializer):
return super().perform_update(serializer)
- def get_queryset(self):
+ def perform_destroy(self, instance):
+ current_instance = (
+ self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
+ )
+ if current_instance is not None:
+ issue_activity.delay(
+ {
+ "type": "issue.activity.deleted",
+ "requested_data": json.dumps(
+ {"issue_id": str(self.kwargs.get("pk", None))}
+ ),
+ "actor_id": str(self.request.user.id),
+ "issue_id": str(self.kwargs.get("pk", None)),
+ "project_id": str(self.kwargs.get("project_id", None)),
+ "current_instance": json.dumps(
+ IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
+ ),
+ },
+ )
+ return super().perform_destroy(instance)
+ def get_queryset(self):
return (
super()
.get_queryset()
@@ -136,52 +158,42 @@ def get_queryset(self):
).prefetch_related("module__members"),
),
)
+ .prefetch_related(
+ Prefetch(
+ "issue_link",
+ queryset=IssueLink.objects.select_related("issue").select_related(
+ "created_by"
+ ),
+ )
+ )
)
- def grouper(self, issue, group_by):
- group_by = issue.get(group_by, "")
-
- if isinstance(group_by, list):
- if len(group_by):
- return group_by[0]
- else:
- return ""
-
- else:
- return group_by
-
def list(self, request, slug, project_id):
try:
- issue_queryset = self.get_queryset()
+ # Issue State groups
+ type = request.GET.get("type", "all")
+ group = ["backlog", "unstarted", "started", "completed", "cancelled"]
+ if type == "backlog":
+ group = ["backlog"]
+ if type == "active":
+ group = ["unstarted", "started"]
+
+ issue_queryset = (
+ self.get_queryset()
+ .order_by(request.GET.get("order_by", "created_at"))
+ .filter(state__group__in=group)
+ )
+ issues = IssueSerializer(issue_queryset, many=True).data
+
## Grouping the results
group_by = request.GET.get("group_by", False)
- # TODO: Move this group by from ittertools to ORM for better performance - nk
if group_by:
- issue_dict = dict()
-
- issues = IssueSerializer(issue_queryset, many=True).data
-
- for key, value in groupby(
- issues, lambda issue: self.grouper(issue, group_by)
- ):
- issue_dict[str(key)] = list(value)
-
- return Response(issue_dict, status=status.HTTP_200_OK)
+ return Response(
+ group_results(issues, group_by), status=status.HTTP_200_OK
+ )
- return Response(
- {
- "next_cursor": str(0),
- "prev_cursor": str(0),
- "next_page_results": False,
- "prev_page_results": False,
- "count": issue_queryset.count(),
- "total_pages": 1,
- "extra_stats": {},
- "results": IssueSerializer(issue_queryset, many=True).data,
- },
- status=status.HTTP_200_OK,
- )
+ return Response(issues, status=status.HTTP_200_OK)
except Exception as e:
print(e)
@@ -202,15 +214,18 @@ def create(self, request, slug, project_id):
serializer.save()
# Track the issue
- IssueActivity.objects.create(
- issue_id=serializer.data["id"],
- project_id=project_id,
- workspace_id=serializer["workspace"],
- comment=f"{request.user.email} created the issue",
- verb="created",
- actor=request.user,
+ issue_activity.delay(
+ {
+ "type": "issue.activity.created",
+ "requested_data": json.dumps(
+ self.request.data, cls=DjangoJSONEncoder
+ ),
+ "actor_id": str(request.user.id),
+ "issue_id": str(serializer.data.get("id", None)),
+ "project_id": str(project_id),
+ "current_instance": None,
+ },
)
-
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@@ -265,6 +280,14 @@ def get(self, request, slug):
queryset=ModuleIssue.objects.select_related("module", "issue"),
),
)
+ .prefetch_related(
+ Prefetch(
+ "issue_link",
+ queryset=IssueLink.objects.select_related(
+ "issue"
+ ).select_related("created_by"),
+ )
+ )
)
serializer = IssueSerializer(issues, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@@ -277,7 +300,6 @@ def get(self, request, slug):
class WorkSpaceIssuesEndpoint(BaseAPIView):
-
permission_classes = [
WorkSpaceAdminPermission,
]
@@ -298,7 +320,6 @@ def get(self, request, slug):
class IssueActivityEndpoint(BaseAPIView):
-
permission_classes = [
ProjectEntityPermission,
]
@@ -307,7 +328,10 @@ def get(self, request, slug, project_id, issue_id):
try:
issue_activities = (
IssueActivity.objects.filter(issue_id=issue_id)
- .filter(project__project_projectmember__member=self.request.user)
+ .filter(
+ ~Q(field="comment"),
+ project__project_projectmember__member=self.request.user,
+ )
.select_related("actor")
).order_by("created_by")
issue_comments = (
@@ -333,7 +357,6 @@ def get(self, request, slug, project_id, issue_id):
class IssueCommentViewSet(BaseViewSet):
-
serializer_class = IssueCommentSerializer
model = IssueComment
permission_classes = [
@@ -351,6 +374,60 @@ def perform_create(self, serializer):
issue_id=self.kwargs.get("issue_id"),
actor=self.request.user if self.request.user is not None else None,
)
+ issue_activity.delay(
+ {
+ "type": "comment.activity.created",
+ "requested_data": json.dumps(serializer.data, cls=DjangoJSONEncoder),
+ "actor_id": str(self.request.user.id),
+ "issue_id": str(self.kwargs.get("issue_id")),
+ "project_id": str(self.kwargs.get("project_id")),
+ "current_instance": None,
+ },
+ )
+
+ def perform_update(self, serializer):
+ requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder)
+ current_instance = (
+ self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
+ )
+ if current_instance is not None:
+ issue_activity.delay(
+ {
+ "type": "comment.activity.updated",
+ "requested_data": requested_data,
+ "actor_id": str(self.request.user.id),
+ "issue_id": str(self.kwargs.get("issue_id", None)),
+ "project_id": str(self.kwargs.get("project_id", None)),
+ "current_instance": json.dumps(
+ IssueCommentSerializer(current_instance).data,
+ cls=DjangoJSONEncoder,
+ ),
+ },
+ )
+
+ return super().perform_update(serializer)
+
+ def perform_destroy(self, instance):
+ current_instance = (
+ self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
+ )
+ if current_instance is not None:
+ issue_activity.delay(
+ {
+ "type": "comment.activity.deleted",
+ "requested_data": json.dumps(
+ {"comment_id": str(self.kwargs.get("pk", None))}
+ ),
+ "actor_id": str(self.request.user.id),
+ "issue_id": str(self.kwargs.get("issue_id", None)),
+ "project_id": str(self.kwargs.get("project_id", None)),
+ "current_instance": json.dumps(
+ IssueCommentSerializer(current_instance).data,
+ cls=DjangoJSONEncoder,
+ ),
+ },
+ )
+ return super().perform_destroy(instance)
def get_queryset(self):
return self.filter_queryset(
@@ -436,7 +513,6 @@ def list(self, request, slug, project_id):
def create(self, request, slug, project_id):
try:
-
issue_property, created = IssueProperty.objects.get_or_create(
user=request.user,
project_id=project_id,
@@ -463,7 +539,6 @@ def create(self, request, slug, project_id):
class LabelViewSet(BaseViewSet):
-
serializer_class = LabelSerializer
model = Label
permission_classes = [
@@ -490,14 +565,12 @@ def get_queryset(self):
class BulkDeleteIssuesEndpoint(BaseAPIView):
-
permission_classes = [
ProjectEntityPermission,
]
def delete(self, request, slug, project_id):
try:
-
issue_ids = request.data.get("issue_ids", [])
if not len(issue_ids):
@@ -527,14 +600,12 @@ def delete(self, request, slug, project_id):
class SubIssuesEndpoint(BaseAPIView):
-
permission_classes = [
ProjectEntityPermission,
]
def get(self, request, slug, project_id, issue_id):
try:
-
sub_issues = (
Issue.objects.filter(
parent_id=issue_id, workspace__slug=slug, project_id=project_id
@@ -583,3 +654,39 @@ def get(self, request, slug, project_id, issue_id):
{"error": "Something went wrong please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
+
+ # Assign multiple sub issues
+ def post(self, request, slug, project_id, issue_id):
+ try:
+ parent_issue = Issue.objects.get(pk=issue_id)
+ sub_issue_ids = request.data.get("sub_issue_ids", [])
+
+ if not len(sub_issue_ids):
+ return Response(
+ {"error": "Sub Issue IDs are required"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ sub_issues = Issue.objects.filter(id__in=sub_issue_ids)
+
+ for sub_issue in sub_issues:
+ sub_issue.parent = parent_issue
+
+ _ = Issue.objects.bulk_update(sub_issues, ["parent"], batch_size=10)
+
+ updated_sub_issues = Issue.objects.filter(id__in=sub_issue_ids)
+
+ return Response(
+ IssueFlatSerializer(updated_sub_issues, many=True).data,
+ status=status.HTTP_200_OK,
+ )
+ except Issue.DoesNotExist:
+ return Response(
+ {"Parent Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST
+ )
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
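
Note: `group_results` from `plane.utils.grouper` replaces the itertools-based grouper but is not part of this diff. Judging from the call site and the removed code, it buckets serialized issues by a field value; a hedged sketch of that contract:

    def group_results(results, group_by):
        # Assumed behaviour: bucket each issue dict by its group_by value,
        # taking the first element when the value is a list (e.g. assignees).
        grouped = {}
        for result in results:
            key = result.get(group_by)
            if isinstance(key, list):
                key = key[0] if key else ""
            grouped.setdefault(str(key), []).append(result)
        return grouped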
diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py
index 9955ded76a2..a1cda983467 100644
--- a/apiserver/plane/api/views/module.py
+++ b/apiserver/plane/api/views/module.py
@@ -1,6 +1,10 @@
+# Python imports
+import json
+
# Django Imports
from django.db import IntegrityError
from django.db.models import Prefetch, F, OuterRef, Func
+from django.core import serializers
# Third party imports
from rest_framework.response import Response
@@ -22,10 +26,10 @@
Issue,
ModuleLink,
)
+from plane.bgtasks.issue_activites_task import issue_activity
class ModuleViewSet(BaseViewSet):
-
model = Module
permission_classes = [
ProjectEntityPermission,
@@ -95,7 +99,6 @@ def create(self, request, slug, project_id):
class ModuleIssueViewSet(BaseViewSet):
-
serializer_class = ModuleIssueSerializer
model = ModuleIssue
@@ -148,29 +151,77 @@ def create(self, request, slug, project_id, module_id):
workspace__slug=slug, project_id=project_id, pk=module_id
)
- issues = Issue.objects.filter(
- pk__in=issues, workspace__slug=slug, project_id=project_id
- )
-
- # Delete old records in order to maintain the database integrity
- ModuleIssue.objects.filter(issue_id__in=issues).delete()
+ module_issues = list(ModuleIssue.objects.filter(issue_id__in=issues))
+
+ update_module_issue_activity = []
+ records_to_update = []
+ record_to_create = []
+
+ for issue in issues:
+ module_issue = [
+ module_issue
+ for module_issue in module_issues
+ if str(module_issue.issue_id) in issues
+ ]
+
+ if len(module_issue):
+ if module_issue[0].module_id != module_id:
+ update_module_issue_activity.append(
+ {
+ "old_module_id": str(module_issue[0].module_id),
+ "new_module_id": str(module_id),
+ "issue_id": str(module_issue[0].issue_id),
+ }
+ )
+ module_issue[0].module_id = module_id
+ records_to_update.append(module_issue[0])
+ else:
+ record_to_create.append(
+ ModuleIssue(
+ module=module,
+ issue_id=issue,
+ project_id=project_id,
+ workspace=module.workspace,
+ created_by=request.user,
+ updated_by=request.user,
+ )
+ )
ModuleIssue.objects.bulk_create(
- [
- ModuleIssue(
- module=module,
- issue=issue,
- project_id=project_id,
- workspace=module.workspace,
- created_by=request.user,
- updated_by=request.user,
- )
- for issue in issues
- ],
+ record_to_create,
batch_size=10,
ignore_conflicts=True,
)
- return Response({"message": "Success"}, status=status.HTTP_200_OK)
+
+ ModuleIssue.objects.bulk_update(
+ records_to_update,
+ ["module"],
+ batch_size=10,
+ )
+
+ # Capture Issue Activity
+ issue_activity.delay(
+ {
+ "type": "issue.activity",
+ "requested_data": json.dumps({"modules_list": issues}),
+ "actor_id": str(self.request.user.id),
+ "issue_id": str(self.kwargs.get("pk", None)),
+ "project_id": str(self.kwargs.get("project_id", None)),
+ "current_instance": json.dumps(
+ {
+ "updated_module_issues": update_module_issue_activity,
+ "created_module_issues": serializers.serialize(
+ "json", record_to_create
+ ),
+ }
+ ),
+ },
+ )
+
+ return Response(
+ ModuleIssueSerializer(self.get_queryset(), many=True).data,
+ status=status.HTTP_200_OK,
+ )
except Module.DoesNotExist:
return Response(
{"error": "Module Does not exists"}, status=status.HTTP_400_BAD_REQUEST
diff --git a/apiserver/plane/api/views/oauth.py b/apiserver/plane/api/views/oauth.py
index bcebfb29436..994cb0466ab 100644
--- a/apiserver/plane/api/views/oauth.py
+++ b/apiserver/plane/api/views/oauth.py
@@ -34,7 +34,6 @@ def get_tokens_for_user(user):
def validate_google_token(token, client_id):
try:
-
id_info = id_token.verify_oauth2_token(
token, google_auth_request.Request(), client_id
)
@@ -106,9 +105,19 @@ def get_user_data(access_token: str) -> dict:
resp = requests.get(url=url, headers=headers)
- userData = resp.json()
+ user_data = resp.json()
+
+ response = requests.get(
+ url="https://api.github.com/user/emails", headers=headers
+ ).json()
+
+    # Use the primary address from /user/emails when /user hides the email
+    for item in response:
+        if item.get("primary") is True:
+            user_data.update({"email": item.get("email")})
- return userData
+ return user_data
class OauthEndpoint(BaseAPIView):
@@ -116,7 +125,6 @@ class OauthEndpoint(BaseAPIView):
def post(self, request):
try:
-
medium = request.data.get("medium", False)
id_token = request.data.get("credential", False)
client_id = request.data.get("clientId", False)
@@ -138,7 +146,6 @@ def post(self, request):
email = data.get("email", None)
if email == None:
-
return Response(
{
"error": "Something went wrong. Please try again later or contact the support team."
@@ -153,7 +160,6 @@ def post(self, request):
mobile_number = uuid.uuid4().hex
email_verified = True
else:
-
return Response(
{
"error": "Something went wrong. Please try again later or contact the support team."
diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py
index 2ec6faf1e53..e24477ecd30 100644
--- a/apiserver/plane/api/views/project.py
+++ b/apiserver/plane/api/views/project.py
@@ -75,7 +75,6 @@ def get_queryset(self):
def create(self, request, slug):
try:
-
workspace = Workspace.objects.get(slug=slug)
serializer = ProjectSerializer(
@@ -96,6 +95,7 @@ def create(self, request, slug):
"color": "#5e6ad2",
"sequence": 15000,
"group": "backlog",
+ "default": True,
},
{
"name": "Todo",
@@ -132,6 +132,7 @@ def create(self, request, slug):
sequence=state["sequence"],
workspace=serializer.instance.workspace,
group=state["group"],
+ default=state.get("default", False),
)
for state in states
]
@@ -188,7 +189,7 @@ def partial_update(self, request, slug, pk=None):
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
)
- except (Project.DoesNotExist or Workspace.DoesNotExist) as e:
+        except (Project.DoesNotExist, Workspace.DoesNotExist) as e:
return Response(
{"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
)
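A note on the `except` clause above: the `-` side's `except (Project.DoesNotExist or Workspace.DoesNotExist)` only ever caught `Project.DoesNotExist`, because `A or B` evaluates to `A` alone; only the tuple form on the `+` side handles both. A runnable illustration:

class ProjectDoesNotExist(Exception): ...
class WorkspaceDoesNotExist(Exception): ...

try:
    raise WorkspaceDoesNotExist("caught only by the tuple form")
except (ProjectDoesNotExist, WorkspaceDoesNotExist) as e:
    print(type(e).__name__, e)

# `except ProjectDoesNotExist or WorkspaceDoesNotExist:` compiles, but the
# `or` expression evaluates to ProjectDoesNotExist alone, so the raise
# above would propagate uncaught.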
@@ -206,14 +207,12 @@ def partial_update(self, request, slug, pk=None):
class InviteProjectEndpoint(BaseAPIView):
-
permission_classes = [
ProjectBasePermission,
]
def post(self, request, slug, project_id):
try:
-
email = request.data.get("email", False)
role = request.data.get("role", False)
@@ -287,7 +286,6 @@ def post(self, request, slug, project_id):
class UserProjectInvitationsViewset(BaseViewSet):
-
serializer_class = ProjectMemberInviteSerializer
model = ProjectMemberInvite
@@ -301,7 +299,6 @@ def get_queryset(self):
def create(self, request):
try:
-
invitations = request.data.get("invitations")
project_invitations = ProjectMemberInvite.objects.filter(
pk__in=invitations, accepted=True
@@ -331,7 +328,6 @@ def create(self, request):
class ProjectMemberViewSet(BaseViewSet):
-
serializer_class = ProjectMemberSerializer
model = ProjectMember
permission_classes = [
@@ -356,14 +352,12 @@ def get_queryset(self):
class AddMemberToProjectEndpoint(BaseAPIView):
-
permission_classes = [
ProjectBasePermission,
]
def post(self, request, slug, project_id):
try:
-
member_id = request.data.get("member_id", False)
role = request.data.get("role", False)
@@ -412,13 +406,11 @@ def post(self, request, slug, project_id):
class AddTeamToProjectEndpoint(BaseAPIView):
-
permission_classes = [
ProjectBasePermission,
]
def post(self, request, slug, project_id):
-
try:
team_members = TeamMember.objects.filter(
workspace__slug=slug, team__in=request.data.get("teams", [])
@@ -467,7 +459,6 @@ def post(self, request, slug, project_id):
class ProjectMemberInvitationsViewset(BaseViewSet):
-
serializer_class = ProjectMemberInviteSerializer
model = ProjectMemberInvite
@@ -489,7 +480,6 @@ def get_queryset(self):
class ProjectMemberInviteDetailViewSet(BaseViewSet):
-
serializer_class = ProjectMemberInviteSerializer
model = ProjectMemberInvite
@@ -509,14 +499,12 @@ def get_queryset(self):
class ProjectIdentifierEndpoint(BaseAPIView):
-
permission_classes = [
ProjectBasePermission,
]
def get(self, request, slug):
try:
-
name = request.GET.get("name", "").strip().upper()
if name == "":
@@ -541,7 +529,6 @@ def get(self, request, slug):
def delete(self, request, slug):
try:
-
name = request.data.get("name", "").strip().upper()
if name == "":
@@ -616,7 +603,6 @@ def post(self, request, slug):
class ProjectUserViewsEndpoint(BaseAPIView):
def post(self, request, slug, project_id):
try:
-
project = Project.objects.get(pk=project_id, workspace__slug=slug)
project_member = ProjectMember.objects.filter(
@@ -655,7 +641,6 @@ def post(self, request, slug, project_id):
class ProjectMemberUserEndpoint(BaseAPIView):
def get(self, request, slug, project_id):
try:
-
project_member = ProjectMember.objects.get(
project_id=project_id, workspace__slug=slug, member=request.user
)
diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py
index 8054b15dd92..4616fcee7f0 100644
--- a/apiserver/plane/api/views/state.py
+++ b/apiserver/plane/api/views/state.py
@@ -1,3 +1,12 @@
+# Python imports
+from itertools import groupby
+
+# Third party imports
+from rest_framework.response import Response
+from rest_framework import status
+from sentry_sdk import capture_exception
+
+
# Module imports
from . import BaseViewSet
from plane.api.serializers import StateSerializer
@@ -6,7 +15,6 @@
class StateViewSet(BaseViewSet):
-
serializer_class = StateSerializer
model = State
permission_classes = [
@@ -27,3 +35,38 @@ def get_queryset(self):
.select_related("workspace")
.distinct()
)
+
+ def list(self, request, slug, project_id):
+ try:
+ state_dict = dict()
+ states = StateSerializer(self.get_queryset(), many=True).data
+
+ for key, value in groupby(
+ sorted(states, key=lambda state: state["group"]),
+ lambda state: state.get("group"),
+ ):
+ state_dict[str(key)] = list(value)
+
+ return Response(state_dict, status=status.HTTP_200_OK)
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+                {"error": "Something went wrong, please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ def destroy(self, request, slug, project_id, pk):
+ try:
+ state = State.objects.get(
+ pk=pk, project_id=project_id, workspace__slug=slug
+ )
+
+ if state.default:
+ return Response(
+                    {"error": "Default state cannot be deleted"}, status=status.HTTP_400_BAD_REQUEST
+ )
+
+ state.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+ except State.DoesNotExist:
+            return Response({"error": "State does not exist"}, status=status.HTTP_404_NOT_FOUND)
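The new `list` action buckets serialized states by their `group`. `itertools.groupby` only merges adjacent items, which is why the queryset is sorted on the same key first; a self-contained illustration:

from itertools import groupby

states = [
    {"name": "Todo", "group": "unstarted"},
    {"name": "Backlog", "group": "backlog"},
    {"name": "In Progress", "group": "started"},
    {"name": "Triage", "group": "backlog"},
]

grouped = {
    key: list(items)
    for key, items in groupby(
        sorted(states, key=lambda s: s["group"]),  # groupby needs sorted input
        key=lambda s: s["group"],
    )
}
print(sorted(grouped))          # ['backlog', 'started', 'unstarted']
print(len(grouped["backlog"]))  # 2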
diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py
index f6debc921fa..a9bf30712d9 100644
--- a/apiserver/plane/bgtasks/issue_activites_task.py
+++ b/apiserver/plane/bgtasks/issue_activites_task.py
@@ -1,12 +1,27 @@
# Python imports
import json
+import requests
+
+# Django imports
+from django.conf import settings
+from django.core.serializers.json import DjangoJSONEncoder
# Third Party imports
from django_rq import job
from sentry_sdk import capture_exception
# Module imports
-from plane.db.models import User, Issue, Project, Label, IssueActivity, State
+from plane.db.models import (
+ User,
+ Issue,
+ Project,
+ Label,
+ IssueActivity,
+ State,
+ Cycle,
+ Module,
+)
+from plane.api.serializers import IssueActivitySerializer
# Track changes in name
@@ -44,7 +59,6 @@ def track_parent(
issue_activities,
):
if current_instance.get("parent") != requested_data.get("parent"):
-
if requested_data.get("parent") == None:
old_parent = Issue.objects.get(pk=current_instance.get("parent"))
issue_activities.append(
@@ -134,7 +148,6 @@ def track_state(
issue_activities,
):
if current_instance.get("state") != requested_data.get("state"):
-
new_state = State.objects.get(pk=requested_data.get("state", None))
old_state = State.objects.get(pk=current_instance.get("state", None))
@@ -167,7 +180,6 @@ def track_description(
if current_instance.get("description_html") != requested_data.get(
"description_html"
):
-
issue_activities.append(
IssueActivity(
issue_id=issue_id,
@@ -274,7 +286,6 @@ def track_labels(
):
# Label Addition
if len(requested_data.get("labels_list")) > len(current_instance.get("labels")):
-
for label in requested_data.get("labels_list"):
if label not in current_instance.get("labels"):
label = Label.objects.get(pk=label)
@@ -296,7 +307,6 @@ def track_labels(
# Label Removal
if len(requested_data.get("labels_list")) < len(current_instance.get("labels")):
-
for label in current_instance.get("labels"):
if label not in requested_data.get("labels_list"):
label = Label.objects.get(pk=label)
@@ -326,12 +336,10 @@ def track_assignees(
actor,
issue_activities,
):
-
# Assignee Addition
if len(requested_data.get("assignees_list")) > len(
current_instance.get("assignees")
):
-
for assignee in requested_data.get("assignees_list"):
if assignee not in current_instance.get("assignees"):
assignee = User.objects.get(pk=assignee)
@@ -354,7 +362,6 @@ def track_assignees(
if len(requested_data.get("assignees_list")) < len(
current_instance.get("assignees")
):
-
for assignee in current_instance.get("assignees"):
if assignee not in requested_data.get("assignees_list"):
assignee = User.objects.get(pk=assignee)
@@ -386,7 +393,6 @@ def track_blocks(
if len(requested_data.get("blocks_list")) > len(
current_instance.get("blocked_issues")
):
-
for block in requested_data.get("blocks_list"):
if (
len(
@@ -418,7 +424,6 @@ def track_blocks(
if len(requested_data.get("blocks_list")) < len(
current_instance.get("blocked_issues")
):
-
for blocked in current_instance.get("blocked_issues"):
if blocked.get("block") not in requested_data.get("blocks_list"):
issue = Issue.objects.get(pk=blocked.get("block"))
@@ -450,7 +455,6 @@ def track_blockings(
if len(requested_data.get("blockers_list")) > len(
current_instance.get("blocker_issues")
):
-
for block in requested_data.get("blockers_list"):
if (
len(
@@ -482,7 +486,6 @@ def track_blockings(
if len(requested_data.get("blockers_list")) < len(
current_instance.get("blocker_issues")
):
-
for blocked in current_instance.get("blocker_issues"):
if blocked.get("blocked_by") not in requested_data.get("blockers_list"):
issue = Issue.objects.get(pk=blocked.get("blocked_by"))
@@ -502,15 +505,250 @@ def track_blockings(
)
+def track_cycles(
+ requested_data,
+ current_instance,
+ issue_id,
+ project,
+ actor,
+ issue_activities,
+):
+ # Updated Records:
+ updated_records = current_instance.get("updated_cycle_issues", [])
+    created_records = json.loads(current_instance.get("created_cycle_issues", "[]"))
+
+ for updated_record in updated_records:
+ old_cycle = Cycle.objects.filter(
+ pk=updated_record.get("old_cycle_id", None)
+ ).first()
+ new_cycle = Cycle.objects.filter(
+ pk=updated_record.get("new_cycle_id", None)
+ ).first()
+
+ issue_activities.append(
+ IssueActivity(
+ issue_id=updated_record.get("issue_id"),
+ actor=actor,
+ verb="updated",
+ old_value=old_cycle.name,
+ new_value=new_cycle.name,
+ field="cycles",
+ project=project,
+ workspace=project.workspace,
+ comment=f"{actor.email} updated cycle from {old_cycle.name} to {new_cycle.name}",
+ old_identifier=old_cycle.id,
+ new_identifier=new_cycle.id,
+ )
+ )
+
+ for created_record in created_records:
+ cycle = Cycle.objects.filter(
+ pk=created_record.get("fields").get("cycle")
+ ).first()
+
+ issue_activities.append(
+ IssueActivity(
+ issue_id=created_record.get("fields").get("issue"),
+ actor=actor,
+ verb="created",
+ old_value="",
+ new_value=cycle.name,
+ field="cycles",
+ project=project,
+ workspace=project.workspace,
+ comment=f"{actor.email} added cycle {cycle.name}",
+ new_identifier=cycle.id,
+ )
+ )
+
+
+def track_modules(
+ requested_data,
+ current_instance,
+ issue_id,
+ project,
+ actor,
+ issue_activities,
+):
+ # Updated Records:
+ updated_records = current_instance.get("updated_module_issues", [])
+    created_records = json.loads(current_instance.get("created_module_issues", "[]"))
+
+ for updated_record in updated_records:
+ old_module = Module.objects.filter(
+ pk=updated_record.get("old_module_id", None)
+ ).first()
+ new_module = Module.objects.filter(
+ pk=updated_record.get("new_module_id", None)
+ ).first()
+
+ issue_activities.append(
+ IssueActivity(
+ issue_id=updated_record.get("issue_id"),
+ actor=actor,
+ verb="updated",
+ old_value=old_module.name,
+ new_value=new_module.name,
+ field="modules",
+ project=project,
+ workspace=project.workspace,
+ comment=f"{actor.email} updated module from {old_module.name} to {new_module.name}",
+ old_identifier=old_module.id,
+ new_identifier=new_module.id,
+ )
+ )
+
+ for created_record in created_records:
+ module = Module.objects.filter(
+ pk=created_record.get("fields").get("module")
+ ).first()
+ issue_activities.append(
+ IssueActivity(
+ issue_id=created_record.get("fields").get("issue"),
+ actor=actor,
+ verb="created",
+ old_value="",
+ new_value=module.name,
+ field="modules",
+ project=project,
+ workspace=project.workspace,
+ comment=f"{actor.email} added module {module.name}",
+ new_identifier=module.id,
+ )
+ )
+
+
+def create_issue_activity(
+ requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+ issue_activities.append(
+ IssueActivity(
+ issue_id=issue_id,
+ project=project,
+ workspace=project.workspace,
+ comment=f"{actor.email} created the issue",
+ verb="created",
+ actor=actor,
+ )
+ )
+
+
+def update_issue_activity(
+ requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+ ISSUE_ACTIVITY_MAPPER = {
+ "name": track_name,
+ "parent": track_parent,
+ "priority": track_priority,
+ "state": track_state,
+ "description": track_description,
+ "target_date": track_target_date,
+ "start_date": track_start_date,
+ "labels_list": track_labels,
+ "assignees_list": track_assignees,
+ "blocks_list": track_blocks,
+ "blockers_list": track_blockings,
+ "cycles_list": track_cycles,
+ "modules_list": track_modules,
+ }
+ for key in requested_data:
+ func = ISSUE_ACTIVITY_MAPPER.get(key, None)
+ if func is not None:
+ func(
+ requested_data,
+ current_instance,
+ issue_id,
+ project,
+ actor,
+ issue_activities,
+ )
+
+
+def create_comment_activity(
+ requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+ issue_activities.append(
+ IssueActivity(
+ issue_id=issue_id,
+ project=project,
+ workspace=project.workspace,
+ comment=f"{actor.email} created a comment",
+ verb="created",
+ actor=actor,
+ field="comment",
+ new_value=requested_data.get("comment_html"),
+ new_identifier=requested_data.get("id"),
+ issue_comment_id=requested_data.get("id", None),
+ )
+ )
+
+
+def update_comment_activity(
+ requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+ if current_instance.get("comment_html") != requested_data.get("comment_html"):
+ issue_activities.append(
+ IssueActivity(
+ issue_id=issue_id,
+ project=project,
+ workspace=project.workspace,
+ comment=f"{actor.email} updated a comment",
+ verb="updated",
+ actor=actor,
+ field="comment",
+ old_value=current_instance.get("comment_html"),
+ old_identifier=current_instance.get("id"),
+ new_value=requested_data.get("comment_html"),
+ new_identifier=current_instance.get("id"),
+ issue_comment_id=current_instance.get("id"),
+ )
+ )
+
+
+def delete_issue_activity(
+ requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+ issue_activities.append(
+ IssueActivity(
+ project=project,
+ workspace=project.workspace,
+ comment=f"{actor.email} deleted the issue",
+ verb="deleted",
+ actor=actor,
+ field="issue",
+ )
+ )
+
+
+def delete_comment_activity(
+ requested_data, current_instance, issue_id, project, actor, issue_activities
+):
+ issue_activities.append(
+ IssueActivity(
+ issue_id=issue_id,
+ project=project,
+ workspace=project.workspace,
+ comment=f"{actor.email} deleted the comment",
+ verb="deleted",
+ actor=actor,
+ field="comment",
+ )
+ )
+
+
# Receive message from room group
@job("default")
def issue_activity(event):
try:
issue_activities = []
-
+        event_type = event.get("type")  # avoid shadowing the builtin `type`
requested_data = json.loads(event.get("requested_data"))
- current_instance = json.loads(event.get("current_instance"))
- issue_id = event.get("issue_id")
+ current_instance = (
+ json.loads(event.get("current_instance"))
+ if event.get("current_instance") is not None
+ else None
+ )
+ issue_id = event.get("issue_id", None)
actor_id = event.get("actor_id")
project_id = event.get("project_id")
@@ -518,35 +756,43 @@ def issue_activity(event):
project = Project.objects.get(pk=project_id)
- ISSUE_ACTIVITY_MAPPER = {
- "name": track_name,
- "parent": track_parent,
- "priority": track_priority,
- "state": track_state,
- "description": track_description,
- "target_date": track_target_date,
- "start_date": track_start_date,
- "labels_list": track_labels,
- "assignees_list": track_assignees,
- "blocks_list": track_blocks,
- "blockers_list": track_blockings,
+ ACTIVITY_MAPPER = {
+ "issue.activity.created": create_issue_activity,
+ "issue.activity.updated": update_issue_activity,
+ "issue.activity.deleted": delete_issue_activity,
+ "comment.activity.created": create_comment_activity,
+ "comment.activity.updated": update_comment_activity,
+ "comment.activity.deleted": delete_comment_activity,
}
- for key in requested_data:
- func = ISSUE_ACTIVITY_MAPPER.get(key, None)
- if func is not None:
- func(
- requested_data,
- current_instance,
- issue_id,
- project,
- actor,
- issue_activities,
- )
+        func = ACTIVITY_MAPPER.get(event_type)
+ if func is not None:
+ func(
+ requested_data,
+ current_instance,
+ issue_id,
+ project,
+ actor,
+ issue_activities,
+ )
# Save all the values to database
- _ = IssueActivity.objects.bulk_create(issue_activities)
-
+ issue_activities_created = IssueActivity.objects.bulk_create(issue_activities)
+ # Post the updates to segway for integrations and webhooks
+ if len(issue_activities_created):
+            # Forward activity hooks only when a proxy base URL is configured
+ if settings.PROXY_BASE_URL:
+                for activity in issue_activities_created:
+                    headers = {"Content-Type": "application/json"}
+                    activity_json = json.dumps(
+                        IssueActivitySerializer(activity).data,
+                        cls=DjangoJSONEncoder,
+                    )
+                    _ = requests.post(
+                        f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(activity.workspace_id)}/projects/{str(activity.project_id)}/issues/{str(activity.issue_id)}/issue-activity-hooks/",
+                        data=activity_json,  # payload is already JSON-encoded above
+                        headers=headers,
+                    )
return
except Exception as e:
capture_exception(e)
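The task now dispatches on the event `type` through `ACTIVITY_MAPPER` instead of scanning `requested_data` keys, so issue and comment events share one entry point. A minimal self-contained sketch of that dispatch shape (handler names are illustrative):

def created(payload):  return f"created: {payload}"
def updated(payload):  return f"updated: {payload}"
def deleted(payload):  return f"deleted: {payload}"

ACTIVITY_MAPPER = {
    "issue.activity.created": created,
    "issue.activity.updated": updated,
    "issue.activity.deleted": deleted,
}

def handle(event):
    func = ACTIVITY_MAPPER.get(event.get("type"))
    if func is None:  # unknown types are ignored, mirroring the task
        return None
    return func(event.get("payload"))

print(handle({"type": "issue.activity.created", "payload": {"name": "Bug"}}))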
diff --git a/apiserver/plane/db/migrations/0020_auto_20230214_0118.py b/apiserver/plane/db/migrations/0020_auto_20230214_0118.py
new file mode 100644
index 00000000000..19276407821
--- /dev/null
+++ b/apiserver/plane/db/migrations/0020_auto_20230214_0118.py
@@ -0,0 +1,69 @@
+# Generated by Django 3.2.16 on 2023-02-13 19:48
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('db', '0019_auto_20230131_0049'),
+ ]
+
+ operations = [
+ migrations.RenameField(
+ model_name='label',
+ old_name='colour',
+ new_name='color',
+ ),
+ migrations.AddField(
+ model_name='apitoken',
+ name='workspace',
+ field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_tokens', to='db.workspace'),
+ ),
+ migrations.AddField(
+ model_name='issue',
+ name='completed_at',
+ field=models.DateTimeField(null=True),
+ ),
+ migrations.AddField(
+ model_name='issue',
+ name='sort_order',
+ field=models.FloatField(default=65535),
+ ),
+ migrations.AddField(
+ model_name='project',
+ name='cycle_view',
+ field=models.BooleanField(default=True),
+ ),
+ migrations.AddField(
+ model_name='project',
+ name='module_view',
+ field=models.BooleanField(default=True),
+ ),
+ migrations.AddField(
+ model_name='state',
+ name='default',
+ field=models.BooleanField(default=False),
+ ),
+ migrations.AlterField(
+ model_name='issue',
+ name='description',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='issue',
+ name='description_html',
+ field=models.TextField(blank=True, default=''),
+ ),
+ migrations.AlterField(
+ model_name='issuecomment',
+ name='comment_html',
+ field=models.TextField(blank=True, default=''),
+ ),
+ migrations.AlterField(
+ model_name='issuecomment',
+ name='comment_json',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ ]
diff --git a/apiserver/plane/db/migrations/0021_auto_20230223_0104.py b/apiserver/plane/db/migrations/0021_auto_20230223_0104.py
new file mode 100644
index 00000000000..bae6a086ad7
--- /dev/null
+++ b/apiserver/plane/db/migrations/0021_auto_20230223_0104.py
@@ -0,0 +1,185 @@
+# Generated by Django 3.2.16 on 2023-02-22 19:34
+
+from django.conf import settings
+from django.db import migrations, models
+import django.db.models.deletion
+import uuid
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('db', '0020_auto_20230214_0118'),
+ ]
+
+ operations = [
+ migrations.CreateModel(
+ name='GithubRepository',
+ fields=[
+ ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
+ ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
+ ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
+ ('name', models.CharField(max_length=500)),
+ ('url', models.URLField(null=True)),
+ ('config', models.JSONField(default=dict)),
+ ('repository_id', models.BigIntegerField()),
+ ('owner', models.CharField(max_length=500)),
+ ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='githubrepository_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
+ ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_githubrepository', to='db.project')),
+ ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='githubrepository_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
+ ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_githubrepository', to='db.workspace')),
+ ],
+ options={
+ 'verbose_name': 'Repository',
+ 'verbose_name_plural': 'Repositories',
+ 'db_table': 'github_repositories',
+ 'ordering': ('-created_at',),
+ },
+ ),
+ migrations.CreateModel(
+ name='Integration',
+ fields=[
+ ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
+ ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
+ ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
+ ('title', models.CharField(max_length=400)),
+ ('provider', models.CharField(max_length=400, unique=True)),
+ ('network', models.PositiveIntegerField(choices=[(1, 'Private'), (2, 'Public')], default=1)),
+ ('description', models.JSONField(default=dict)),
+ ('author', models.CharField(blank=True, max_length=400)),
+ ('webhook_url', models.TextField(blank=True)),
+ ('webhook_secret', models.TextField(blank=True)),
+ ('redirect_url', models.TextField(blank=True)),
+ ('metadata', models.JSONField(default=dict)),
+ ('verified', models.BooleanField(default=False)),
+ ('avatar_url', models.URLField(blank=True, null=True)),
+ ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='integration_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
+ ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='integration_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
+ ],
+ options={
+ 'verbose_name': 'Integration',
+ 'verbose_name_plural': 'Integrations',
+ 'db_table': 'integrations',
+ 'ordering': ('-created_at',),
+ },
+ ),
+ migrations.AlterField(
+ model_name='issueactivity',
+ name='issue',
+ field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='issue_activity', to='db.issue'),
+ ),
+ migrations.CreateModel(
+ name='WorkspaceIntegration',
+ fields=[
+ ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
+ ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
+ ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
+ ('metadata', models.JSONField(default=dict)),
+ ('config', models.JSONField(default=dict)),
+ ('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='integrations', to=settings.AUTH_USER_MODEL)),
+ ('api_token', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='integrations', to='db.apitoken')),
+ ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workspaceintegration_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
+ ('integration', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='integrated_workspaces', to='db.integration')),
+ ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='workspaceintegration_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
+ ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_integrations', to='db.workspace')),
+ ],
+ options={
+ 'verbose_name': 'Workspace Integration',
+ 'verbose_name_plural': 'Workspace Integrations',
+ 'db_table': 'workspace_integrations',
+ 'ordering': ('-created_at',),
+ 'unique_together': {('workspace', 'integration')},
+ },
+ ),
+ migrations.CreateModel(
+ name='IssueLink',
+ fields=[
+ ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
+ ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
+ ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
+ ('title', models.CharField(max_length=255, null=True)),
+ ('url', models.URLField()),
+ ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='issuelink_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
+ ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_link', to='db.issue')),
+ ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_issuelink', to='db.project')),
+ ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='issuelink_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
+ ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_issuelink', to='db.workspace')),
+ ],
+ options={
+ 'verbose_name': 'Issue Link',
+ 'verbose_name_plural': 'Issue Links',
+ 'db_table': 'issue_links',
+ 'ordering': ('-created_at',),
+ },
+ ),
+ migrations.CreateModel(
+ name='GithubRepositorySync',
+ fields=[
+ ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
+ ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
+ ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
+ ('credentials', models.JSONField(default=dict)),
+ ('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='user_syncs', to=settings.AUTH_USER_MODEL)),
+ ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='githubrepositorysync_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
+ ('label', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='repo_syncs', to='db.label')),
+ ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_githubrepositorysync', to='db.project')),
+ ('repository', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='syncs', to='db.githubrepository')),
+ ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='githubrepositorysync_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
+ ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_githubrepositorysync', to='db.workspace')),
+ ('workspace_integration', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='github_syncs', to='db.workspaceintegration')),
+ ],
+ options={
+ 'verbose_name': 'Github Repository Sync',
+ 'verbose_name_plural': 'Github Repository Syncs',
+ 'db_table': 'github_repository_syncs',
+ 'ordering': ('-created_at',),
+ 'unique_together': {('project', 'repository')},
+ },
+ ),
+ migrations.CreateModel(
+ name='GithubIssueSync',
+ fields=[
+ ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
+ ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
+ ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
+ ('repo_issue_id', models.BigIntegerField()),
+ ('github_issue_id', models.BigIntegerField()),
+ ('issue_url', models.URLField()),
+ ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='githubissuesync_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
+ ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='github_syncs', to='db.issue')),
+ ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_githubissuesync', to='db.project')),
+ ('repository_sync', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_syncs', to='db.githubrepositorysync')),
+ ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='githubissuesync_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
+ ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_githubissuesync', to='db.workspace')),
+ ],
+ options={
+ 'verbose_name': 'Github Issue Sync',
+ 'verbose_name_plural': 'Github Issue Syncs',
+ 'db_table': 'github_issue_syncs',
+ 'ordering': ('-created_at',),
+ 'unique_together': {('repository_sync', 'issue')},
+ },
+ ),
+ migrations.CreateModel(
+ name='GithubCommentSync',
+ fields=[
+ ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
+ ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
+ ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
+ ('repo_comment_id', models.BigIntegerField()),
+ ('comment', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comment_syncs', to='db.issuecomment')),
+ ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='githubcommentsync_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
+ ('issue_sync', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='comment_syncs', to='db.githubissuesync')),
+ ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_githubcommentsync', to='db.project')),
+ ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='githubcommentsync_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
+ ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_githubcommentsync', to='db.workspace')),
+ ],
+ options={
+ 'verbose_name': 'Github Comment Sync',
+ 'verbose_name_plural': 'Github Comment Syncs',
+ 'db_table': 'github_comment_syncs',
+ 'ordering': ('-created_at',),
+ 'unique_together': {('issue_sync', 'comment')},
+ },
+ ),
+ ]
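Both migrations are ordinary schema migrations, so deploying them is the usual `migrate` step. A hedged sketch using Django's management API, equivalent to running `python manage.py migrate db` (assumes `DJANGO_SETTINGS_MODULE` points at the apiserver settings):

import django
from django.core.management import call_command

django.setup()
call_command("migrate", "db", verbosity=1)  # applies 0020 and 0021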
diff --git a/apiserver/plane/db/mixins.py b/apiserver/plane/db/mixins.py
index b48e5c96552..728cb993351 100644
--- a/apiserver/plane/db/mixins.py
+++ b/apiserver/plane/db/mixins.py
@@ -1,3 +1,7 @@
+# Python imports
+import uuid
+
+# Django imports
from django.db import models
diff --git a/apiserver/plane/db/models/__init__.py b/apiserver/plane/db/models/__init__.py
index ef7ad5b8d49..ce8cf950be6 100644
--- a/apiserver/plane/db/models/__init__.py
+++ b/apiserver/plane/db/models/__init__.py
@@ -10,7 +10,13 @@
TeamMember,
)
-from .project import Project, ProjectMember, ProjectBaseModel, ProjectMemberInvite, ProjectIdentifier
+from .project import (
+ Project,
+ ProjectMember,
+ ProjectBaseModel,
+ ProjectMemberInvite,
+ ProjectIdentifier,
+)
from .issue import (
Issue,
@@ -23,6 +29,7 @@
IssueAssignee,
Label,
IssueBlocker,
+ IssueLink,
)
from .asset import FileAsset
@@ -37,6 +44,15 @@
from .view import View
-from .module import Module, ModuleMember, ModuleIssue, ModuleLink
+from .module import Module, ModuleMember, ModuleIssue, ModuleLink
+
+from .api_token import APIToken
-from .api_token import APIToken
\ No newline at end of file
+from .integration import (
+ WorkspaceIntegration,
+ Integration,
+ GithubRepository,
+ GithubRepositorySync,
+ GithubIssueSync,
+ GithubCommentSync,
+)
diff --git a/apiserver/plane/db/models/api_token.py b/apiserver/plane/db/models/api_token.py
index 32ba013bc60..b4009e6eb2c 100644
--- a/apiserver/plane/db/models/api_token.py
+++ b/apiserver/plane/db/models/api_token.py
@@ -17,7 +17,6 @@ def generate_token():
class APIToken(BaseModel):
-
token = models.CharField(max_length=255, unique=True, default=generate_token)
label = models.CharField(max_length=255, default=generate_label_token)
user = models.ForeignKey(
@@ -28,6 +27,9 @@ class APIToken(BaseModel):
user_type = models.PositiveSmallIntegerField(
choices=((0, "Human"), (1, "Bot")), default=0
)
+ workspace = models.ForeignKey(
+ "db.Workspace", related_name="api_tokens", on_delete=models.CASCADE, null=True
+ )
class Meta:
verbose_name = "API Token"
diff --git a/apiserver/plane/db/models/integration/__init__.py b/apiserver/plane/db/models/integration/__init__.py
new file mode 100644
index 00000000000..4742a25293a
--- /dev/null
+++ b/apiserver/plane/db/models/integration/__init__.py
@@ -0,0 +1,2 @@
+from .base import Integration, WorkspaceIntegration
+from .github import GithubRepository, GithubRepositorySync, GithubIssueSync, GithubCommentSync
diff --git a/apiserver/plane/db/models/integration/base.py b/apiserver/plane/db/models/integration/base.py
new file mode 100644
index 00000000000..47db0483c48
--- /dev/null
+++ b/apiserver/plane/db/models/integration/base.py
@@ -0,0 +1,68 @@
+# Python imports
+import uuid
+
+# Django imports
+from django.db import models
+
+# Module imports
+from plane.db.models import BaseModel
+from plane.db.mixins import AuditModel
+
+
+class Integration(AuditModel):
+ id = models.UUIDField(
+ default=uuid.uuid4, unique=True, editable=False, db_index=True, primary_key=True
+ )
+ title = models.CharField(max_length=400)
+ provider = models.CharField(max_length=400, unique=True)
+ network = models.PositiveIntegerField(
+ default=1, choices=((1, "Private"), (2, "Public"))
+ )
+ description = models.JSONField(default=dict)
+ author = models.CharField(max_length=400, blank=True)
+ webhook_url = models.TextField(blank=True)
+ webhook_secret = models.TextField(blank=True)
+ redirect_url = models.TextField(blank=True)
+ metadata = models.JSONField(default=dict)
+ verified = models.BooleanField(default=False)
+ avatar_url = models.URLField(blank=True, null=True)
+
+ def __str__(self):
+ """Return provider of the integration"""
+ return f"{self.provider}"
+
+ class Meta:
+ verbose_name = "Integration"
+ verbose_name_plural = "Integrations"
+ db_table = "integrations"
+ ordering = ("-created_at",)
+
+
+class WorkspaceIntegration(BaseModel):
+ workspace = models.ForeignKey(
+ "db.Workspace", related_name="workspace_integrations", on_delete=models.CASCADE
+ )
+ # Bot user
+ actor = models.ForeignKey(
+ "db.User", related_name="integrations", on_delete=models.CASCADE
+ )
+ integration = models.ForeignKey(
+ "db.Integration", related_name="integrated_workspaces", on_delete=models.CASCADE
+ )
+ api_token = models.ForeignKey(
+ "db.APIToken", related_name="integrations", on_delete=models.CASCADE
+ )
+ metadata = models.JSONField(default=dict)
+
+ config = models.JSONField(default=dict)
+
+ def __str__(self):
+ """Return name of the integration and workspace"""
+ return f"{self.workspace.name} <{self.integration.provider}>"
+
+ class Meta:
+ unique_together = ["workspace", "integration"]
+ verbose_name = "Workspace Integration"
+ verbose_name_plural = "Workspace Integrations"
+ db_table = "workspace_integrations"
+ ordering = ("-created_at",)
diff --git a/apiserver/plane/db/models/integration/github.py b/apiserver/plane/db/models/integration/github.py
new file mode 100644
index 00000000000..130925c21d6
--- /dev/null
+++ b/apiserver/plane/db/models/integration/github.py
@@ -0,0 +1,99 @@
+# Python imports
+import uuid
+
+# Django imports
+from django.db import models
+
+# Module imports
+from plane.db.models import ProjectBaseModel
+from plane.db.mixins import AuditModel
+
+
+class GithubRepository(ProjectBaseModel):
+ name = models.CharField(max_length=500)
+ url = models.URLField(null=True)
+ config = models.JSONField(default=dict)
+ repository_id = models.BigIntegerField()
+ owner = models.CharField(max_length=500)
+
+ def __str__(self):
+ """Return the repo name"""
+ return f"{self.name}"
+
+ class Meta:
+ verbose_name = "Repository"
+ verbose_name_plural = "Repositories"
+ db_table = "github_repositories"
+ ordering = ("-created_at",)
+
+
+class GithubRepositorySync(ProjectBaseModel):
+ repository = models.OneToOneField(
+ "db.GithubRepository", on_delete=models.CASCADE, related_name="syncs"
+ )
+ credentials = models.JSONField(default=dict)
+ # Bot user
+ actor = models.ForeignKey(
+ "db.User", related_name="user_syncs", on_delete=models.CASCADE
+ )
+ workspace_integration = models.ForeignKey(
+ "db.WorkspaceIntegration", related_name="github_syncs", on_delete=models.CASCADE
+ )
+ label = models.ForeignKey(
+ "db.Label", on_delete=models.SET_NULL, null=True, related_name="repo_syncs"
+ )
+
+ def __str__(self):
+ """Return the repo sync"""
+ return f"{self.repository.name} <{self.project.name}>"
+
+ class Meta:
+ unique_together = ["project", "repository"]
+ verbose_name = "Github Repository Sync"
+ verbose_name_plural = "Github Repository Syncs"
+ db_table = "github_repository_syncs"
+ ordering = ("-created_at",)
+
+
+class GithubIssueSync(ProjectBaseModel):
+ repo_issue_id = models.BigIntegerField()
+ github_issue_id = models.BigIntegerField()
+ issue_url = models.URLField(blank=False)
+ issue = models.ForeignKey(
+ "db.Issue", related_name="github_syncs", on_delete=models.CASCADE
+ )
+ repository_sync = models.ForeignKey(
+ "db.GithubRepositorySync", related_name="issue_syncs", on_delete=models.CASCADE
+ )
+
+ def __str__(self):
+ """Return the github issue sync"""
+        return f"{self.repository_sync.repository.name}-{self.project.name}-{self.issue.name}"
+
+ class Meta:
+ unique_together = ["repository_sync", "issue"]
+ verbose_name = "Github Issue Sync"
+ verbose_name_plural = "Github Issue Syncs"
+ db_table = "github_issue_syncs"
+ ordering = ("-created_at",)
+
+
+class GithubCommentSync(ProjectBaseModel):
+ repo_comment_id = models.BigIntegerField()
+ comment = models.ForeignKey(
+ "db.IssueComment", related_name="comment_syncs", on_delete=models.CASCADE
+ )
+ issue_sync = models.ForeignKey(
+ "db.GithubIssueSync", related_name="comment_syncs", on_delete=models.CASCADE
+ )
+
+ def __str__(self):
+        """Return the github comment sync"""
+ return f"{self.comment.id}"
+
+ class Meta:
+ unique_together = ["issue_sync", "comment"]
+ verbose_name = "Github Comment Sync"
+ verbose_name_plural = "Github Comment Syncs"
+ db_table = "github_comment_syncs"
+ ordering = ("-created_at",)
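The four GitHub models chain through their `related_name`s (`syncs`, `issue_syncs`, `comment_syncs`), so everything synced for a repository is reachable without raw joins. A hedged traversal sketch, assuming a configured Django environment and a repository that already has a sync row:

from plane.db.models import GithubRepository

def synced_comment_count(repo: GithubRepository) -> int:
    sync = repo.syncs  # OneToOneField reverse accessor (GithubRepositorySync)
    total = 0
    for issue_sync in sync.issue_syncs.all():        # GithubIssueSync rows
        total += issue_sync.comment_syncs.count()    # GithubCommentSync rows
    return total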
diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py
index c3984b3d2ad..aea41677eff 100644
--- a/apiserver/plane/db/models/issue.py
+++ b/apiserver/plane/db/models/issue.py
@@ -4,11 +4,13 @@
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
+from django.utils import timezone
# Module imports
from . import ProjectBaseModel
from plane.utils.html_processor import strip_tags
+
# TODO: Handle identifiers for Bulk Inserts - nk
class Issue(ProjectBaseModel):
PRIORITY_CHOICES = (
@@ -32,8 +34,8 @@ class Issue(ProjectBaseModel):
related_name="state_issue",
)
name = models.CharField(max_length=255, verbose_name="Issue Name")
- description = models.JSONField(blank=True, null=True)
- description_html = models.TextField(blank=True, null=True)
+ description = models.JSONField(blank=True, default=dict)
+ description_html = models.TextField(blank=True, default="")
description_stripped = models.TextField(blank=True, null=True)
priority = models.CharField(
max_length=30,
@@ -56,6 +58,8 @@ class Issue(ProjectBaseModel):
labels = models.ManyToManyField(
"db.Label", blank=True, related_name="labels", through="IssueLabel"
)
+ sort_order = models.FloatField(default=65535)
+ completed_at = models.DateTimeField(null=True)
class Meta:
verbose_name = "Issue"
@@ -65,6 +69,36 @@ class Meta:
def save(self, *args, **kwargs):
# This means that the model isn't saved to the database yet
+ if self.state is None:
+ try:
+ from plane.db.models import State
+
+ default_state = State.objects.filter(
+ project=self.project, default=True
+ ).first()
+            # if there is no default state, fall back to the project's first state
+ if default_state is None:
+ self.state = State.objects.filter(project=self.project).first()
+ else:
+ self.state = default_state
+ except ImportError:
+ pass
+ else:
+ try:
+ from plane.db.models import State
+
+ # Get the completed states of the project
+ completed_states = State.objects.filter(
+ group="completed", project=self.project
+ ).values_list("pk", flat=True)
+            # Check whether the current state is one of the project's completed states
+ if self.state.id in completed_states:
+ self.completed_at = timezone.now()
+ else:
+ self.completed_at = None
+
+ except ImportError:
+ pass
if self._state.adding:
# Get the maximum display_id value from the database
@@ -75,15 +109,12 @@ def save(self, *args, **kwargs):
# If it isn't none, just use the last ID specified (which should be the greatest) and add one to it
if last_id is not None:
self.sequence_id = last_id + 1
- if self.state is None:
- try:
- from plane.db.models import State
- self.state, created = State.objects.get_or_create(
- project=self.project, name="Backlog"
- )
- except ImportError:
- pass
+ largest_sort_order = Issue.objects.filter(
+ project=self.project, state=self.state
+ ).aggregate(largest=models.Max("sort_order"))["largest"]
+ if largest_sort_order is not None:
+ self.sort_order = largest_sort_order + 10000
# Strip the html tags using html parser
self.description_stripped = (
@@ -137,9 +168,26 @@ def __str__(self):
return f"{self.issue.name} {self.assignee.email}"
+class IssueLink(ProjectBaseModel):
+ title = models.CharField(max_length=255, null=True)
+ url = models.URLField()
+ issue = models.ForeignKey(
+ "db.Issue", on_delete=models.CASCADE, related_name="issue_link"
+ )
+
+ class Meta:
+ verbose_name = "Issue Link"
+ verbose_name_plural = "Issue Links"
+ db_table = "issue_links"
+ ordering = ("-created_at",)
+
+ def __str__(self):
+ return f"{self.issue.name} {self.url}"
+
+
class IssueActivity(ProjectBaseModel):
issue = models.ForeignKey(
- Issue, on_delete=models.CASCADE, related_name="issue_activity"
+ Issue, on_delete=models.SET_NULL, null=True, related_name="issue_activity"
)
verb = models.CharField(max_length=255, verbose_name="Action", default="created")
field = models.CharField(
@@ -196,8 +244,8 @@ def __str__(self):
class IssueComment(ProjectBaseModel):
comment_stripped = models.TextField(verbose_name="Comment", blank=True)
- comment_json = models.JSONField(blank=True, null=True)
- comment_html = models.TextField(blank=True)
+ comment_json = models.JSONField(blank=True, default=dict)
+ comment_html = models.TextField(blank=True, default="")
attachments = ArrayField(models.URLField(), size=10, blank=True, default=list)
issue = models.ForeignKey(Issue, on_delete=models.CASCADE)
# System can also create comment
@@ -246,7 +294,6 @@ def __str__(self):
class Label(ProjectBaseModel):
-
parent = models.ForeignKey(
"self",
on_delete=models.CASCADE,
@@ -256,7 +303,7 @@ class Label(ProjectBaseModel):
)
name = models.CharField(max_length=255)
description = models.TextField(blank=True)
- colour = models.CharField(max_length=255, blank=True)
+ color = models.CharField(max_length=255, blank=True)
class Meta:
verbose_name = "Label"
@@ -269,7 +316,6 @@ def __str__(self):
class IssueLabel(ProjectBaseModel):
-
issue = models.ForeignKey(
"db.Issue", on_delete=models.CASCADE, related_name="label_issue"
)
@@ -288,7 +334,6 @@ def __str__(self):
class IssueSequence(ProjectBaseModel):
-
issue = models.ForeignKey(
Issue, on_delete=models.SET_NULL, related_name="issue_sequence", null=True
)
@@ -305,7 +350,6 @@ class Meta:
# TODO: Find a better method to save the model
@receiver(post_save, sender=Issue)
def create_issue_sequence(sender, instance, created, **kwargs):
-
if created:
IssueSequence.objects.create(
issue=instance, sequence=instance.sequence_id, project=instance.project
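The rewritten `Issue.save` seeds `sort_order` at `Max("sort_order") + 10000` within the issue's state, leaving gaps so a drag-and-drop reorder can place an item between two neighbours by averaging instead of rewriting every row. The arithmetic, as a self-contained sketch:

SORT_STEP, SORT_DEFAULT = 10_000, 65_535

def next_sort_order(existing):
    # Mirrors Max("sort_order") + 10000 in Issue.save()
    return max(existing) + SORT_STEP if existing else SORT_DEFAULT

orders = []
for _ in range(3):
    orders.append(next_sort_order(orders))
print(orders)                       # [65535, 75535, 85535]
print((orders[0] + orders[1]) / 2)  # 70535.0 -- slots between the first two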
diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py
index 545bcd8a685..4a180642b43 100644
--- a/apiserver/plane/db/models/project.py
+++ b/apiserver/plane/db/models/project.py
@@ -29,7 +29,6 @@ def get_default_props():
class Project(BaseModel):
-
NETWORK_CHOICES = ((0, "Secret"), (2, "Public"))
name = models.CharField(max_length=255, verbose_name="Project Name")
description = models.TextField(verbose_name="Project Description", blank=True)
@@ -63,6 +62,8 @@ class Project(BaseModel):
blank=True,
)
icon = models.CharField(max_length=255, null=True, blank=True)
+ module_view = models.BooleanField(default=True)
+ cycle_view = models.BooleanField(default=True)
def __str__(self):
"""Return name of the project"""
@@ -82,7 +83,6 @@ def save(self, *args, **kwargs):
class ProjectBaseModel(BaseModel):
-
project = models.ForeignKey(
Project, on_delete=models.CASCADE, related_name="project_%(class)s"
)
@@ -117,7 +117,6 @@ def __str__(self):
class ProjectMember(ProjectBaseModel):
-
member = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
@@ -141,9 +140,9 @@ def __str__(self):
"""Return members of the project"""
return f"{self.member.email} <{self.project.name}>"
+
# TODO: Remove workspace relation later
class ProjectIdentifier(AuditModel):
-
workspace = models.ForeignKey(
"db.Workspace", models.CASCADE, related_name="project_identifiers", null=True
)
diff --git a/apiserver/plane/db/models/state.py b/apiserver/plane/db/models/state.py
index 2c62879181b..2fa1ebe3829 100644
--- a/apiserver/plane/db/models/state.py
+++ b/apiserver/plane/db/models/state.py
@@ -23,6 +23,7 @@ class State(ProjectBaseModel):
default="backlog",
max_length=20,
)
+ default = models.BooleanField(default=False)
def __str__(self):
"""Return name of the state"""
@@ -37,4 +38,13 @@ class Meta:
def save(self, *args, **kwargs):
self.slug = slugify(self.name)
+ if self._state.adding:
+ # Get the maximum sequence value from the database
+ last_id = State.objects.filter(project=self.project).aggregate(
+ largest=models.Max("sequence")
+ )["largest"]
+ # if last_id is not None
+ if last_id is not None:
+ self.sequence = last_id + 15000
+
return super().save(*args, **kwargs)
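`State.save` applies the same gap scheme to `sequence` (steps of 15000), matching the values seeded by `ProjectViewSet.create` (15000, 25000, ...), so new states land after the defaults while leaving room to insert between them. A hedged usage sketch, assuming a configured environment and an existing `project` instance:

from plane.db.models import State

# `project` is assumed to exist already; names are illustrative.
review = State.objects.create(project=project, name="Review", group="started")
qa = State.objects.create(project=project, name="QA", group="started")
assert qa.sequence == review.sequence + 15000  # gap left by State.save()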
diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py
index e14c250b498..9d270662eb2 100644
--- a/apiserver/plane/settings/common.py
+++ b/apiserver/plane/settings/common.py
@@ -1,12 +1,13 @@
import os
import datetime
from datetime import timedelta
+from django.core.management.utils import get_random_secret_key
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-SECRET_KEY = os.environ.get("SECRET_KEY")
+SECRET_KEY = os.environ.get("SECRET_KEY", get_random_secret_key())
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
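On the `SECRET_KEY` fallback: `get_random_secret_key()` returns a fresh 50-character key on every call, so each process restart invalidates anything signed with the previous key (sessions, password-reset tokens). That is fine as a development default, but persistent deployments should still pin the env var. A quick runnable check:

from django.core.management.utils import get_random_secret_key

k1, k2 = get_random_secret_key(), get_random_secret_key()
print(len(k1))   # 50
print(k1 == k2)  # False -- a new key per call, hence per process start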
diff --git a/apiserver/plane/settings/local.py b/apiserver/plane/settings/local.py
index 4d4af9b77d8..ccb3880125b 100644
--- a/apiserver/plane/settings/local.py
+++ b/apiserver/plane/settings/local.py
@@ -2,6 +2,7 @@
from __future__ import absolute_import
+import dj_database_url
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.redis import RedisIntegration
@@ -24,6 +25,10 @@
}
}
+DOCKERIZED = os.environ.get("DOCKERIZED", False)
+
+if DOCKERIZED:
+ DATABASES["default"] = dj_database_url.config()
CACHES = {
"default": {
@@ -41,15 +46,16 @@
CORS_ORIGIN_ALLOW_ALL = True
-sentry_sdk.init(
- dsn=os.environ.get("SENTRY_DSN"),
- integrations=[DjangoIntegration(), RedisIntegration()],
- # If you wish to associate users to errors (assuming you are using
- # django.contrib.auth) you may enable sending PII data.
- send_default_pii=True,
- environment="local",
- traces_sample_rate=0.7,
-)
+if os.environ.get("SENTRY_DSN", False):
+ sentry_sdk.init(
+ dsn=os.environ.get("SENTRY_DSN"),
+ integrations=[DjangoIntegration(), RedisIntegration()],
+ # If you wish to associate users to errors (assuming you are using
+ # django.contrib.auth) you may enable sending PII data.
+ send_default_pii=True,
+ environment="local",
+ traces_sample_rate=0.7,
+ )
REDIS_HOST = "localhost"
REDIS_PORT = 6379
@@ -64,5 +70,11 @@
},
}
-WEB_URL = "http://localhost:3000"
+MEDIA_URL = "/uploads/"
+MEDIA_ROOT = os.path.join(BASE_DIR, "uploads")
+
+if DOCKERIZED:
+ REDIS_URL = os.environ.get("REDIS_URL")
+WEB_URL = os.environ.get("WEB_URL", "http://localhost:3000")
+PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False)
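`dj_database_url.config()` reads `DATABASE_URL` and expands it into a Django `DATABASES` entry, which is what lets the dockerized setup pass one URL instead of separate host/port/user settings. A self-contained sketch using `parse` on a literal URL (the credentials are illustrative):

import dj_database_url

cfg = dj_database_url.parse("postgres://plane:plane@db:5432/plane")
print(cfg["ENGINE"])                          # e.g. django.db.backends.postgresql...
print(cfg["HOST"], cfg["PORT"], cfg["NAME"])  # db 5432 plane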
diff --git a/apiserver/plane/settings/production.py b/apiserver/plane/settings/production.py
index c8390148464..1b6ac2cf751 100644
--- a/apiserver/plane/settings/production.py
+++ b/apiserver/plane/settings/production.py
@@ -33,6 +33,10 @@
DATABASES["default"] = dj_database_url.config()
SITE_ID = 1
+DOCKERIZED = os.environ.get(
+ "DOCKERIZED", False
+) # Set the variable true if running in docker-compose environment
+
# Enable Connection Pooling (if desired)
# DATABASES['default']['ENGINE'] = 'django_postgrespool'
@@ -48,99 +52,110 @@
# Simplified static file serving.
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
+if os.environ.get("SENTRY_DSN", False):
+ sentry_sdk.init(
+ dsn=os.environ.get("SENTRY_DSN", ""),
+ integrations=[DjangoIntegration(), RedisIntegration()],
+ # If you wish to associate users to errors (assuming you are using
+ # django.contrib.auth) you may enable sending PII data.
+ traces_sample_rate=1,
+ send_default_pii=True,
+ environment="production",
+ )
-sentry_sdk.init(
- dsn=os.environ.get("SENTRY_DSN"),
- integrations=[DjangoIntegration(), RedisIntegration()],
- # If you wish to associate users to errors (assuming you are using
- # django.contrib.auth) you may enable sending PII data.
- traces_sample_rate=1,
- send_default_pii=True,
- environment="production",
-)
+if (
+ os.environ.get("AWS_REGION", False)
+ and os.environ.get("AWS_ACCESS_KEY_ID", False)
+ and os.environ.get("AWS_SECRET_ACCESS_KEY", False)
+ and os.environ.get("AWS_S3_BUCKET_NAME", False)
+):
+ # The AWS region to connect to.
+ AWS_REGION = os.environ.get("AWS_REGION", "")
-# The AWS region to connect to.
-AWS_REGION = os.environ.get("AWS_REGION")
+ # The AWS access key to use.
+ AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "")
-# The AWS access key to use.
-AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+ # The AWS secret access key to use.
+ AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "")
-# The AWS secret access key to use.
-AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+ # The optional AWS session token to use.
+ # AWS_SESSION_TOKEN = ""
-# The optional AWS session token to use.
-# AWS_SESSION_TOKEN = ""
+ # The name of the bucket to store files in.
+ AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "")
+ # How to construct S3 URLs ("auto", "path", "virtual").
+ AWS_S3_ADDRESSING_STYLE = "auto"
-# The name of the bucket to store files in.
-AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME")
+ # The full URL to the S3 endpoint. Leave blank to use the default region URL.
+ AWS_S3_ENDPOINT_URL = ""
-# How to construct S3 URLs ("auto", "path", "virtual").
-AWS_S3_ADDRESSING_STYLE = "auto"
+ # A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator.
+ AWS_S3_KEY_PREFIX = ""
-# The full URL to the S3 endpoint. Leave blank to use the default region URL.
-AWS_S3_ENDPOINT_URL = ""
+ # Whether to enable authentication for stored files. If True, then generated URLs will include an authentication
+ # token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token,
+ # and their permissions will be set to "public-read".
+ AWS_S3_BUCKET_AUTH = False
-# A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator.
-AWS_S3_KEY_PREFIX = ""
+ # How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH`
+ # is True. It also affects the "Cache-Control" header of the files.
+ # Important: Changing this setting will not affect existing files.
+    AWS_S3_MAX_AGE_SECONDS = 60 * 60  # 1 hour.
-# Whether to enable authentication for stored files. If True, then generated URLs will include an authentication
-# token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token,
-# and their permissions will be set to "public-read".
-AWS_S3_BUCKET_AUTH = False
+ # A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting
+ # cannot be used with `AWS_S3_BUCKET_AUTH`.
+ AWS_S3_PUBLIC_URL = ""
-# How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH`
-# is True. It also affects the "Cache-Control" header of the files.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_MAX_AGE_SECONDS = 60 * 60 # 1 hours.
+ # If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you
+ # understand the consequences before enabling.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_REDUCED_REDUNDANCY = False
-# A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting
-# cannot be used with `AWS_S3_BUCKET_AUTH`.
-AWS_S3_PUBLIC_URL = ""
+ # The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a
+ # single `name` argument.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_CONTENT_DISPOSITION = ""
-# If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you
-# understand the consequences before enabling.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_REDUCED_REDUNDANCY = False
+ # The Content-Language header used when the file is downloaded. This can be a string, or a function taking a
+ # single `name` argument.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_CONTENT_LANGUAGE = ""
-# The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a
-# single `name` argument.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_CONTENT_DISPOSITION = ""
+ # A mapping of custom metadata for each file. Each value can be a string, or a function taking a
+ # single `name` argument.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_METADATA = {}
-# The Content-Language header used when the file is downloaded. This can be a string, or a function taking a
-# single `name` argument.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_CONTENT_LANGUAGE = ""
+ # If True, then files will be stored using AES256 server-side encryption.
+ # If this is a string value (e.g., "aws:kms"), that encryption type will be used.
+ # Otherwise, server-side encryption is not be enabled.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_ENCRYPT_KEY = False
-# A mapping of custom metadata for each file. Each value can be a string, or a function taking a
-# single `name` argument.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_METADATA = {}
+ # The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present.
+ # This is only relevant if AWS S3 KMS server-side encryption is enabled (above).
+ # AWS_S3_KMS_ENCRYPTION_KEY_ID = ""
-# If True, then files will be stored using AES256 server-side encryption.
-# If this is a string value (e.g., "aws:kms"), that encryption type will be used.
-# Otherwise, server-side encryption is not be enabled.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_ENCRYPT_KEY = False
+ # If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their
+ # compressed size is smaller than their uncompressed size.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_GZIP = True
-# The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present.
-# This is only relevant if AWS S3 KMS server-side encryption is enabled (above).
-# AWS_S3_KMS_ENCRYPTION_KEY_ID = ""
+ # The signature version to use for S3 requests.
+ AWS_S3_SIGNATURE_VERSION = None
-# If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their
-# compressed size is smaller than their uncompressed size.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_GZIP = True
+ # If True, then files with the same name will overwrite each other. By default it's set to False to have
+ # extra characters appended.
+ AWS_S3_FILE_OVERWRITE = False
-# The signature version to use for S3 requests.
-AWS_S3_SIGNATURE_VERSION = None
+ # AWS Settings End
-# If True, then files with the same name will overwrite each other. By default it's set to False to have
-# extra characters appended.
-AWS_S3_FILE_OVERWRITE = False
-
-# AWS Settings End
+ DEFAULT_FILE_STORAGE = "django_s3_storage.storage.S3Storage"
+
+else:
+ MEDIA_URL = "/uploads/"
+ MEDIA_ROOT = os.path.join(BASE_DIR, "uploads")
# Enable Connection Pooling (if desired)
@@ -155,7 +170,6 @@
]
-DEFAULT_FILE_STORAGE = "django_s3_storage.storage.S3Storage"
# Simplified static file serving.
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
@@ -165,16 +179,27 @@
REDIS_URL = os.environ.get("REDIS_URL")
-CACHES = {
-    "default": {
-        "BACKEND": "django_redis.cache.RedisCache",
-        "LOCATION": REDIS_URL,
-        "OPTIONS": {
-            "CLIENT_CLASS": "django_redis.client.DefaultClient",
-            "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False},
-        },
+if DOCKERIZED:
+    CACHES = {
+        "default": {
+            "BACKEND": "django_redis.cache.RedisCache",
+            "LOCATION": REDIS_URL,
+            "OPTIONS": {
+                "CLIENT_CLASS": "django_redis.client.DefaultClient",
+            },
+        }
+    }
+else:
+    CACHES = {
+        "default": {
+            "BACKEND": "django_redis.cache.RedisCache",
+            "LOCATION": REDIS_URL,
+            "OPTIONS": {
+                "CLIENT_CLASS": "django_redis.client.DefaultClient",
+                "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False},
+            },
+        }
    }
-}
RQ_QUEUES = {
"default": {
@@ -183,10 +208,6 @@
}
-url = urlparse(os.environ.get("REDIS_URL"))
-
-DOCKERIZED = os.environ.get(
-    "DOCKERIZED", False
-)  # Set the variable true if running in docker-compose environment
-
WEB_URL = os.environ.get("WEB_URL")
+
+PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False)
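
Note: the production settings hunk above now enables S3 storage only when all four AWS
variables are present, and falls back to local uploads otherwise. A minimal sketch of
that selection logic, assuming the same variable names the diff checks (the tuple name
REQUIRED_AWS_VARS is invented for this illustration):

    import os

    # All four variables must be set and non-empty for S3 storage to be used.
    REQUIRED_AWS_VARS = (
        "AWS_REGION",
        "AWS_ACCESS_KEY_ID",
        "AWS_SECRET_ACCESS_KEY",
        "AWS_S3_BUCKET_NAME",
    )

    if all(os.environ.get(var) for var in REQUIRED_AWS_VARS):
        DEFAULT_FILE_STORAGE = "django_s3_storage.storage.S3Storage"
    else:
        MEDIA_URL = "/uploads/"  # uploads are served from the local filesystem instead
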
diff --git a/apiserver/plane/settings/staging.py b/apiserver/plane/settings/staging.py
index 725f2cd850f..0e58ab224de 100644
--- a/apiserver/plane/settings/staging.py
+++ b/apiserver/plane/settings/staging.py
@@ -185,3 +185,5 @@
WEB_URL = os.environ.get("WEB_URL")
+
+PROXY_BASE_URL = os.environ.get("PROXY_BASE_URL", False)
diff --git a/apiserver/plane/utils/grouper.py b/apiserver/plane/utils/grouper.py
new file mode 100644
index 00000000000..51c1f61c298
--- /dev/null
+++ b/apiserver/plane/utils/grouper.py
@@ -0,0 +1,31 @@
+def group_results(results_data, group_by):
+ """
+ Utility function to group data by a given attribute.
+ Supports grouping by both string- and list-valued attributes.
+ """
+ response_dict = dict()
+
+ for value in results_data:
+ group_attribute = value.get(group_by, None)
+ if isinstance(group_attribute, list):
+ if len(group_attribute):
+ for attrib in group_attribute:
+ if str(attrib) in response_dict:
+ response_dict[str(attrib)].append(value)
+ else:
+ response_dict[str(attrib)] = []
+ response_dict[str(attrib)].append(value)
+ else:
+ if str(None) in response_dict:
+ response_dict[str(None)].append(value)
+ else:
+ response_dict[str(None)] = []
+ response_dict[str(None)].append(value)
+ else:
+ if str(group_attribute) in response_dict:
+ response_dict[str(group_attribute)].append(value)
+ else:
+ response_dict[str(group_attribute)] = []
+ response_dict[str(group_attribute)].append(value)
+
+ return response_dict
\ No newline at end of file
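
Note: group_results buckets each result dict under the stringified value of the group_by
attribute. List-valued attributes fan an item out into one bucket per element, and empty
lists fall under the "None" key. A quick usage sketch, with sample data invented for
illustration:

    from plane.utils.grouper import group_results

    issues = [
        {"id": 1, "priority": "high", "labels": ["bug", "ui"]},
        {"id": 2, "priority": "low", "labels": []},
    ]

    # Grouping by a string attribute places each issue in exactly one bucket.
    group_results(issues, "priority")  # {"high": [{...}], "low": [{...}]}

    # Grouping by a list attribute duplicates issue 1 under "bug" and "ui";
    # issue 2 has no labels, so it is grouped under the key "None".
    group_results(issues, "labels")  # {"bug": [{...}], "ui": [{...}], "None": [{...}]}
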
diff --git a/apiserver/plane/utils/integrations/__init__.py b/apiserver/plane/utils/integrations/__init__.py
new file mode 100644
index 00000000000..e69de29bb2d
diff --git a/apiserver/plane/utils/integrations/github.py b/apiserver/plane/utils/integrations/github.py
new file mode 100644
index 00000000000..e06ac31f75e
--- /dev/null
+++ b/apiserver/plane/utils/integrations/github.py
@@ -0,0 +1,74 @@
+import os
+import jwt
+import requests
+from datetime import datetime, timedelta
+from cryptography.hazmat.primitives.serialization import load_pem_private_key
+from cryptography.hazmat.backends import default_backend
+
+
+def get_jwt_token():
+ app_id = os.environ.get("GITHUB_APP_ID", "")
+ secret = bytes(os.environ.get("GITHUB_APP_PRIVATE_KEY", ""), encoding="utf8")
+ current_timestamp = int(datetime.now().timestamp())
+ due_date = datetime.now() + timedelta(minutes=10)
+ expiry = int(due_date.timestamp())
+ payload = {
+ "iss": app_id,
+ "sub": app_id,
+ "exp": expiry,
+ "iat": current_timestamp,
+ "aud": "https://github.com/login/oauth/access_token",
+ }
+
+ priv_rsakey = load_pem_private_key(secret, None, default_backend())
+ token = jwt.encode(payload, priv_rsakey, algorithm="RS256")
+ return token
+
+
+def get_github_metadata(installation_id):
+ token = get_jwt_token()
+
+ url = f"https://api.github.com/app/installations/{installation_id}"
+ headers = {
+ "Authorization": "Bearer " + token,
+ "Accept": "application/vnd.github+json",
+ }
+ response = requests.get(url, headers=headers).json()
+ return response
+
+
+def get_github_repos(access_tokens_url, repositories_url):
+ token = get_jwt_token()
+
+ headers = {
+ "Authorization": "Bearer " + token,
+ "Accept": "application/vnd.github+json",
+ }
+
+ oauth_response = requests.post(
+ access_tokens_url,
+ headers=headers,
+ ).json()
+
+ oauth_token = oauth_response.get("token")
+ headers = {
+ "Authorization": "Bearer " + oauth_token,
+ "Accept": "application/vnd.github+json",
+ }
+ response = requests.get(
+ repositories_url,
+ headers=headers,
+ ).json()
+ return response
+
+
+def delete_github_installation(installation_id):
+ token = get_jwt_token()
+
+ url = f"https://api.github.com/app/installations/{installation_id}"
+ headers = {
+ "Authorization": "Bearer " + token,
+ "Accept": "application/vnd.github+json",
+ }
+ response = requests.delete(url, headers=headers)
+ return response
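
Note: the GitHub App helpers above authenticate in two steps: a ten-minute JWT signed
with the app's RS256 private key, then an installation-scoped token obtained from the
access-tokens URL returned in the installation metadata. A usage sketch, assuming
GITHUB_APP_ID and GITHUB_APP_PRIVATE_KEY are configured; the installation id below is a
placeholder for the one delivered by GitHub's installation callback:

    from plane.utils.integrations.github import get_github_metadata, get_github_repos

    installation_id = "12345678"  # placeholder installation id

    # The metadata response includes access_tokens_url and repositories_url.
    meta = get_github_metadata(installation_id)

    repos = get_github_repos(meta["access_tokens_url"], meta["repositories_url"])
    print([repo["full_name"] for repo in repos.get("repositories", [])])
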
diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt
index 57823500327..ffe11a2347f 100644
--- a/apiserver/requirements/base.txt
+++ b/apiserver/requirements/base.txt
@@ -1,28 +1,29 @@
# base requirements
-Django==3.2.16
+Django==3.2.18
django-braces==1.15.0
-django-taggit==2.1.0
-psycopg2==2.9.3
-django-oauth-toolkit==2.0.0
-mistune==2.0.3
+django-taggit==3.1.0
+psycopg2==2.9.5
+django-oauth-toolkit==2.2.0
+mistune==2.0.4
djangorestframework==3.14.0
-redis==4.2.2
-django-nested-admin==3.4.0
-django-cors-headers==3.11.0
-whitenoise==6.0.0
-django-allauth==0.50.0
+redis==4.4.2
+django-nested-admin==4.0.2
+django-cors-headers==3.13.0
+whitenoise==6.3.0
+django-allauth==0.52.0
faker==13.4.0
-django-filter==21.1
-jsonmodels==2.5.0
-djangorestframework-simplejwt==5.1.0
-sentry-sdk==1.13.0
-django-s3-storage==0.13.6
+django-filter==22.1
+jsonmodels==2.6.0
+djangorestframework-simplejwt==5.2.2
+sentry-sdk==1.14.0
+django-s3-storage==0.13.11
django-crum==0.7.9
django-guardian==2.4.0
dj_rest_auth==2.2.5
-google-auth==2.9.1
-google-api-python-client==2.55.0
-django-rq==2.5.1
+google-auth==2.16.0
+google-api-python-client==2.75.0
+django-rq==2.6.0
django-redis==5.2.0
-uvicorn==0.20.0
\ No newline at end of file
+uvicorn==0.20.0
+channels==4.0.0
\ No newline at end of file
diff --git a/apiserver/requirements/local.txt b/apiserver/requirements/local.txt
index 238fe63f20d..efd74a071bd 100644
--- a/apiserver/requirements/local.txt
+++ b/apiserver/requirements/local.txt
@@ -1,3 +1,3 @@
-r base.txt
-django-debug-toolbar==3.2.4
\ No newline at end of file
+django-debug-toolbar==3.8.1
\ No newline at end of file
diff --git a/apiserver/requirements/production.txt b/apiserver/requirements/production.txt
index 231d3c0a129..2547ce255fa 100644
--- a/apiserver/requirements/production.txt
+++ b/apiserver/requirements/production.txt
@@ -1,12 +1,12 @@
-r base.txt
-dj-database-url==0.5.0
+dj-database-url==1.2.0
gunicorn==20.1.0
-whitenoise==6.0.0
-django-storages==1.12.3
+whitenoise==6.3.0
+django-storages==1.13.2
boto==2.49.0
-django-anymail==8.5
-twilio==7.8.2
-django-debug-toolbar==3.2.4
+django-anymail==9.0
+twilio==7.16.2
+django-debug-toolbar==3.8.1
gevent==22.10.2
psycogreen==1.0.2
\ No newline at end of file
diff --git a/apiserver/runtime.txt b/apiserver/runtime.txt
index cd6f13073e4..2d4e05157e1 100644
--- a/apiserver/runtime.txt
+++ b/apiserver/runtime.txt
@@ -1 +1 @@
-python-3.11.1
\ No newline at end of file
+python-3.11.2
\ No newline at end of file
diff --git a/apiserver/templates/admin/base_site.html b/apiserver/templates/admin/base_site.html
index 4fdb5e19b54..fd1d890673b 100644
--- a/apiserver/templates/admin/base_site.html
+++ b/apiserver/templates/admin/base_site.html
@@ -17,7 +17,7 @@
color: #FFFFFF;
}
-{% trans 'plane Admin' %}
+{% trans 'Plane Django Admin' %}
{% endblock %}{% block nav-global %}{% endblock %}
diff --git a/app.json b/app.json
index 01791192018..7f6b274273c 100644
--- a/app.json
+++ b/app.json
@@ -6,8 +6,16 @@
"website": "https://plane.so/",
"success_url": "/",
"stack": "heroku-22",
- "keywords": ["plane", "project management", "django", "next"],
- "addons": ["heroku-postgresql:mini", "heroku-redis:mini"],
+ "keywords": [
+ "plane",
+ "project management",
+ "django",
+ "next"
+ ],
+ "addons": [
+ "heroku-postgresql:mini",
+ "heroku-redis:mini"
+ ],
"buildpacks": [
{
"url": "https://github.com/heroku/heroku-buildpack-python.git"
@@ -74,4 +82,4 @@
"value": ""
}
}
-}
+}
\ No newline at end of file
diff --git a/apps/app/.env.example b/apps/app/.env.example
new file mode 100644
index 00000000000..50747dcc607
--- /dev/null
+++ b/apps/app/.env.example
@@ -0,0 +1,7 @@
+NEXT_PUBLIC_API_BASE_URL = "http://localhost"
+NEXT_PUBLIC_GOOGLE_CLIENTID="<-- google client id -->"
+NEXT_PUBLIC_GITHUB_APP_NAME="<-- github app name -->"
+NEXT_PUBLIC_GITHUB_ID="<-- github client id -->"
+NEXT_PUBLIC_SENTRY_DSN="<-- sentry dsn -->"
+NEXT_PUBLIC_ENABLE_OAUTH=0
+NEXT_PUBLIC_ENABLE_SENTRY=0
\ No newline at end of file
diff --git a/apps/app/.eslintrc.js b/apps/app/.eslintrc.js
index 64b6ff36bff..c8df607506c 100644
--- a/apps/app/.eslintrc.js
+++ b/apps/app/.eslintrc.js
@@ -1 +1,4 @@
-module.exports = require("config/.eslintrc");
+module.exports = {
+ root: true,
+ extends: ["custom"],
+};
diff --git a/apps/app/Dockerfile.dev b/apps/app/Dockerfile.dev
new file mode 100644
index 00000000000..7b802634ce7
--- /dev/null
+++ b/apps/app/Dockerfile.dev
@@ -0,0 +1,12 @@
+FROM node:18-alpine
+RUN apk add --no-cache libc6-compat
+RUN apk update
+# Set working directory
+WORKDIR /app
+
+
+COPY . .
+RUN yarn global add turbo
+RUN yarn install
+EXPOSE 3000
+CMD ["yarn","dev"]
diff --git a/apps/app/Dockerfile.web b/apps/app/Dockerfile.web
index be8abf5fa12..11bf98bd47b 100644
--- a/apps/app/Dockerfile.web
+++ b/apps/app/Dockerfile.web
@@ -4,33 +4,14 @@ RUN apk update
# Set working directory
WORKDIR /app
-RUN apk add curl
+RUN yarn global add turbo
+COPY . .
-
-RUN curl -fsSL "https://github.com/pnpm/pnpm/releases/latest/download/pnpm-linuxstatic-x64" -o /bin/pnpm; chmod +x /bin/pnpm;
-
-ENV PNPM_HOME="pnpm"
-ENV PATH="${PATH}:./pnpm"
-
-COPY ./apps ./apps
-COPY ./package.json ./package.json
-COPY ./.eslintrc.js ./.eslintrc.js
-COPY ./turbo.json ./turbo.json
-COPY ./pnpm-workspace.yaml ./pnpm-workspace.yaml
-COPY ./pnpm-lock.yaml ./pnpm-lock.yaml
-
-RUN pnpm add -g turbo
RUN turbo prune --scope=app --docker
# Add lockfile and package.json's of isolated subworkspace
FROM node:18-alpine AS installer
-RUN apk add curl
-
-RUN curl -fsSL "https://github.com/pnpm/pnpm/releases/latest/download/pnpm-linuxstatic-x64" -o /bin/pnpm; chmod +x /bin/pnpm;
-
-ENV PNPM_HOME="pnpm"
-ENV PATH="${PATH}:./pnpm"
RUN apk add --no-cache libc6-compat
RUN apk update
@@ -39,14 +20,14 @@ WORKDIR /app
# First install the dependencies (as they change less often)
COPY .gitignore .gitignore
COPY --from=builder /app/out/json/ .
-COPY --from=builder /app/out/pnpm-lock.yaml ./pnpm-lock.yaml
-RUN pnpm install
+COPY --from=builder /app/out/yarn.lock ./yarn.lock
+RUN yarn install
# Build the project
COPY --from=builder /app/out/full/ .
COPY turbo.json turbo.json
-RUN pnpm turbo run build --filter=app...
+RUN yarn turbo run build --filter=app
FROM node:18-alpine AS runner
WORKDIR /app
@@ -62,8 +43,9 @@ COPY --from=installer /app/apps/app/package.json .
# Automatically leverage output traces to reduce image size
# https://nextjs.org/docs/advanced-features/output-file-tracing
COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./
+# COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone/node_modules ./apps/app/node_modules
COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static
-EXPOSE 3000
+ENV NEXT_TELEMETRY_DISABLED 1
-CMD node apps/app/server.js
\ No newline at end of file
+EXPOSE 3000
diff --git a/apps/app/components/account/email-code-form.tsx b/apps/app/components/account/email-code-form.tsx
index 03f9ea82286..db201041ccc 100644
--- a/apps/app/components/account/email-code-form.tsx
+++ b/apps/app/components/account/email-code-form.tsx
@@ -1,4 +1,4 @@
-import React, { useState } from "react";
+import React, { useEffect, useState } from "react";
import { useForm } from "react-hook-form";
// ui
import { CheckCircleIcon } from "@heroicons/react/20/solid";
@@ -6,6 +6,7 @@ import { Button, Input } from "components/ui";
// services
import authenticationService from "services/authentication.service";
import useToast from "hooks/use-toast";
+import useTimer from "hooks/use-timer";
// icons
// types
@@ -17,12 +18,19 @@ type EmailCodeFormValues = {
export const EmailCodeForm = ({ onSuccess }: any) => {
const [codeSent, setCodeSent] = useState(false);
+ const [codeResent, setCodeResent] = useState(false);
+ const [isCodeResending, setIsCodeResending] = useState(false);
+ const [errorResendingCode, setErrorResendingCode] = useState(false);
+
const { setToastAlert } = useToast();
+ const { timer: resendCodeTimer, setTimer: setResendCodeTimer } = useTimer();
+
const {
register,
handleSubmit,
setError,
setValue,
+ getValues,
formState: { errors, isSubmitting, isValid, isDirty },
} = useForm({
defaultValues: {
@@ -34,31 +42,38 @@ export const EmailCodeForm = ({ onSuccess }: any) => {
reValidateMode: "onChange",
});
- const onSubmit = ({ email }: EmailCodeFormValues) => {
- console.log(email);
- authenticationService
+ const isResendDisabled =
+ resendCodeTimer > 0 || isCodeResending || isSubmitting || errorResendingCode;
+
+ const onSubmit = async ({ email }: EmailCodeFormValues) => {
+ setErrorResendingCode(false);
+ await authenticationService
.emailCode({ email })
.then((res) => {
setValue("key", res.key);
setCodeSent(true);
})
.catch((err) => {
- console.log(err);
+ setErrorResendingCode(true);
+ setToastAlert({
+ title: "Oops!",
+ type: "error",
+ message: err?.error,
+ });
});
};
- const handleSignin = (formData: EmailCodeFormValues) => {
- authenticationService
+ const handleSignin = async (formData: EmailCodeFormValues) => {
+ await authenticationService
.magicSignIn(formData)
.then((response) => {
onSuccess(response);
})
.catch((error) => {
- console.log(error);
setToastAlert({
title: "Oops!",
type: "error",
- message: "Enter the correct code to sign in",
+ message: error?.response?.data?.error ?? "Enter the correct code to sign in",
});
setError("token" as keyof EmailCodeFormValues, {
type: "manual",
@@ -67,13 +82,16 @@ export const EmailCodeForm = ({ onSuccess }: any) => {
});
};
+ const emailOld = getValues("email");
+
+ useEffect(() => {
+ setErrorResendingCode(false);
+ }, [emailOld]);
+
return (
<>
-
>
diff --git a/apps/app/components/account/github-login-button.tsx b/apps/app/components/account/github-login-button.tsx
index e93abde88dc..80faecec5e3 100644
--- a/apps/app/components/account/github-login-button.tsx
+++ b/apps/app/components/account/github-login-button.tsx
@@ -34,7 +34,7 @@ export const GithubLoginButton: FC = (props) => {
return (
)}
@@ -233,5 +214,3 @@ const ExistingIssuesListModal: React.FC = ({
>
);
};
-
-export default ExistingIssuesListModal;
diff --git a/apps/app/components/common/image-upload-modal.tsx b/apps/app/components/core/image-upload-modal.tsx
similarity index 100%
rename from apps/app/components/common/image-upload-modal.tsx
rename to apps/app/components/core/image-upload-modal.tsx
diff --git a/apps/app/components/core/index.ts b/apps/app/components/core/index.ts
index 8266a5111fc..01a190d0796 100644
--- a/apps/app/components/core/index.ts
+++ b/apps/app/components/core/index.ts
@@ -1 +1,11 @@
+export * from "./board-view";
+export * from "./list-view";
+export * from "./sidebar";
+export * from "./bulk-delete-issues-modal";
+export * from "./existing-issues-list-modal";
+export * from "./image-upload-modal";
+export * from "./issues-view-filter";
+export * from "./issues-view";
+export * from "./link-modal";
export * from "./not-authorized-view";
+export * from "./multi-level-select";
diff --git a/apps/app/components/core/view.tsx b/apps/app/components/core/issues-view-filter.tsx
similarity index 73%
rename from apps/app/components/core/view.tsx
rename to apps/app/components/core/issues-view-filter.tsx
index 1fe147f22a2..9962ba96d59 100644
--- a/apps/app/components/core/view.tsx
+++ b/apps/app/components/core/issues-view-filter.tsx
@@ -17,13 +17,13 @@ import { replaceUnderscoreIfSnakeCase } from "helpers/string.helper";
// types
import { IIssue, Properties } from "types";
// common
-import { filterIssueOptions, groupByOptions, orderByOptions } from "constants/";
+import { GROUP_BY_OPTIONS, ORDER_BY_OPTIONS, FILTER_ISSUE_OPTIONS } from "constants/issue";
type Props = {
issues?: IIssue[];
};
-const View: React.FC<Props> = ({ issues }) => {
+export const IssuesFilterView: React.FC<Props> = ({ issues }) => {
const router = useRouter();
const { workspaceSlug, projectId } = router.query;
@@ -99,36 +99,40 @@ const View: React.FC<Props> = ({ issues }) => {
Group by
option.key === groupByProperty)
+ GROUP_BY_OPTIONS.find((option) => option.key === groupByProperty)
?.name ?? "Select"
}
width="lg"
>
- {groupByOptions.map((option) => (
- setGroupByProperty(option.key)}
- >
- {option.name}
-
- ))}
+ {GROUP_BY_OPTIONS.map((option) =>
+ issueView === "kanban" && option.key === null ? null : (
+ setGroupByProperty(option.key)}
+ >
+ {option.name}
+
+ )
+ )}
Order by
option.key === orderBy)?.name ??
+ ORDER_BY_OPTIONS.find((option) => option.key === orderBy)?.name ??
"Select"
}
width="lg"
>
- {orderByOptions.map((option) =>
+ {ORDER_BY_OPTIONS.map((option) =>
groupByProperty === "priority" &&
option.key === "priority" ? null : (
setOrderBy(option.key)}
+ onClick={() => {
+ setOrderBy(option.key);
+ }}
>
{option.name}
@@ -140,12 +144,12 @@ const View: React.FC<Props> = ({ issues }) => {
Issue type
option.key === filterIssue)
+ FILTER_ISSUE_OPTIONS.find((option) => option.key === filterIssue)
?.name ?? "Select"
}
width="lg"
>
- {filterIssueOptions.map((option) => (
+ {FILTER_ISSUE_OPTIONS.map((option) => (
setFilterIssue(option.key)}
@@ -176,20 +180,29 @@ const View: React.FC = ({ issues }) => {
Display Properties
- {Object.keys(properties).map((key) => (
- setProperties(key as keyof Properties)}
- >
- {replaceUnderscoreIfSnakeCase(key)}
-
- ))}
+ {Object.keys(properties).map((key) => {
+ if (
+ issueView === "kanban" &&
+ ((groupByProperty === "state_detail.name" && key === "state") ||
+ (groupByProperty === "priority" && key === "priority"))
+ )
+ return;
+
+ return (
+ setProperties(key as keyof Properties)}
+ >
+ {key === "key" ? "ID" : replaceUnderscoreIfSnakeCase(key)}
+
+ );
+ })}
@@ -203,5 +216,3 @@ const View: React.FC<Props> = ({ issues }) => {
>
);
};
-
-export default View;
diff --git a/apps/app/components/core/issues-view.tsx b/apps/app/components/core/issues-view.tsx
new file mode 100644
index 00000000000..98526a2b0ef
--- /dev/null
+++ b/apps/app/components/core/issues-view.tsx
@@ -0,0 +1,429 @@
+import { useCallback, useState } from "react";
+
+import { useRouter } from "next/router";
+
+import useSWR, { mutate } from "swr";
+
+// react-beautiful-dnd
+import { DragDropContext, DropResult } from "react-beautiful-dnd";
+// services
+import issuesService from "services/issues.service";
+import stateService from "services/state.service";
+import projectService from "services/project.service";
+import modulesService from "services/modules.service";
+// hooks
+import useIssueView from "hooks/use-issue-view";
+// components
+import { AllLists, AllBoards } from "components/core";
+import { CreateUpdateIssueModal, DeleteIssueModal } from "components/issues";
+import StrictModeDroppable from "components/dnd/StrictModeDroppable";
+// icons
+import { TrashIcon } from "@heroicons/react/24/outline";
+// helpers
+import { getStatesList } from "helpers/state.helper";
+// types
+import { CycleIssueResponse, IIssue, ModuleIssueResponse, UserAuth } from "types";
+// fetch-keys
+import {
+ CYCLE_ISSUES,
+ MODULE_ISSUES,
+ PROJECT_ISSUES_LIST,
+ PROJECT_MEMBERS,
+ STATE_LIST,
+} from "constants/fetch-keys";
+
+type Props = {
+ type?: "issue" | "cycle" | "module";
+ issues: IIssue[];
+ openIssuesListModal?: () => void;
+ userAuth: UserAuth;
+};
+
+export const IssuesView: React.FC<Props> = ({
+ type = "issue",
+ issues,
+ openIssuesListModal,
+ userAuth,
+}) => {
+ // create issue modal
+ const [createIssueModal, setCreateIssueModal] = useState(false);
+ const [preloadedData, setPreloadedData] = useState<
+ (Partial<IIssue> & { actionType: "createIssue" | "edit" | "delete" }) | undefined
+ >(undefined);
+
+ // updates issue modal
+ const [editIssueModal, setEditIssueModal] = useState(false);
+ const [issueToEdit, setIssueToEdit] = useState<
+ (IIssue & { actionType: "edit" | "delete" }) | undefined
+ >(undefined);
+
+ // delete issue modal
+ const [deleteIssueModal, setDeleteIssueModal] = useState(false);
+ const [issueToDelete, setIssueToDelete] = useState(null);
+
+ // trash box
+ const [trashBox, setTrashBox] = useState(false);
+
+ const router = useRouter();
+ const { workspaceSlug, projectId, cycleId, moduleId } = router.query;
+
+ const {
+ issueView,
+ groupedByIssues,
+ groupByProperty: selectedGroup,
+ orderBy,
+ } = useIssueView(issues);
+
+ const { data: stateGroups } = useSWR(
+ workspaceSlug && projectId ? STATE_LIST(projectId as string) : null,
+ workspaceSlug
+ ? () => stateService.getStates(workspaceSlug as string, projectId as string)
+ : null
+ );
+ const states = getStatesList(stateGroups ?? {});
+
+ const { data: members } = useSWR(
+ projectId ? PROJECT_MEMBERS(projectId as string) : null,
+ workspaceSlug && projectId
+ ? () => projectService.projectMembers(workspaceSlug as string, projectId as string)
+ : null
+ );
+
+ const handleDeleteIssue = useCallback(
+ (issue: IIssue) => {
+ setDeleteIssueModal(true);
+ setIssueToDelete(issue);
+ },
+ [setDeleteIssueModal, setIssueToDelete]
+ );
+
+ const handleOnDragEnd = useCallback(
+ (result: DropResult) => {
+ setTrashBox(false);
+
+ if (!result.destination || !workspaceSlug || !projectId) return;
+
+ const { source, destination } = result;
+
+ const draggedItem = groupedByIssues[source.droppableId][source.index];
+
+ if (destination.droppableId === "trashBox") {
+ handleDeleteIssue(draggedItem);
+ } else {
+ if (orderBy === "sort_order") {
+ let newSortOrder = draggedItem.sort_order;
+
+ const destinationGroupArray = groupedByIssues[destination.droppableId];
+
+ if (destinationGroupArray.length !== 0) {
+ // check if dropping in the same group
+ if (source.droppableId === destination.droppableId) {
+ // check if dropping at beginning
+ if (destination.index === 0)
+ newSortOrder = destinationGroupArray[0].sort_order - 10000;
+ // check if dropping at last
+ else if (destination.index === destinationGroupArray.length - 1)
+ newSortOrder =
+ destinationGroupArray[destinationGroupArray.length - 1].sort_order + 10000;
+ else {
+ if (destination.index > source.index)
+ newSortOrder =
+ (destinationGroupArray[source.index + 1].sort_order +
+ destinationGroupArray[source.index + 2].sort_order) /
+ 2;
+ else if (destination.index < source.index)
+ newSortOrder =
+ (destinationGroupArray[source.index - 1].sort_order +
+ destinationGroupArray[source.index - 2].sort_order) /
+ 2;
+ }
+ } else {
+ // check if dropping at beginning
+ if (destination.index === 0)
+ newSortOrder = destinationGroupArray[0].sort_order - 10000;
+ // check if dropping at last
+ else if (destination.index === destinationGroupArray.length)
+ newSortOrder =
+ destinationGroupArray[destinationGroupArray.length - 1].sort_order + 10000;
+ else
+ newSortOrder =
+ (destinationGroupArray[destination.index - 1].sort_order +
+ destinationGroupArray[destination.index].sort_order) /
+ 2;
+ }
+ }
+
+ draggedItem.sort_order = newSortOrder;
+ }
+
+ if (orderBy === "sort_order" || source.droppableId !== destination.droppableId) {
+ const sourceGroup = source.droppableId; // source group id
+ const destinationGroup = destination.droppableId; // destination group id
+
+ if (!sourceGroup || !destinationGroup) return;
+
+ if (selectedGroup === "priority") draggedItem.priority = destinationGroup;
+ else if (selectedGroup === "state_detail.name") {
+ const destinationState = states?.find((s) => s.name === destinationGroup);
+
+ if (!destinationState) return;
+
+ draggedItem.state = destinationState.id;
+ draggedItem.state_detail = destinationState;
+ }
+
+ if (cycleId)
+ mutate(
+ CYCLE_ISSUES(cycleId as string),
+ (prevData) => {
+ if (!prevData) return prevData;
+ const updatedIssues = prevData.map((issue) => {
+ if (issue.issue_detail.id === draggedItem.id) {
+ return {
+ ...issue,
+ issue_detail: draggedItem,
+ };
+ }
+ return issue;
+ });
+ return [...updatedIssues];
+ },
+ false
+ );
+
+ if (moduleId)
+ mutate(
+ MODULE_ISSUES(moduleId as string),
+ (prevData) => {
+ if (!prevData) return prevData;
+ const updatedIssues = prevData.map((issue) => {
+ if (issue.issue_detail.id === draggedItem.id) {
+ return {
+ ...issue,
+ issue_detail: draggedItem,
+ };
+ }
+ return issue;
+ });
+ return [...updatedIssues];
+ },
+ false
+ );
+
+ mutate(
+ PROJECT_ISSUES_LIST(workspaceSlug as string, projectId as string),
+ (prevData) => {
+ if (!prevData) return prevData;
+
+ const updatedIssues = prevData.map((i) => {
+ if (i.id === draggedItem.id) return draggedItem;
+
+ return i;
+ });
+
+ return updatedIssues;
+ },
+ false
+ );
+
+ // patch request
+ issuesService
+ .patchIssue(workspaceSlug as string, projectId as string, draggedItem.id, {
+ priority: draggedItem.priority,
+ state: draggedItem.state,
+ sort_order: draggedItem.sort_order,
+ })
+ .then((res) => {
+ if (cycleId) mutate(CYCLE_ISSUES(cycleId as string));
+ if (moduleId) mutate(MODULE_ISSUES(moduleId as string));
+
+ mutate(PROJECT_ISSUES_LIST(workspaceSlug as string, projectId as string));
+ });
+ }
+ }
+ },
+ [
+ workspaceSlug,
+ cycleId,
+ moduleId,
+ groupedByIssues,
+ projectId,
+ selectedGroup,
+ orderBy,
+ states,
+ handleDeleteIssue,
+ ]
+ );
+
+ const addIssueToState = useCallback(
+ (groupTitle: string, stateId: string | null) => {
+ setCreateIssueModal(true);
+ if (selectedGroup)
+ setPreloadedData({
+ state: stateId ?? undefined,
+ [selectedGroup]: groupTitle,
+ actionType: "createIssue",
+ });
+ else setPreloadedData({ actionType: "createIssue" });
+ },
+ [setCreateIssueModal, setPreloadedData, selectedGroup]
+ );
+
+ const handleEditIssue = useCallback(
+ (issue: IIssue) => {
+ setEditIssueModal(true);
+ setIssueToEdit({
+ ...issue,
+ actionType: "edit",
+ cycle: issue.issue_cycle ? issue.issue_cycle.cycle : null,
+ module: issue.issue_module ? issue.issue_module.module : null,
+ });
+ },
+ [setEditIssueModal, setIssueToEdit]
+ );
+
+ const removeIssueFromCycle = useCallback(
+ (bridgeId: string) => {
+ if (!workspaceSlug || !projectId) return;
+
+ mutate(
+ CYCLE_ISSUES(cycleId as string),
+ (prevData) => prevData?.filter((p) => p.id !== bridgeId),
+ false
+ );
+
+ issuesService
+ .removeIssueFromCycle(
+ workspaceSlug as string,
+ projectId as string,
+ cycleId as string,
+ bridgeId
+ )
+ .then((res) => {
+ console.log(res);
+ })
+ .catch((e) => {
+ console.log(e);
+ });
+ },
+ [workspaceSlug, projectId, cycleId]
+ );
+
+ const removeIssueFromModule = useCallback(
+ (bridgeId: string) => {
+ if (!workspaceSlug || !projectId) return;
+
+ mutate(
+ MODULE_ISSUES(moduleId as string),
+ (prevData) => prevData?.filter((p) => p.id !== bridgeId),
+ false
+ );
+
+ modulesService
+ .removeIssueFromModule(
+ workspaceSlug as string,
+ projectId as string,
+ moduleId as string,
+ bridgeId
+ )
+ .then((res) => {
+ console.log(res);
+ })
+ .catch((e) => {
+ console.log(e);
+ });
+ },
+ [workspaceSlug, projectId, moduleId]
+ );
+
+ const handleTrashBox = useCallback(
+ (isDragging: boolean) => {
+ if (isDragging && !trashBox) setTrashBox(true);
+ },
+ [trashBox, setTrashBox]
+ );
+
+ return (
+ <>
+ setCreateIssueModal(false)}
+ prePopulateData={{
+ ...preloadedData,
+ }}
+ />
+ setEditIssueModal(false)}
+ data={issueToEdit}
+ />
+ setDeleteIssueModal(false)}
+ isOpen={deleteIssueModal}
+ data={issueToDelete}
+ />
+
+
+
+
+ {(provided, snapshot) => (
+
+
+ Drop issue here to delete
+
+ )}
+
+ {issueView === "list" ? (
+
+ ) : (
+
+ )}
+
+
+ >
+ );
+};
diff --git a/apps/app/components/project/modules/module-link-modal.tsx b/apps/app/components/core/link-modal.tsx
similarity index 76%
rename from apps/app/components/project/modules/module-link-modal.tsx
rename to apps/app/components/core/link-modal.tsx
index 18fdd5f6d2b..5700946f0c9 100644
--- a/apps/app/components/project/modules/module-link-modal.tsx
+++ b/apps/app/components/core/link-modal.tsx
@@ -4,23 +4,19 @@ import { useRouter } from "next/router";
import { mutate } from "swr";
+// react-hook-form
import { useForm } from "react-hook-form";
-
+// headless ui
import { Dialog, Transition } from "@headlessui/react";
-// hooks
-// types
-import type { IModule, ModuleLink } from "types";
-// services
-import modulesService from "services/modules.service";
// ui
import { Button, Input } from "components/ui";
-// fetch-keys
-import { MODULE_LIST } from "constants/fetch-keys";
+// types
+import type { IIssueLink, ModuleLink } from "types";
type Props = {
isOpen: boolean;
- module: IModule | undefined;
handleClose: () => void;
+ onFormSubmit: (formData: IIssueLink | ModuleLink) => void;
};
const defaultValues: ModuleLink = {
@@ -28,47 +24,20 @@ const defaultValues: ModuleLink = {
url: "",
};
-const ModuleLinkModal: React.FC = ({ isOpen, module, handleClose }) => {
- const router = useRouter();
- const { workspaceSlug, projectId, moduleId } = router.query;
-
+export const LinkModal: React.FC<Props> = ({ isOpen, handleClose, onFormSubmit }) => {
const {
register,
formState: { errors, isSubmitting },
handleSubmit,
reset,
- setError,
} = useForm({
defaultValues,
});
const onSubmit = async (formData: ModuleLink) => {
- if (!workspaceSlug || !projectId || !module) return;
-
- const previousLinks = module.link_module.map((l) => ({ title: l.title, url: l.url }));
+ await onFormSubmit(formData);
- const payload: Partial<IModule> = {
- links_list: [...previousLinks, formData],
- };
-
- await modulesService
- .patchModule(workspaceSlug as string, projectId as string, module.id, payload)
- .then(() => {
- mutate(projectId && MODULE_LIST(projectId as string), (prevData) =>
- (prevData ?? []).map((module) => {
- if (module.id === moduleId) return { ...module, ...payload };
- return module;
- })
- );
- onClose();
- })
- .catch((err) => {
- Object.keys(err).map((key) => {
- setError(key as keyof ModuleLink, {
- message: err[key].join(", "),
- });
- });
- });
+ onClose();
};
const onClose = () => {
@@ -115,31 +84,31 @@ const ModuleLinkModal: React.FC<Props> = ({ isOpen, module, handleClose }) => {
@@ -163,5 +132,3 @@ const ModuleLinkModal: React.FC<Props> = ({ isOpen, module, handleClose }) => {
);
};
-
-export default ModuleLinkModal;
diff --git a/apps/app/components/core/list-view/all-lists.tsx b/apps/app/components/core/list-view/all-lists.tsx
new file mode 100644
index 00000000000..c2b6c498a7f
--- /dev/null
+++ b/apps/app/components/core/list-view/all-lists.tsx
@@ -0,0 +1,63 @@
+// hooks
+import useIssueView from "hooks/use-issue-view";
+// components
+import { SingleList } from "components/core/list-view/single-list";
+// types
+import { IIssue, IProjectMember, IState, UserAuth } from "types";
+
+// types
+type Props = {
+ type: "issue" | "cycle" | "module";
+ issues: IIssue[];
+ states: IState[] | undefined;
+ members: IProjectMember[] | undefined;
+ addIssueToState: (groupTitle: string, stateId: string | null) => void;
+ handleEditIssue: (issue: IIssue) => void;
+ handleDeleteIssue: (issue: IIssue) => void;
+ openIssuesListModal?: (() => void) | null;
+ removeIssue: ((bridgeId: string) => void) | null;
+ userAuth: UserAuth;
+};
+
+export const AllLists: React.FC<Props> = ({
+ type,
+ issues,
+ states,
+ members,
+ addIssueToState,
+ openIssuesListModal,
+ handleEditIssue,
+ handleDeleteIssue,
+ removeIssue,
+ userAuth,
+}) => {
+ const { groupedByIssues, groupByProperty: selectedGroup } = useIssueView(issues);
+
+ return (
+
+ {Object.keys(groupedByIssues).map((singleGroup) => {
+ const stateId =
+ selectedGroup === "state_detail.name"
+ ? states?.find((s) => s.name === singleGroup)?.id ?? null
+ : null;
+
+ return (
+ addIssueToState(singleGroup, stateId)}
+ handleEditIssue={handleEditIssue}
+ handleDeleteIssue={handleDeleteIssue}
+ openIssuesListModal={type !== "issue" ? openIssuesListModal : null}
+ removeIssue={removeIssue}
+ userAuth={userAuth}
+ />
+ );
+ })}
+
+ );
+};
diff --git a/apps/app/components/core/list-view/index.ts b/apps/app/components/core/list-view/index.ts
new file mode 100644
index 00000000000..c515ed1c247
--- /dev/null
+++ b/apps/app/components/core/list-view/index.ts
@@ -0,0 +1,3 @@
+export * from "./all-lists";
+export * from "./single-issue";
+export * from "./single-list";
diff --git a/apps/app/components/core/list-view/single-issue.tsx b/apps/app/components/core/list-view/single-issue.tsx
new file mode 100644
index 00000000000..0dea000203b
--- /dev/null
+++ b/apps/app/components/core/list-view/single-issue.tsx
@@ -0,0 +1,237 @@
+import React, { useCallback } from "react";
+
+import Link from "next/link";
+import { useRouter } from "next/router";
+
+import { mutate } from "swr";
+
+// services
+import issuesService from "services/issues.service";
+// hooks
+import useToast from "hooks/use-toast";
+// components
+import {
+ ViewAssigneeSelect,
+ ViewDueDateSelect,
+ ViewPrioritySelect,
+ ViewStateSelect,
+} from "components/issues/view-select";
+
+// ui
+import { Tooltip, CustomMenu } from "components/ui";
+// helpers
+import { copyTextToClipboard } from "helpers/string.helper";
+// types
+import { CycleIssueResponse, IIssue, ModuleIssueResponse, Properties, UserAuth } from "types";
+// fetch-keys
+import { CYCLE_ISSUES, MODULE_ISSUES, PROJECT_ISSUES_LIST } from "constants/fetch-keys";
+
+type Props = {
+ type?: string;
+ issue: IIssue;
+ properties: Properties;
+ editIssue: () => void;
+ removeIssue?: (() => void) | null;
+ handleDeleteIssue: (issue: IIssue) => void;
+ userAuth: UserAuth;
+};
+
+export const SingleListIssue: React.FC<Props> = ({
+ type,
+ issue,
+ properties,
+ editIssue,
+ removeIssue,
+ handleDeleteIssue,
+ userAuth,
+}) => {
+ const router = useRouter();
+ const { workspaceSlug, projectId, cycleId, moduleId } = router.query;
+ const { setToastAlert } = useToast();
+ const partialUpdateIssue = useCallback(
+ (formData: Partial<IIssue>) => {
+ if (!workspaceSlug || !projectId) return;
+
+ if (cycleId)
+ mutate(
+ CYCLE_ISSUES(cycleId as string),
+ (prevData) => {
+ const updatedIssues = (prevData ?? []).map((p) => {
+ if (p.issue_detail.id === issue.id) {
+ return {
+ ...p,
+ issue_detail: {
+ ...p.issue_detail,
+ ...formData,
+ },
+ };
+ }
+ return p;
+ });
+ return [...updatedIssues];
+ },
+ false
+ );
+
+ if (moduleId)
+ mutate(
+ MODULE_ISSUES(moduleId as string),
+ (prevData) => {
+ const updatedIssues = (prevData ?? []).map((p) => {
+ if (p.issue_detail.id === issue.id) {
+ return {
+ ...p,
+ issue_detail: {
+ ...p.issue_detail,
+ ...formData,
+ },
+ };
+ }
+ return p;
+ });
+ return [...updatedIssues];
+ },
+ false
+ );
+
+ mutate(
+ PROJECT_ISSUES_LIST(workspaceSlug as string, projectId as string),
+ (prevData) =>
+ (prevData ?? []).map((p) => {
+ if (p.id === issue.id) return { ...p, ...formData };
+
+ return p;
+ }),
+
+ false
+ );
+
+ issuesService
+ .patchIssue(workspaceSlug as string, projectId as string, issue.id, formData)
+ .then((res) => {
+ if (cycleId) mutate(CYCLE_ISSUES(cycleId as string));
+ if (moduleId) mutate(MODULE_ISSUES(moduleId as string));
+
+ mutate(PROJECT_ISSUES_LIST(workspaceSlug as string, projectId as string));
+ })
+ .catch((error) => {
+ console.log(error);
+ });
+ },
+ [workspaceSlug, projectId, cycleId, moduleId, issue]
+ );
+
+ const handleCopyText = () => {
+ const originURL =
+ typeof window !== "undefined" && window.location.origin ? window.location.origin : "";
+ copyTextToClipboard(
+ `${originURL}/${workspaceSlug}/projects/${projectId}/issues/${issue.id}`
+ ).then(() => {
+ setToastAlert({
+ type: "success",
+ title: "Link Copied!",
+ message: "Issue link copied to clipboard.",
+ });
+ });
+ };
+ const isNotAllowed = userAuth.isGuest || userAuth.isViewer;
+
+ return (
+
+
+
+ {properties.priority && (
+
+ )}
+ {properties.state && (
+
+ )}
+ {properties.due_date && (
+
+ )}
+ {properties.sub_issue_count && (
+
+ {issue.sub_issues_count} {issue.sub_issues_count === 1 ? "sub-issue" : "sub-issues"}
+
+ )}
+ {properties.labels && (
+
+ {issue.label_details.map((label) => (
+
+
+ {label.name}
+
+ ))}
+
+ )}
+ {properties.assignee && (
+
+ )}
+ {type && !isNotAllowed && (
+
+ Edit issue
+ {type !== "issue" && removeIssue && (
+
+ <>Remove from {type}>
+
+ )}
+ handleDeleteIssue(issue)}>
+ Delete issue
+
+ Copy issue link
+
+ )}
+
+
+ );
+};
diff --git a/apps/app/components/core/list-view/single-list.tsx b/apps/app/components/core/list-view/single-list.tsx
new file mode 100644
index 00000000000..9c3a7ac0ff9
--- /dev/null
+++ b/apps/app/components/core/list-view/single-list.tsx
@@ -0,0 +1,166 @@
+import { useRouter } from "next/router";
+
+// headless ui
+import { Disclosure, Transition } from "@headlessui/react";
+// hooks
+import useIssuesProperties from "hooks/use-issue-properties";
+// components
+import { SingleListIssue } from "components/core";
+// icons
+import { ChevronDownIcon, PlusIcon } from "@heroicons/react/24/outline";
+// helpers
+import { addSpaceIfCamelCase } from "helpers/string.helper";
+// types
+import { IIssue, IProjectMember, NestedKeyOf, UserAuth } from "types";
+import { CustomMenu } from "components/ui";
+
+type Props = {
+ type?: "issue" | "cycle" | "module";
+ groupTitle: string;
+ groupedByIssues: {
+ [key: string]: IIssue[];
+ };
+ selectedGroup: NestedKeyOf<IIssue> | null;
+ members: IProjectMember[] | undefined;
+ addIssueToState: () => void;
+ handleEditIssue: (issue: IIssue) => void;
+ handleDeleteIssue: (issue: IIssue) => void;
+ openIssuesListModal?: (() => void) | null;
+ removeIssue: ((bridgeId: string) => void) | null;
+ userAuth: UserAuth;
+};
+
+export const SingleList: React.FC<Props> = ({
+ type,
+ groupTitle,
+ groupedByIssues,
+ selectedGroup,
+ members,
+ addIssueToState,
+ handleEditIssue,
+ handleDeleteIssue,
+ openIssuesListModal,
+ removeIssue,
+ userAuth,
+}) => {
+ const router = useRouter();
+ const { workspaceSlug, projectId } = router.query;
+
+ const [properties] = useIssuesProperties(workspaceSlug as string, projectId as string);
+
+ const createdBy =
+ selectedGroup === "created_by"
+ ? members?.find((m) => m.member.id === groupTitle)?.member.first_name ?? "Loading..."
+ : null;
+
+ let assignees: any;
+ if (selectedGroup === "assignees") {
+ assignees = groupTitle && groupTitle !== "" ? groupTitle.split(",") : [];
+ assignees =
+ assignees.length > 0
+ ? assignees
+ .map((a: string) => members?.find((m) => m.member.id === a)?.member.first_name)
+ .join(", ")
+ : "No assignee";
+ }
+
+ return (
+
+ {({ open }) => (
+
+
+
+
+
+
+
+ {selectedGroup !== null ? (
+
+ {selectedGroup === "created_by"
+ ? createdBy
+ : selectedGroup === "assignees"
+ ? assignees
+ : addSpaceIfCamelCase(groupTitle)}
+
+ ) : (
+
All Issues
+ )}
+
+ {groupedByIssues[groupTitle as keyof IIssue].length}
+
+
+
+
+
+
+
+ {groupedByIssues[groupTitle] ? (
+ groupedByIssues[groupTitle].length > 0 ? (
+ groupedByIssues[groupTitle].map((issue: IIssue) => (
+
handleEditIssue(issue)}
+ handleDeleteIssue={handleDeleteIssue}
+ removeIssue={() => {
+ removeIssue && removeIssue(issue.bridge);
+ }}
+ userAuth={userAuth}
+ />
+ ))
+ ) : (
+ No issues.
+ )
+ ) : (
+ Loading...
+ )}
+
+
+
+
+ {type === "issue" ? (
+
+
+ Add issue
+
+ ) : (
+
+
+ Add issue
+
+ }
+ optionsPosition="left"
+ noBorder
+ >
+ Create new
+ {openIssuesListModal && (
+
+ Add an existing issue
+
+ )}
+
+ )}
+
+
+ )}
+
+ );
+};
diff --git a/apps/app/components/core/multi-level-select.tsx b/apps/app/components/core/multi-level-select.tsx
new file mode 100644
index 00000000000..68a76ae91a0
--- /dev/null
+++ b/apps/app/components/core/multi-level-select.tsx
@@ -0,0 +1,150 @@
+import React, { useState } from "react";
+
+import { Listbox, Transition } from "@headlessui/react";
+
+import { CheckIcon, ChevronUpDownIcon } from "@heroicons/react/20/solid";
+
+type TSelectOption = {
+ id: string;
+ label: string;
+ value: any;
+ children?:
+ | (TSelectOption & {
+ children?: null;
+ })[]
+ | null;
+};
+
+type TMultipleSelectProps = {
+ options: TSelectOption[];
+ selected: TSelectOption | null;
+ setSelected: (value: any) => void;
+ label: string;
+ direction?: "left" | "right";
+};
+
+export const MultiLevelSelect: React.FC<TMultipleSelectProps> = (props) => {
+ const { options, selected, setSelected, label, direction = "right" } = props;
+
+ const [openChildFor, setOpenChildFor] = useState<TSelectOption | null>(null);
+
+ return (
+
+
{
+ if (value?.children === null) {
+ setSelected(value);
+ setOpenChildFor(null);
+ } else setOpenChildFor(value);
+ }}
+ >
+ {({ open }) => (
+
+
setOpenChildFor(null)}
+ className="relative w-full cursor-default rounded-lg bg-white py-2 pl-3 pr-10 text-left shadow-md sm:text-sm"
+ >
+ {selected?.label ?? label}
+
+
+
+
+
+
+ {options.map((option) => (
+ {
+ if (option.children !== null) {
+ e.preventDefault();
+ setOpenChildFor(option);
+ }
+ if (option.id === openChildFor?.id) {
+ e.preventDefault();
+ setOpenChildFor(null);
+ }
+ }}
+ value={option}
+ >
+ {({ selected }) => (
+ <>
+ {openChildFor?.id === option.id && (
+
+ {option.children?.map((child) => (
+
+ {({ selected }) => (
+ <>
+
+ {child.label}
+
+ {selected ? (
+
+
+
+ ) : null}
+ >
+ )}
+
+ ))}
+
+
+
+ )}
+
+ {option.label}
+
+ {selected ? (
+
+
+
+ ) : null}
+ >
+ )}
+
+ ))}
+
+
+
+ )}
+
+
+ );
+};
diff --git a/apps/app/components/core/sidebar/index.ts b/apps/app/components/core/sidebar/index.ts
new file mode 100644
index 00000000000..c5357f576cc
--- /dev/null
+++ b/apps/app/components/core/sidebar/index.ts
@@ -0,0 +1,3 @@
+export * from "./links-list";
+export * from "./sidebar-progress-stats";
+export * from "./single-progress-stats";
diff --git a/apps/app/components/core/sidebar/links-list.tsx b/apps/app/components/core/sidebar/links-list.tsx
new file mode 100644
index 00000000000..8553ee43cf9
--- /dev/null
+++ b/apps/app/components/core/sidebar/links-list.tsx
@@ -0,0 +1,69 @@
+import Link from "next/link";
+
+// icons
+import { LinkIcon, TrashIcon } from "@heroicons/react/24/outline";
+import { ExternalLinkIcon } from "components/icons";
+// helpers
+import { timeAgo } from "helpers/date-time.helper";
+// types
+import { IUserLite, UserAuth } from "types";
+
+type Props = {
+ links: {
+ id: string;
+ created_at: Date;
+ created_by: string;
+ created_by_detail: IUserLite;
+ title: string;
+ url: string;
+ }[];
+ handleDeleteLink: (linkId: string) => void;
+ userAuth: UserAuth;
+};
+
+export const LinksList: React.FC<Props> = ({ links, handleDeleteLink, userAuth }) => {
+ const isNotAllowed = userAuth.isGuest || userAuth.isViewer;
+
+ return (
+ <>
+ {links.map((link) => (
+
+ ))}
+ >
+ );
+};
diff --git a/apps/app/components/core/sidebar/progress-chart.tsx b/apps/app/components/core/sidebar/progress-chart.tsx
new file mode 100644
index 00000000000..b0d5bb39466
--- /dev/null
+++ b/apps/app/components/core/sidebar/progress-chart.tsx
@@ -0,0 +1,97 @@
+import React from "react";
+
+import {
+ XAxis,
+ YAxis,
+ Tooltip,
+ ResponsiveContainer,
+ AreaChart,
+ Area,
+ ReferenceLine,
+} from "recharts";
+
+//types
+import { IIssue } from "types";
+// helper
+import { getDatesInRange, renderShortNumericDateFormat } from "helpers/date-time.helper";
+
+type Props = {
+ issues: IIssue[];
+ start: string;
+ end: string;
+};
+
+const ProgressChart: React.FC<Props> = ({ issues, start, end }) => {
+ const startDate = new Date(start);
+ const endDate = new Date(end);
+ const getChartData = () => {
+ const dateRangeArray = getDatesInRange(startDate, endDate);
+ let count = 0;
+ const dateWiseData = dateRangeArray.map((d) => {
+ const current = d.toISOString().split("T")[0];
+ const total = issues.length;
+ const currentData = issues.filter(
+ (i) => i.completed_at && i.completed_at.toString().split("T")[0] === current
+ );
+ count = currentData ? currentData.length + count : count;
+
+ return {
+ currentDate: renderShortNumericDateFormat(current),
+ currentDateData: currentData,
+ pending: new Date(current) < new Date() ? total - count : null,
+ };
+ });
+ return dateWiseData;
+ };
+ const ChartData = getChartData();
+ return (
+
+
+
+
+ Ideal
+
+
+
+ Current
+
+
+
+
+ );
+};
+
+export default ProgressChart;
diff --git a/apps/app/components/core/sidebar/sidebar-progress-stats.tsx b/apps/app/components/core/sidebar/sidebar-progress-stats.tsx
new file mode 100644
index 00000000000..01417b62554
--- /dev/null
+++ b/apps/app/components/core/sidebar/sidebar-progress-stats.tsx
@@ -0,0 +1,213 @@
+import React from "react";
+
+import Image from "next/image";
+import { useRouter } from "next/router";
+
+import useSWR from "swr";
+
+// headless ui
+import { Tab } from "@headlessui/react";
+// services
+import issuesServices from "services/issues.service";
+import projectService from "services/project.service";
+// hooks
+import useLocalStorage from "hooks/use-local-storage";
+// components
+import { SingleProgressStats } from "components/core";
+// ui
+import { Avatar } from "components/ui";
+// icons
+import User from "public/user.png";
+// types
+import { IIssue, IIssueLabels } from "types";
+// fetch-keys
+import { PROJECT_ISSUE_LABELS, PROJECT_MEMBERS } from "constants/fetch-keys";
+// types
+type Props = {
+ groupedIssues: any;
+ issues: IIssue[];
+};
+
+const stateGroupColours: {
+ [key: string]: string;
+} = {
+ backlog: "#3f76ff",
+ unstarted: "#ff9e9e",
+ started: "#d687ff",
+ cancelled: "#ff5353",
+ completed: "#096e8d",
+};
+
+export const SidebarProgressStats: React.FC<Props> = ({ groupedIssues, issues }) => {
+ const router = useRouter();
+ const { workspaceSlug, projectId } = router.query;
+
+ const { storedValue: tab, setValue: setTab } = useLocalStorage("tab", "Assignees");
+
+ const { data: issueLabels } = useSWR(
+ workspaceSlug && projectId ? PROJECT_ISSUE_LABELS(projectId as string) : null,
+ workspaceSlug && projectId
+ ? () => issuesServices.getIssueLabels(workspaceSlug as string, projectId as string)
+ : null
+ );
+
+ const { data: members } = useSWR(
+ workspaceSlug && projectId ? PROJECT_MEMBERS(workspaceSlug as string) : null,
+ workspaceSlug && projectId
+ ? () => projectService.projectMembers(workspaceSlug as string, projectId as string)
+ : null
+ );
+
+ const currentValue = (tab: string | null) => {
+ switch (tab) {
+ case "Assignees":
+ return 0;
+ case "Labels":
+ return 1;
+ case "States":
+ return 2;
+ default:
+ return 0;
+ }
+ };
+ return (
+ {
+ switch (i) {
+ case 0:
+ return setTab("Assignees");
+ case 1:
+ return setTab("Labels");
+ case 2:
+ return setTab("States");
+
+ default:
+ return setTab("Assignees");
+ }
+ }}
+ >
+
+
+ `w-1/2 rounded py-1 ${selected ? "bg-gray-300" : "hover:bg-gray-200"}`
+ }
+ >
+ Assignees
+
+
+ `w-1/2 rounded py-1 ${selected ? "bg-gray-300 font-semibold" : "hover:bg-gray-200 "}`
+ }
+ >
+ Labels
+
+
+ `w-1/2 rounded py-1 ${selected ? "bg-gray-300 font-semibold" : "hover:bg-gray-200 "}`
+ }
+ >
+ States
+
+
+
+
+ {members?.map((member, index) => {
+ const totalArray = issues?.filter((i) => i.assignees?.includes(member.member.id));
+ const completeArray = totalArray?.filter((i) => i.state_detail.group === "completed");
+ if (totalArray.length > 0) {
+ return (
+
+
+ {member.member.first_name}
+ >
+ }
+ completed={completeArray.length}
+ total={totalArray.length}
+ />
+ );
+ }
+ })}
+ {issues?.filter((i) => i.assignees?.length === 0).length > 0 ? (
+
+
+
+
+ No assignee
+ >
+ }
+ completed={
+ issues?.filter(
+ (i) => i.state_detail.group === "completed" && i.assignees?.length === 0
+ ).length
+ }
+ total={issues?.filter((i) => i.assignees?.length === 0).length}
+ />
+ ) : (
+ ""
+ )}
+
+
+ {issueLabels?.map((issue, index) => {
+ const totalArray = issues?.filter((i) => i.labels?.includes(issue.id));
+ const completeArray = totalArray?.filter((i) => i.state_detail.group === "completed");
+ if (totalArray.length > 0) {
+ return (
+
+
+ {issue.name}
+ >
+ }
+ completed={completeArray.length}
+ total={totalArray.length}
+ />
+ );
+ }
+ })}
+
+
+ {Object.keys(groupedIssues).map((group, index) => (
+
+
+ {group}
+ >
+ }
+ completed={groupedIssues[group].length}
+ total={issues.length}
+ />
+ ))}
+
+
+
+ );
+};
diff --git a/apps/app/components/core/sidebar/single-progress-stats.tsx b/apps/app/components/core/sidebar/single-progress-stats.tsx
new file mode 100644
index 00000000000..bb56e154554
--- /dev/null
+++ b/apps/app/components/core/sidebar/single-progress-stats.tsx
@@ -0,0 +1,29 @@
+import React from "react";
+
+import { ProgressBar } from "components/ui";
+
+type TSingleProgressStatsProps = {
+ title: any;
+ completed: number;
+ total: number;
+};
+
+export const SingleProgressStats: React.FC<TSingleProgressStatsProps> = ({
+ title,
+ completed,
+ total,
+}) => (
+
+
{title}
+
+
+
+
+
+
{Math.floor((completed / total) * 100)}%
+
+
of
+
{total}
+
+
+);
diff --git a/apps/app/components/project/cycles/stats-view/index.tsx b/apps/app/components/cycles/cycles-list-view.tsx
similarity index 85%
rename from apps/app/components/project/cycles/stats-view/index.tsx
rename to apps/app/components/cycles/cycles-list-view.tsx
index 58f31646442..8491190e8d6 100644
--- a/apps/app/components/project/cycles/stats-view/index.tsx
+++ b/apps/app/components/cycles/cycles-list-view.tsx
@@ -1,8 +1,7 @@
// react
import { useState } from "react";
// components
-import SingleStat from "components/project/cycles/stats-view/single-stat";
-import ConfirmCycleDeletion from "components/project/cycles/confirm-cycle-deletion";
+import { DeleteCycleModal, SingleCycleCard } from "components/cycles";
// types
import { ICycle, SelectCycleType } from "types";
import { CompletedCycleIcon, CurrentCycleIcon, UpcomingCycleIcon } from "components/icons";
@@ -14,7 +13,7 @@ type TCycleStatsViewProps = {
type: "current" | "upcoming" | "completed";
};
-const CycleStatsView: React.FC<TCycleStatsViewProps> = ({
+export const CyclesListView: React.FC<TCycleStatsViewProps> = ({
cycles,
setCreateUpdateCycleModal,
setSelectedCycle,
@@ -35,7 +34,7 @@ const CycleStatsView: React.FC<TCycleStatsViewProps> = ({
return (
<>
- = ({
/>
{cycles.length > 0 ? (
cycles.map((cycle) => (
- handleDeleteCycle(cycle)}
@@ -64,12 +63,10 @@ const CycleStatsView: React.FC<TCycleStatsViewProps> = ({
)}
No {type} {type === "current" ? "cycle" : "cycles"} yet. Create with{" "}
- Q
.
+ Q
.
)}
>
);
};
-
-export default CycleStatsView;
diff --git a/apps/app/components/project/cycles/confirm-cycle-deletion.tsx b/apps/app/components/cycles/delete-cycle-modal.tsx
similarity index 96%
rename from apps/app/components/project/cycles/confirm-cycle-deletion.tsx
rename to apps/app/components/cycles/delete-cycle-modal.tsx
index ab35d343bf6..a962533a4b3 100644
--- a/apps/app/components/project/cycles/confirm-cycle-deletion.tsx
+++ b/apps/app/components/cycles/delete-cycle-modal.tsx
@@ -23,7 +23,7 @@ type TConfirmCycleDeletionProps = {
// fetch-keys
import { CYCLE_LIST } from "constants/fetch-keys";
-const ConfirmCycleDeletion: React.FC<TConfirmCycleDeletionProps> = ({
+export const DeleteCycleModal: React.FC<TConfirmCycleDeletionProps> = ({
isOpen,
setIsOpen,
data,
@@ -36,10 +36,6 @@ const ConfirmCycleDeletion: React.FC<TConfirmCycleDeletionProps> = ({
const { setToastAlert } = useToast();
- useEffect(() => {
- data && setIsOpen(true);
- }, [data, setIsOpen]);
-
const handleClose = () => {
setIsOpen(false);
setIsDeleteLoading(false);
@@ -153,5 +149,3 @@ const ConfirmCycleDeletion: React.FC<TConfirmCycleDeletionProps> = ({
);
};
-
-export default ConfirmCycleDeletion;
diff --git a/apps/app/components/cycles/form.tsx b/apps/app/components/cycles/form.tsx
index 262db1abcfe..58f57ba1400 100644
--- a/apps/app/components/cycles/form.tsx
+++ b/apps/app/components/cycles/form.tsx
@@ -1,39 +1,59 @@
-import { FC } from "react";
+import { useEffect } from "react";
+
+// react-hook-form
import { Controller, useForm } from "react-hook-form";
-// components
-import { Button, Input, TextArea, CustomSelect } from "components/ui";
+// ui
+import { Button, CustomDatePicker, CustomSelect, Input, TextArea } from "components/ui";
// types
-import type { ICycle } from "types";
+import { ICycle } from "types";
+
+type Props = {
+ handleFormSubmit: (values: Partial<ICycle>) => Promise<void>;
+ handleClose: () => void;
+ status: boolean;
+ data?: ICycle;
+};
const defaultValues: Partial<ICycle> = {
name: "",
description: "",
status: "draft",
- start_date: new Date().toString(),
- end_date: new Date().toString(),
+ start_date: "",
+ end_date: "",
};
-export interface CycleFormProps {
- handleFormSubmit: (values: Partial<ICycle>) => void;
- handleFormCancel?: () => void;
- initialData?: Partial<ICycle>;
-}
-
-export const CycleForm: FC<CycleFormProps> = (props) => {
- const { handleFormSubmit, handleFormCancel = () => {}, initialData = null } = props;
- // form handler
+export const CycleForm: React.FC<Props> = ({ handleFormSubmit, handleClose, status, data }) => {
const {
register,
formState: { errors, isSubmitting },
handleSubmit,
control,
+ reset,
} = useForm({
- defaultValues: initialData || defaultValues,
+ defaultValues,
});
+ const handleCreateUpdateCycle = async (formData: Partial<ICycle>) => {
+ await handleFormSubmit(formData);
+
+ reset({
+ ...defaultValues,
+ });
+ };
+
+ useEffect(() => {
+ reset({
+ ...defaultValues,
+ ...data,
+ });
+ }, [data, reset]);
+
return (
-