diff --git a/.eslintrc.js b/.eslintrc.js
index be1ad0f9da5..463c86901c0 100644
--- a/.eslintrc.js
+++ b/.eslintrc.js
@@ -1,10 +1,10 @@
module.exports = {
root: true,
- // This tells ESLint to load the config from the package `config`
- // extends: ["custom"],
+ // This tells ESLint to load the config from the package `eslint-config-custom`
+ extends: ["custom"],
settings: {
next: {
- rootDir: ["apps/*/"],
+ rootDir: ["apps/*"],
},
},
};
diff --git a/.gitignore b/.gitignore
index 4933d309e40..3562ab0b34c 100644
--- a/.gitignore
+++ b/.gitignore
@@ -64,4 +64,9 @@ package-lock.json
.vscode
# Sentry
-.sentryclirc
\ No newline at end of file
+.sentryclirc
+
+# lock files
+package-lock.json
+pnpm-lock.yaml
+pnpm-workspace.yaml
\ No newline at end of file
diff --git a/Dockerfile b/Dockerfile
new file mode 100644
index 00000000000..094d628e3bb
--- /dev/null
+++ b/Dockerfile
@@ -0,0 +1,116 @@
+FROM node:18-alpine AS builder
+RUN apk add --no-cache libc6-compat
+RUN apk update
+# Set working directory
+WORKDIR /app
+
+RUN yarn global add turbo
+COPY . .
+
+RUN turbo prune --scope=app --docker
+
+# Add lockfile and package.json's of isolated subworkspace
+FROM node:18-alpine AS installer
+
+
+RUN apk add --no-cache libc6-compat
+RUN apk update
+WORKDIR /app
+
+# First install the dependencies (as they change less often)
+COPY .gitignore .gitignore
+COPY --from=builder /app/out/json/ .
+COPY --from=builder /app/out/yarn.lock ./yarn.lock
+RUN yarn install
+
+# Build the project
+COPY --from=builder /app/out/full/ .
+COPY turbo.json turbo.json
+
+RUN yarn turbo run build --filter=app
+
+
+FROM python:3.11.1-alpine3.17 AS backend
+
+# set environment variables
+ENV PYTHONDONTWRITEBYTECODE 1
+ENV PYTHONUNBUFFERED 1
+ENV PIP_DISABLE_PIP_VERSION_CHECK=1
+
+WORKDIR /code
+
+RUN apk --update --no-cache add \
+ "libpq~=15" \
+ "libxslt~=1.1" \
+ "nodejs-current~=19" \
+ "xmlsec~=1.2" \
+ "nginx" \
+ "nodejs" \
+ "npm" \
+ "supervisor"
+
+COPY apiserver/requirements.txt ./
+COPY apiserver/requirements ./requirements
+RUN apk add libffi-dev
+RUN apk --update --no-cache --virtual .build-deps add \
+ "bash~=5.2" \
+ "g++~=12.2" \
+ "gcc~=12.2" \
+ "cargo~=1.64" \
+ "git~=2" \
+ "make~=4.3" \
+ "postgresql13-dev~=13" \
+ "libc-dev" \
+ "linux-headers" \
+ && \
+ pip install -r requirements.txt --compile --no-cache-dir \
+ && \
+ apk del .build-deps
+
+# Add in Django deps and generate Django's static files
+COPY apiserver/manage.py manage.py
+COPY apiserver/plane plane/
+COPY apiserver/templates templates/
+
+COPY apiserver/gunicorn.config.py ./
+RUN apk --update --no-cache add "bash~=5.2"
+COPY apiserver/bin ./bin/
+
+RUN chmod +x ./bin/takeoff ./bin/worker
+RUN chmod -R 777 /code
+
+# Expose container port and run entry point script
+EXPOSE 8000
+EXPOSE 3000
+EXPOSE 80
+
+
+
+WORKDIR /app
+
+# Don't run production as root
+RUN addgroup --system --gid 1001 plane
+RUN adduser --system --uid 1001 captain
+
+COPY --from=installer /app/apps/app/next.config.js .
+COPY --from=installer /app/apps/app/package.json .
+
+COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./
+
+COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static
+
+ENV NEXT_TELEMETRY_DISABLED 1
+
+# RUN rm /etc/nginx/conf.d/default.conf
+#######################################################################
+COPY nginx/nginx-single-docker-image.conf /etc/nginx/http.d/default.conf
+#######################################################################
+
+COPY nginx/supervisor.conf /code/supervisor.conf
+
+
+CMD ["supervisord","-c","/code/supervisor.conf"]
+
+
+
+
diff --git a/apiserver/.env.example b/apiserver/.env.example
index 0595770fa7a..9a6904b55b9 100644
--- a/apiserver/.env.example
+++ b/apiserver/.env.example
@@ -1,18 +1,22 @@
-# Backend
SECRET_KEY="<-- django secret -->"
+DJANGO_SETTINGS_MODULE="plane.settings.production"
+# Database
+DATABASE_URL=postgres://plane:plane@plane-db-1:5432/plane
+# Cache
+REDIS_URL=redis://redis:6379/
+# SMTP
EMAIL_HOST="<-- email smtp -->"
EMAIL_HOST_USER="<-- email host user -->"
EMAIL_HOST_PASSWORD="<-- email host password -->"
-
+# AWS
AWS_REGION="<-- aws region -->"
AWS_ACCESS_KEY_ID="<-- aws access key -->"
AWS_SECRET_ACCESS_KEY="<-- aws secret acess key -->"
AWS_S3_BUCKET_NAME="<-- aws s3 bucket name -->"
-
-SENTRY_DSN="<-- sentry dsn -->"
-WEB_URL="<-- frontend web url -->"
-
+# FE
+WEB_URL="localhost/"
+# OAUTH
GITHUB_CLIENT_SECRET="<-- github secret -->"
-
+# Flags
DISABLE_COLLECTSTATIC=1
-DOCKERIZED=0 //True if running docker compose else 0
+DOCKERIZED=1
diff --git a/apiserver/Dockerfile.api b/apiserver/Dockerfile.api
index 6343c740e21..123544571e4 100644
--- a/apiserver/Dockerfile.api
+++ b/apiserver/Dockerfile.api
@@ -1,4 +1,4 @@
-FROM python:3.8.14-alpine3.16 AS backend
+FROM python:3.11.1-alpine3.17 AS backend
# set environment variables
ENV PYTHONDONTWRITEBYTECODE 1
@@ -8,19 +8,19 @@ ENV PIP_DISABLE_PIP_VERSION_CHECK=1
WORKDIR /code
RUN apk --update --no-cache add \
- "libpq~=14" \
+ "libpq~=15" \
"libxslt~=1.1" \
- "nodejs-current~=18" \
+ "nodejs-current~=19" \
"xmlsec~=1.2"
COPY requirements.txt ./
COPY requirements ./requirements
RUN apk add libffi-dev
RUN apk --update --no-cache --virtual .build-deps add \
- "bash~=5.1" \
- "g++~=11.2" \
- "gcc~=11.2" \
- "cargo~=1.60" \
+ "bash~=5.2" \
+ "g++~=12.2" \
+ "gcc~=12.2" \
+ "cargo~=1.64" \
"git~=2" \
"make~=4.3" \
"postgresql13-dev~=13" \
@@ -46,15 +46,16 @@ COPY templates templates/
COPY gunicorn.config.py ./
USER root
-RUN apk --update --no-cache add "bash~=5.1"
+RUN apk --update --no-cache add "bash~=5.2"
COPY ./bin ./bin/
RUN chmod +x ./bin/takeoff ./bin/worker
+RUN chmod -R 777 /code
USER captain
# Expose container port and run entry point script
EXPOSE 8000
-CMD [ "./bin/takeoff" ]
+# CMD [ "./bin/takeoff" ]
diff --git a/apiserver/bin/takeoff b/apiserver/bin/takeoff
index 8340f16c704..d22eceb6ee9 100755
--- a/apiserver/bin/takeoff
+++ b/apiserver/bin/takeoff
@@ -2,4 +2,8 @@
set -e
python manage.py wait_for_db
python manage.py migrate
+
+# Create a Default User
+python bin/user_script.py
+
exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --config gunicorn.config.py --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
diff --git a/apiserver/bin/user_script.py b/apiserver/bin/user_script.py
new file mode 100644
index 00000000000..b554d2c405e
--- /dev/null
+++ b/apiserver/bin/user_script.py
@@ -0,0 +1,28 @@
+import os, sys
+import uuid
+
+sys.path.append("/code")
+
+os.environ.setdefault("DJANGO_SETTINGS_MODULE", "plane.settings.production")
+import django
+
+django.setup()
+
+from plane.db.models import User
+
+
+def populate():
+ default_email = os.environ.get("DEFAULT_EMAIL", "captain@plane.so")
+ default_password = os.environ.get("DEFAULT_PASSWORD", "password123")
+
+ if not User.objects.filter(email=default_email).exists():
+ user = User.objects.create(email=default_email, username=uuid.uuid4().hex)
+ user.set_password(default_password)
+ user.save()
+ print("User created")
+
+ print("Success")
+
+
+if __name__ == "__main__":
+ populate()
diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py
index 3add8f965f1..c501a3d94c0 100644
--- a/apiserver/plane/api/serializers/issue.py
+++ b/apiserver/plane/api/serializers/issue.py
@@ -24,9 +24,15 @@
Cycle,
Module,
ModuleIssue,
+ IssueLink,
)
+class IssueLinkCreateSerializer(serializers.Serializer):
+ url = serializers.CharField(required=True)
+ title = serializers.CharField(required=False)
+
+
class IssueFlatSerializer(BaseSerializer):
## Contain only flat fields
@@ -86,6 +92,11 @@ class IssueCreateSerializer(BaseSerializer):
write_only=True,
required=False,
)
+ links_list = serializers.ListField(
+ child=IssueLinkCreateSerializer(),
+ write_only=True,
+ required=False,
+ )
class Meta:
model = Issue
@@ -104,6 +115,7 @@ def create(self, validated_data):
assignees = validated_data.pop("assignees_list", None)
labels = validated_data.pop("labels_list", None)
blocks = validated_data.pop("blocks_list", None)
+ links = validated_data.pop("links_list", None)
project = self.context["project"]
issue = Issue.objects.create(**validated_data, project=project)
@@ -172,6 +184,24 @@ def create(self, validated_data):
batch_size=10,
)
+ if links is not None:
+ IssueLink.objects.bulk_create(
+ [
+ IssueLink(
+ issue=issue,
+ project=project,
+ workspace=project.workspace,
+ created_by=issue.created_by,
+ updated_by=issue.updated_by,
+ title=link.get("title", None),
+ url=link.get("url", None),
+ )
+ for link in links
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
return issue
def update(self, instance, validated_data):
@@ -179,6 +209,7 @@ def update(self, instance, validated_data):
assignees = validated_data.pop("assignees_list", None)
labels = validated_data.pop("labels_list", None)
blocks = validated_data.pop("blocks_list", None)
+ links = validated_data.pop("links_list", None)
if blockers is not None:
IssueBlocker.objects.filter(block=instance).delete()
@@ -248,6 +279,25 @@ def update(self, instance, validated_data):
batch_size=10,
)
+ if links is not None:
+ IssueLink.objects.filter(issue=instance).delete()
+ IssueLink.objects.bulk_create(
+ [
+ IssueLink(
+ issue=instance,
+ project=instance.project,
+ workspace=instance.project.workspace,
+ created_by=instance.created_by,
+ updated_by=instance.updated_by,
+ title=link.get("title", None),
+ url=link.get("url", None),
+ )
+ for link in links
+ ],
+ batch_size=10,
+ ignore_conflicts=True,
+ )
+
return super().update(instance, validated_data)
@@ -410,6 +460,12 @@ class Meta:
]
+class IssueLinkSerializer(BaseSerializer):
+ class Meta:
+ model = IssueLink
+ fields = "__all__"
+
+
class IssueSerializer(BaseSerializer):
project_detail = ProjectSerializer(read_only=True, source="project")
state_detail = StateSerializer(read_only=True, source="state")
@@ -422,6 +478,7 @@ class IssueSerializer(BaseSerializer):
blocker_issues = BlockerIssueSerializer(read_only=True, many=True)
issue_cycle = IssueCycleDetailSerializer(read_only=True)
issue_module = IssueModuleDetailSerializer(read_only=True)
+ issue_link = IssueLinkSerializer(read_only=True, many=True)
sub_issues_count = serializers.IntegerField(read_only=True)
class Meta:
diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls.py
index 98c2e87d286..4af139bf511 100644
--- a/apiserver/plane/api/urls.py
+++ b/apiserver/plane/api/urls.py
@@ -5,7 +5,6 @@
from plane.api.views import (
# Authentication
- SignUpEndpoint,
SignInEndpoint,
SignOutEndpoint,
MagicSignInEndpoint,
@@ -95,7 +94,6 @@
path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
# Auth
path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
- path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
# Magic Sign In/Up
path(
diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py
index 1212e0dca0d..4fb565e8dd7 100644
--- a/apiserver/plane/api/views/__init__.py
+++ b/apiserver/plane/api/views/__init__.py
@@ -64,7 +64,6 @@
from .authentication import (
- SignUpEndpoint,
SignInEndpoint,
SignOutEndpoint,
MagicSignInEndpoint,
diff --git a/apiserver/plane/api/views/auth_extended.py b/apiserver/plane/api/views/auth_extended.py
index 487d10a2260..56dc091f489 100644
--- a/apiserver/plane/api/views/auth_extended.py
+++ b/apiserver/plane/api/views/auth_extended.py
@@ -84,7 +84,7 @@ def post(self, request):
)
return Response(
- {"messgae": "Check your email to reset your password"},
+ {"message": "Check your email to reset your password"},
status=status.HTTP_200_OK,
)
return Response(
diff --git a/apiserver/plane/api/views/authentication.py b/apiserver/plane/api/views/authentication.py
index ac218837dbc..58d75a04952 100644
--- a/apiserver/plane/api/views/authentication.py
+++ b/apiserver/plane/api/views/authentication.py
@@ -35,7 +35,7 @@ def get_tokens_for_user(user):
)
-class SignUpEndpoint(BaseAPIView):
+class SignInEndpoint(BaseAPIView):
permission_classes = (AllowAny,)
def post(self, request):
@@ -62,114 +62,67 @@ def post(self, request):
user = User.objects.filter(email=email).first()
- if user is not None:
- return Response(
- {"error": "Email ID is already taken"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- user = User.objects.create(email=email)
- user.set_password(password)
-
- # settings last actives for the user
- user.last_active = timezone.now()
- user.last_login_time = timezone.now()
- user.last_login_ip = request.META.get("REMOTE_ADDR")
- user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
- user.token_updated_at = timezone.now()
- user.save()
-
- serialized_user = UserSerializer(user).data
-
- access_token, refresh_token = get_tokens_for_user(user)
-
- data = {
- "access_token": access_token,
- "refresh_token": refresh_token,
- "user": serialized_user,
- }
-
- return Response(data, status=status.HTTP_200_OK)
-
- except Exception as e:
- capture_exception(e)
- return Response(
- {
- "error": "Something went wrong. Please try again later or contact the support team."
- },
- status=status.HTTP_400_BAD_REQUEST,
- )
+ # Sign up Process
+ if user is None:
+ user = User.objects.create(email=email, username=uuid.uuid4().hex)
+ user.set_password(password)
+ # settings last actives for the user
+ user.last_active = timezone.now()
+ user.last_login_time = timezone.now()
+ user.last_login_ip = request.META.get("REMOTE_ADDR")
+ user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
+ user.token_updated_at = timezone.now()
+ user.save()
-class SignInEndpoint(BaseAPIView):
- permission_classes = (AllowAny,)
+ serialized_user = UserSerializer(user).data
- def post(self, request):
- try:
- email = request.data.get("email", False)
- password = request.data.get("password", False)
+ access_token, refresh_token = get_tokens_for_user(user)
- ## Raise exception if any of the above are missing
- if not email or not password:
- return Response(
- {"error": "Both email and password are required"},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- email = email.strip().lower()
-
- try:
- validate_email(email)
- except ValidationError as e:
- return Response(
- {"error": "Please provide a valid email address."},
- status=status.HTTP_400_BAD_REQUEST,
- )
-
- user = User.objects.get(email=email)
+ data = {
+ "access_token": access_token,
+ "refresh_token": refresh_token,
+ "user": serialized_user,
+ }
- if not user.check_password(password):
- return Response(
- {
- "error": "Sorry, we could not find a user with the provided credentials. Please try again."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
- if not user.is_active:
- return Response(
- {
- "error": "Your account has been deactivated. Please contact your site administrator."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
+ return Response(data, status=status.HTTP_200_OK)
+ # Sign in Process
+ else:
+ if not user.check_password(password):
+ return Response(
+ {
+ "error": "Sorry, we could not find a user with the provided credentials. Please try again."
+ },
+ status=status.HTTP_403_FORBIDDEN,
+ )
+ if not user.is_active:
+ return Response(
+ {
+ "error": "Your account has been deactivated. Please contact your site administrator."
+ },
+ status=status.HTTP_403_FORBIDDEN,
+ )
- serialized_user = UserSerializer(user).data
+ serialized_user = UserSerializer(user).data
- # settings last active for the user
- user.last_active = timezone.now()
- user.last_login_time = timezone.now()
- user.last_login_ip = request.META.get("REMOTE_ADDR")
- user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
- user.token_updated_at = timezone.now()
- user.save()
+ # settings last active for the user
+ user.last_active = timezone.now()
+ user.last_login_time = timezone.now()
+ user.last_login_ip = request.META.get("REMOTE_ADDR")
+ user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
+ user.token_updated_at = timezone.now()
+ user.save()
- access_token, refresh_token = get_tokens_for_user(user)
+ access_token, refresh_token = get_tokens_for_user(user)
- data = {
- "access_token": access_token,
- "refresh_token": refresh_token,
- "user": serialized_user,
- }
+ data = {
+ "access_token": access_token,
+ "refresh_token": refresh_token,
+ "user": serialized_user,
+ }
- return Response(data, status=status.HTTP_200_OK)
+ return Response(data, status=status.HTTP_200_OK)
- except User.DoesNotExist:
- return Response(
- {
- "error": "Sorry, we could not find a user with the provided credentials. Please try again."
- },
- status=status.HTTP_403_FORBIDDEN,
- )
except Exception as e:
capture_exception(e)
return Response(
diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py
index 37082e0ec8a..4f7e7473b34 100644
--- a/apiserver/plane/api/views/issue.py
+++ b/apiserver/plane/api/views/issue.py
@@ -39,6 +39,7 @@
IssueBlocker,
CycleIssue,
ModuleIssue,
+ IssueLink,
)
from plane.bgtasks.issue_activites_task import issue_activity
@@ -75,7 +76,6 @@ def perform_update(self, serializer):
self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first()
)
if current_instance is not None:
-
issue_activity.delay(
{
"type": "issue.activity",
@@ -92,7 +92,6 @@ def perform_update(self, serializer):
return super().perform_update(serializer)
def get_queryset(self):
-
return (
super()
.get_queryset()
@@ -136,6 +135,12 @@ def get_queryset(self):
).prefetch_related("module__members"),
),
)
+ .prefetch_related(
+ Prefetch(
+ "issue_link",
+ queryset=IssueLink.objects.select_related("issue"),
+ )
+ )
)
def grouper(self, issue, group_by):
@@ -265,6 +270,12 @@ def get(self, request, slug):
queryset=ModuleIssue.objects.select_related("module", "issue"),
),
)
+ .prefetch_related(
+ Prefetch(
+ "issue_link",
+ queryset=IssueLink.objects.select_related("issue"),
+ )
+ )
)
serializer = IssueSerializer(issues, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
@@ -277,7 +288,6 @@ def get(self, request, slug):
class WorkSpaceIssuesEndpoint(BaseAPIView):
-
permission_classes = [
WorkSpaceAdminPermission,
]
@@ -298,7 +308,6 @@ def get(self, request, slug):
class IssueActivityEndpoint(BaseAPIView):
-
permission_classes = [
ProjectEntityPermission,
]
@@ -333,7 +342,6 @@ def get(self, request, slug, project_id, issue_id):
class IssueCommentViewSet(BaseViewSet):
-
serializer_class = IssueCommentSerializer
model = IssueComment
permission_classes = [
@@ -436,7 +444,6 @@ def list(self, request, slug, project_id):
def create(self, request, slug, project_id):
try:
-
issue_property, created = IssueProperty.objects.get_or_create(
user=request.user,
project_id=project_id,
@@ -463,7 +470,6 @@ def create(self, request, slug, project_id):
class LabelViewSet(BaseViewSet):
-
serializer_class = LabelSerializer
model = Label
permission_classes = [
@@ -490,14 +496,12 @@ def get_queryset(self):
class BulkDeleteIssuesEndpoint(BaseAPIView):
-
permission_classes = [
ProjectEntityPermission,
]
def delete(self, request, slug, project_id):
try:
-
issue_ids = request.data.get("issue_ids", [])
if not len(issue_ids):
@@ -527,14 +531,12 @@ def delete(self, request, slug, project_id):
class SubIssuesEndpoint(BaseAPIView):
-
permission_classes = [
ProjectEntityPermission,
]
def get(self, request, slug, project_id, issue_id):
try:
-
sub_issues = (
Issue.objects.filter(
parent_id=issue_id, workspace__slug=slug, project_id=project_id
diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py
index 2ec6faf1e53..e24477ecd30 100644
--- a/apiserver/plane/api/views/project.py
+++ b/apiserver/plane/api/views/project.py
@@ -75,7 +75,6 @@ def get_queryset(self):
def create(self, request, slug):
try:
-
workspace = Workspace.objects.get(slug=slug)
serializer = ProjectSerializer(
@@ -96,6 +95,7 @@ def create(self, request, slug):
"color": "#5e6ad2",
"sequence": 15000,
"group": "backlog",
+ "default": True,
},
{
"name": "Todo",
@@ -132,6 +132,7 @@ def create(self, request, slug):
sequence=state["sequence"],
workspace=serializer.instance.workspace,
group=state["group"],
+ default=state.get("default", False),
)
for state in states
]
@@ -188,7 +189,7 @@ def partial_update(self, request, slug, pk=None):
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
)
- except (Project.DoesNotExist or Workspace.DoesNotExist) as e:
+    except (Project.DoesNotExist, Workspace.DoesNotExist) as e:
return Response(
{"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
)
@@ -206,14 +207,12 @@ def partial_update(self, request, slug, pk=None):
class InviteProjectEndpoint(BaseAPIView):
-
permission_classes = [
ProjectBasePermission,
]
def post(self, request, slug, project_id):
try:
-
email = request.data.get("email", False)
role = request.data.get("role", False)
@@ -287,7 +286,6 @@ def post(self, request, slug, project_id):
class UserProjectInvitationsViewset(BaseViewSet):
-
serializer_class = ProjectMemberInviteSerializer
model = ProjectMemberInvite
@@ -301,7 +299,6 @@ def get_queryset(self):
def create(self, request):
try:
-
invitations = request.data.get("invitations")
project_invitations = ProjectMemberInvite.objects.filter(
pk__in=invitations, accepted=True
@@ -331,7 +328,6 @@ def create(self, request):
class ProjectMemberViewSet(BaseViewSet):
-
serializer_class = ProjectMemberSerializer
model = ProjectMember
permission_classes = [
@@ -356,14 +352,12 @@ def get_queryset(self):
class AddMemberToProjectEndpoint(BaseAPIView):
-
permission_classes = [
ProjectBasePermission,
]
def post(self, request, slug, project_id):
try:
-
member_id = request.data.get("member_id", False)
role = request.data.get("role", False)
@@ -412,13 +406,11 @@ def post(self, request, slug, project_id):
class AddTeamToProjectEndpoint(BaseAPIView):
-
permission_classes = [
ProjectBasePermission,
]
def post(self, request, slug, project_id):
-
try:
team_members = TeamMember.objects.filter(
workspace__slug=slug, team__in=request.data.get("teams", [])
@@ -467,7 +459,6 @@ def post(self, request, slug, project_id):
class ProjectMemberInvitationsViewset(BaseViewSet):
-
serializer_class = ProjectMemberInviteSerializer
model = ProjectMemberInvite
@@ -489,7 +480,6 @@ def get_queryset(self):
class ProjectMemberInviteDetailViewSet(BaseViewSet):
-
serializer_class = ProjectMemberInviteSerializer
model = ProjectMemberInvite
@@ -509,14 +499,12 @@ def get_queryset(self):
class ProjectIdentifierEndpoint(BaseAPIView):
-
permission_classes = [
ProjectBasePermission,
]
def get(self, request, slug):
try:
-
name = request.GET.get("name", "").strip().upper()
if name == "":
@@ -541,7 +529,6 @@ def get(self, request, slug):
def delete(self, request, slug):
try:
-
name = request.data.get("name", "").strip().upper()
if name == "":
@@ -616,7 +603,6 @@ def post(self, request, slug):
class ProjectUserViewsEndpoint(BaseAPIView):
def post(self, request, slug, project_id):
try:
-
project = Project.objects.get(pk=project_id, workspace__slug=slug)
project_member = ProjectMember.objects.filter(
@@ -655,7 +641,6 @@ def post(self, request, slug, project_id):
class ProjectMemberUserEndpoint(BaseAPIView):
def get(self, request, slug, project_id):
try:
-
project_member = ProjectMember.objects.get(
project_id=project_id, workspace__slug=slug, member=request.user
)
diff --git a/apiserver/plane/api/views/state.py b/apiserver/plane/api/views/state.py
index 8054b15dd92..4616fcee7f0 100644
--- a/apiserver/plane/api/views/state.py
+++ b/apiserver/plane/api/views/state.py
@@ -1,3 +1,12 @@
+# Python imports
+from itertools import groupby
+
+# Third party imports
+from rest_framework.response import Response
+from rest_framework import status
+from sentry_sdk import capture_exception
+
+
# Module imports
from . import BaseViewSet
from plane.api.serializers import StateSerializer
@@ -6,7 +15,6 @@
class StateViewSet(BaseViewSet):
-
serializer_class = StateSerializer
model = State
permission_classes = [
@@ -27,3 +35,38 @@ def get_queryset(self):
.select_related("workspace")
.distinct()
)
+
+ def list(self, request, slug, project_id):
+ try:
+ state_dict = dict()
+ states = StateSerializer(self.get_queryset(), many=True).data
+
+ for key, value in groupby(
+ sorted(states, key=lambda state: state["group"]),
+ lambda state: state.get("group"),
+ ):
+ state_dict[str(key)] = list(value)
+
+ return Response(state_dict, status=status.HTTP_200_OK)
+ except Exception as e:
+ capture_exception(e)
+ return Response(
+ {"error": "Something went wrong please try again later"},
+ status=status.HTTP_400_BAD_REQUEST,
+ )
+
+ def destroy(self, request, slug, project_id, pk):
+ try:
+ state = State.objects.get(
+ pk=pk, project_id=project_id, workspace__slug=slug
+ )
+
+ if state.default:
+ return Response(
+                    {"error": "Default state cannot be deleted"}, status=status.HTTP_400_BAD_REQUEST
+ )
+
+ state.delete()
+ return Response(status=status.HTTP_204_NO_CONTENT)
+ except State.DoesNotExist:
+            return Response({"error": "State does not exist"}, status=status.HTTP_404_NOT_FOUND)
diff --git a/apiserver/plane/db/migrations/0020_auto_20230214_0118.py b/apiserver/plane/db/migrations/0020_auto_20230214_0118.py
new file mode 100644
index 00000000000..19276407821
--- /dev/null
+++ b/apiserver/plane/db/migrations/0020_auto_20230214_0118.py
@@ -0,0 +1,69 @@
+# Generated by Django 3.2.16 on 2023-02-13 19:48
+
+from django.db import migrations, models
+import django.db.models.deletion
+
+
+class Migration(migrations.Migration):
+
+ dependencies = [
+ ('db', '0019_auto_20230131_0049'),
+ ]
+
+ operations = [
+ migrations.RenameField(
+ model_name='label',
+ old_name='colour',
+ new_name='color',
+ ),
+ migrations.AddField(
+ model_name='apitoken',
+ name='workspace',
+ field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='api_tokens', to='db.workspace'),
+ ),
+ migrations.AddField(
+ model_name='issue',
+ name='completed_at',
+ field=models.DateTimeField(null=True),
+ ),
+ migrations.AddField(
+ model_name='issue',
+ name='sort_order',
+ field=models.FloatField(default=65535),
+ ),
+ migrations.AddField(
+ model_name='project',
+ name='cycle_view',
+ field=models.BooleanField(default=True),
+ ),
+ migrations.AddField(
+ model_name='project',
+ name='module_view',
+ field=models.BooleanField(default=True),
+ ),
+ migrations.AddField(
+ model_name='state',
+ name='default',
+ field=models.BooleanField(default=False),
+ ),
+ migrations.AlterField(
+ model_name='issue',
+ name='description',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ migrations.AlterField(
+ model_name='issue',
+ name='description_html',
+            field=models.TextField(blank=True, default='<p></p>'),
+ ),
+ migrations.AlterField(
+ model_name='issuecomment',
+ name='comment_html',
+ field=models.TextField(blank=True, default=''),
+ ),
+ migrations.AlterField(
+ model_name='issuecomment',
+ name='comment_json',
+ field=models.JSONField(blank=True, default=dict),
+ ),
+ ]
diff --git a/apiserver/plane/db/models/__init__.py b/apiserver/plane/db/models/__init__.py
index ef7ad5b8d49..d12578fa1bd 100644
--- a/apiserver/plane/db/models/__init__.py
+++ b/apiserver/plane/db/models/__init__.py
@@ -23,6 +23,7 @@
IssueAssignee,
Label,
IssueBlocker,
+ IssueLink,
)
from .asset import FileAsset
diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py
index 3331b08329e..d212f756556 100644
--- a/apiserver/plane/db/models/issue.py
+++ b/apiserver/plane/db/models/issue.py
@@ -4,6 +4,7 @@
from django.conf import settings
from django.db.models.signals import post_save
from django.dispatch import receiver
+from django.utils import timezone
# Module imports
from . import ProjectBaseModel
@@ -58,6 +59,7 @@ class Issue(ProjectBaseModel):
"db.Label", blank=True, related_name="labels", through="IssueLabel"
)
sort_order = models.FloatField(default=65535)
+ completed_at = models.DateTimeField(null=True)
class Meta:
verbose_name = "Issue"
@@ -81,12 +83,32 @@ def save(self, *args, **kwargs):
try:
from plane.db.models import State
- self.state, created = State.objects.get_or_create(
- project=self.project, name="Backlog"
- )
+ default_state = State.objects.filter(
+ project=self.project, default=True
+ ).first()
+ # if there is no default state assign any random state
+ if default_state is None:
+ self.state = State.objects.filter(project=self.project).first()
+ else:
+ self.state = default_state
except ImportError:
pass
+ else:
+ try:
+ from plane.db.models import State
+ # Get the completed states of the project
+ completed_states = State.objects.filter(
+ group="completed", project=self.project
+ ).values_list("pk", flat=True)
+ # Check if the current issue state and completed state id are same
+ if self.state.id in completed_states:
+ self.completed_at = timezone.now()
+ else:
+ self.completed_at = None
+
+ except ImportError:
+ pass
# Strip the html tags using html parser
self.description_stripped = (
None
@@ -139,6 +161,23 @@ def __str__(self):
return f"{self.issue.name} {self.assignee.email}"
+class IssueLink(ProjectBaseModel):
+ title = models.CharField(max_length=255, null=True)
+ url = models.URLField()
+ issue = models.ForeignKey(
+ "db.Issue", on_delete=models.CASCADE, related_name="issue_link"
+ )
+
+ class Meta:
+ verbose_name = "Issue Link"
+ verbose_name_plural = "Issue Links"
+ db_table = "issue_links"
+ ordering = ("-created_at",)
+
+ def __str__(self):
+ return f"{self.issue.name} {self.url}"
+
+
class IssueActivity(ProjectBaseModel):
issue = models.ForeignKey(
Issue, on_delete=models.CASCADE, related_name="issue_activity"
diff --git a/apiserver/plane/db/models/state.py b/apiserver/plane/db/models/state.py
index 2c62879181b..d66ecfa72a0 100644
--- a/apiserver/plane/db/models/state.py
+++ b/apiserver/plane/db/models/state.py
@@ -23,6 +23,7 @@ class State(ProjectBaseModel):
default="backlog",
max_length=20,
)
+ default = models.BooleanField(default=False)
def __str__(self):
"""Return name of the state"""
diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py
index e14c250b498..9d270662eb2 100644
--- a/apiserver/plane/settings/common.py
+++ b/apiserver/plane/settings/common.py
@@ -1,12 +1,13 @@
import os
import datetime
from datetime import timedelta
+from django.core.management.utils import get_random_secret_key
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
-SECRET_KEY = os.environ.get("SECRET_KEY")
+SECRET_KEY = os.environ.get("SECRET_KEY", get_random_secret_key())
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
diff --git a/apiserver/plane/settings/local.py b/apiserver/plane/settings/local.py
index 4d4af9b77d8..3fa0fae5c74 100644
--- a/apiserver/plane/settings/local.py
+++ b/apiserver/plane/settings/local.py
@@ -2,6 +2,7 @@
from __future__ import absolute_import
+import dj_database_url
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.redis import RedisIntegration
@@ -24,6 +25,10 @@
}
}
+DOCKERIZED = int(os.environ.get("DOCKERIZED", 0))
+
+if DOCKERIZED:
+ DATABASES["default"] = dj_database_url.config()
CACHES = {
"default": {
@@ -41,15 +46,16 @@
CORS_ORIGIN_ALLOW_ALL = True
-sentry_sdk.init(
- dsn=os.environ.get("SENTRY_DSN"),
- integrations=[DjangoIntegration(), RedisIntegration()],
- # If you wish to associate users to errors (assuming you are using
- # django.contrib.auth) you may enable sending PII data.
- send_default_pii=True,
- environment="local",
- traces_sample_rate=0.7,
-)
+if os.environ.get("SENTRY_DSN", False):
+ sentry_sdk.init(
+ dsn=os.environ.get("SENTRY_DSN"),
+ integrations=[DjangoIntegration(), RedisIntegration()],
+ # If you wish to associate users to errors (assuming you are using
+ # django.contrib.auth) you may enable sending PII data.
+ send_default_pii=True,
+ environment="local",
+ traces_sample_rate=0.7,
+ )
REDIS_HOST = "localhost"
REDIS_PORT = 6379
@@ -64,5 +70,10 @@
},
}
-WEB_URL = "http://localhost:3000"
+MEDIA_URL = "/uploads/"
+MEDIA_ROOT = os.path.join(BASE_DIR, "uploads")
+
+if DOCKERIZED:
+ REDIS_URL = os.environ.get("REDIS_URL")
+WEB_URL = os.environ.get("WEB_URL", "http://localhost:3000")
diff --git a/apiserver/plane/settings/production.py b/apiserver/plane/settings/production.py
index c8390148464..0401a0f0e11 100644
--- a/apiserver/plane/settings/production.py
+++ b/apiserver/plane/settings/production.py
@@ -33,6 +33,10 @@
DATABASES["default"] = dj_database_url.config()
SITE_ID = 1
+DOCKERIZED = os.environ.get(
+ "DOCKERIZED", False
+) # Set the variable true if running in docker-compose environment
+
# Enable Connection Pooling (if desired)
# DATABASES['default']['ENGINE'] = 'django_postgrespool'
@@ -48,99 +52,110 @@
# Simplified static file serving.
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
+if os.environ.get("SENTRY_DSN", False):
+ sentry_sdk.init(
+ dsn=os.environ.get("SENTRY_DSN", ""),
+ integrations=[DjangoIntegration(), RedisIntegration()],
+ # If you wish to associate users to errors (assuming you are using
+ # django.contrib.auth) you may enable sending PII data.
+ traces_sample_rate=1,
+ send_default_pii=True,
+ environment="production",
+ )
-sentry_sdk.init(
- dsn=os.environ.get("SENTRY_DSN"),
- integrations=[DjangoIntegration(), RedisIntegration()],
- # If you wish to associate users to errors (assuming you are using
- # django.contrib.auth) you may enable sending PII data.
- traces_sample_rate=1,
- send_default_pii=True,
- environment="production",
-)
+if (
+ os.environ.get("AWS_REGION", False)
+ and os.environ.get("AWS_ACCESS_KEY_ID", False)
+ and os.environ.get("AWS_SECRET_ACCESS_KEY", False)
+ and os.environ.get("AWS_S3_BUCKET_NAME", False)
+):
+ # The AWS region to connect to.
+ AWS_REGION = os.environ.get("AWS_REGION", "")
-# The AWS region to connect to.
-AWS_REGION = os.environ.get("AWS_REGION")
+ # The AWS access key to use.
+ AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID", "")
-# The AWS access key to use.
-AWS_ACCESS_KEY_ID = os.environ.get("AWS_ACCESS_KEY_ID")
+ # The AWS secret access key to use.
+ AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY", "")
-# The AWS secret access key to use.
-AWS_SECRET_ACCESS_KEY = os.environ.get("AWS_SECRET_ACCESS_KEY")
+ # The optional AWS session token to use.
+ # AWS_SESSION_TOKEN = ""
-# The optional AWS session token to use.
-# AWS_SESSION_TOKEN = ""
+ # The name of the bucket to store files in.
+ AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME", "")
+ # How to construct S3 URLs ("auto", "path", "virtual").
+ AWS_S3_ADDRESSING_STYLE = "auto"
-# The name of the bucket to store files in.
-AWS_S3_BUCKET_NAME = os.environ.get("AWS_S3_BUCKET_NAME")
+ # The full URL to the S3 endpoint. Leave blank to use the default region URL.
+ AWS_S3_ENDPOINT_URL = ""
-# How to construct S3 URLs ("auto", "path", "virtual").
-AWS_S3_ADDRESSING_STYLE = "auto"
+ # A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator.
+ AWS_S3_KEY_PREFIX = ""
-# The full URL to the S3 endpoint. Leave blank to use the default region URL.
-AWS_S3_ENDPOINT_URL = ""
+ # Whether to enable authentication for stored files. If True, then generated URLs will include an authentication
+ # token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token,
+ # and their permissions will be set to "public-read".
+ AWS_S3_BUCKET_AUTH = False
-# A prefix to be applied to every stored file. This will be joined to every filename using the "/" separator.
-AWS_S3_KEY_PREFIX = ""
+ # How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH`
+ # is True. It also affects the "Cache-Control" header of the files.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_MAX_AGE_SECONDS = 60 * 60 # 1 hour.
-# Whether to enable authentication for stored files. If True, then generated URLs will include an authentication
-# token valid for `AWS_S3_MAX_AGE_SECONDS`. If False, then generated URLs will not include an authentication token,
-# and their permissions will be set to "public-read".
-AWS_S3_BUCKET_AUTH = False
+ # A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting
+ # cannot be used with `AWS_S3_BUCKET_AUTH`.
+ AWS_S3_PUBLIC_URL = ""
-# How long generated URLs are valid for. This affects the expiry of authentication tokens if `AWS_S3_BUCKET_AUTH`
-# is True. It also affects the "Cache-Control" header of the files.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_MAX_AGE_SECONDS = 60 * 60 # 1 hours.
+ # If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you
+ # understand the consequences before enabling.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_REDUCED_REDUNDANCY = False
-# A URL prefix to be used for generated URLs. This is useful if your bucket is served through a CDN. This setting
-# cannot be used with `AWS_S3_BUCKET_AUTH`.
-AWS_S3_PUBLIC_URL = ""
+ # The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a
+ # single `name` argument.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_CONTENT_DISPOSITION = ""
-# If True, then files will be stored with reduced redundancy. Check the S3 documentation and make sure you
-# understand the consequences before enabling.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_REDUCED_REDUNDANCY = False
+ # The Content-Language header used when the file is downloaded. This can be a string, or a function taking a
+ # single `name` argument.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_CONTENT_LANGUAGE = ""
-# The Content-Disposition header used when the file is downloaded. This can be a string, or a function taking a
-# single `name` argument.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_CONTENT_DISPOSITION = ""
+ # A mapping of custom metadata for each file. Each value can be a string, or a function taking a
+ # single `name` argument.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_METADATA = {}
-# The Content-Language header used when the file is downloaded. This can be a string, or a function taking a
-# single `name` argument.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_CONTENT_LANGUAGE = ""
+ # If True, then files will be stored using AES256 server-side encryption.
+ # If this is a string value (e.g., "aws:kms"), that encryption type will be used.
+ # Otherwise, server-side encryption is not enabled.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_ENCRYPT_KEY = False
-# A mapping of custom metadata for each file. Each value can be a string, or a function taking a
-# single `name` argument.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_METADATA = {}
+ # The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present.
+ # This is only relevant if AWS S3 KMS server-side encryption is enabled (above).
+ # AWS_S3_KMS_ENCRYPTION_KEY_ID = ""
-# If True, then files will be stored using AES256 server-side encryption.
-# If this is a string value (e.g., "aws:kms"), that encryption type will be used.
-# Otherwise, server-side encryption is not be enabled.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_ENCRYPT_KEY = False
+ # If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their
+ # compressed size is smaller than their uncompressed size.
+ # Important: Changing this setting will not affect existing files.
+ AWS_S3_GZIP = True
-# The AWS S3 KMS encryption key ID (the `SSEKMSKeyId` parameter) is set from this string if present.
-# This is only relevant if AWS S3 KMS server-side encryption is enabled (above).
-# AWS_S3_KMS_ENCRYPTION_KEY_ID = ""
+ # The signature version to use for S3 requests.
+ AWS_S3_SIGNATURE_VERSION = None
-# If True, then text files will be stored using gzip content encoding. Files will only be gzipped if their
-# compressed size is smaller than their uncompressed size.
-# Important: Changing this setting will not affect existing files.
-AWS_S3_GZIP = True
+ # If True, then files with the same name will overwrite each other. By default it's set to False to have
+ # extra characters appended.
+ AWS_S3_FILE_OVERWRITE = False
-# The signature version to use for S3 requests.
-AWS_S3_SIGNATURE_VERSION = None
+ # AWS Settings End
-# If True, then files with the same name will overwrite each other. By default it's set to False to have
-# extra characters appended.
-AWS_S3_FILE_OVERWRITE = False
-
-# AWS Settings End
+ DEFAULT_FILE_STORAGE = "django_s3_storage.storage.S3Storage"
+
+else:
+ MEDIA_URL = "/uploads/"
+ MEDIA_ROOT = os.path.join(BASE_DIR, "uploads")
# Enable Connection Pooling (if desired)
@@ -155,7 +170,6 @@
]
-DEFAULT_FILE_STORAGE = "django_s3_storage.storage.S3Storage"
# Simplified static file serving.
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
@@ -165,16 +179,27 @@
REDIS_URL = os.environ.get("REDIS_URL")
-CACHES = {
- "default": {
- "BACKEND": "django_redis.cache.RedisCache",
- "LOCATION": REDIS_URL,
- "OPTIONS": {
- "CLIENT_CLASS": "django_redis.client.DefaultClient",
- "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False},
- },
+if DOCKERIZED:
+ CACHES = {
+ "default": {
+ "BACKEND": "django_redis.cache.RedisCache",
+ "LOCATION": REDIS_URL,
+ "OPTIONS": {
+ "CLIENT_CLASS": "django_redis.client.DefaultClient",
+ },
+ }
+ }
+else:
+ CACHES = {
+ "default": {
+ "BACKEND": "django_redis.cache.RedisCache",
+ "LOCATION": REDIS_URL,
+ "OPTIONS": {
+ "CLIENT_CLASS": "django_redis.client.DefaultClient",
+ "CONNECTION_POOL_KWARGS": {"ssl_cert_reqs": False},
+ },
+ }
}
-}
RQ_QUEUES = {
"default": {
@@ -183,10 +208,4 @@
}
-url = urlparse(os.environ.get("REDIS_URL"))
-
-DOCKERIZED = os.environ.get(
- "DOCKERIZED", False
-) # Set the variable true if running in docker-compose environment
-
WEB_URL = os.environ.get("WEB_URL")
diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt
index e9ca677dbd2..ffe11a2347f 100644
--- a/apiserver/requirements/base.txt
+++ b/apiserver/requirements/base.txt
@@ -1,28 +1,29 @@
# base requirements
-Django==3.2.17
+Django==3.2.18
django-braces==1.15.0
-django-taggit==2.1.0
-psycopg2==2.9.3
-django-oauth-toolkit==2.0.0
-mistune==2.0.3
+django-taggit==3.1.0
+psycopg2==2.9.5
+django-oauth-toolkit==2.2.0
+mistune==2.0.4
djangorestframework==3.14.0
-redis==4.2.2
-django-nested-admin==3.4.0
-django-cors-headers==3.11.0
-whitenoise==6.0.0
-django-allauth==0.50.0
+redis==4.4.2
+django-nested-admin==4.0.2
+django-cors-headers==3.13.0
+whitenoise==6.3.0
+django-allauth==0.52.0
faker==13.4.0
-django-filter==21.1
-jsonmodels==2.5.0
-djangorestframework-simplejwt==5.1.0
-sentry-sdk==1.13.0
-django-s3-storage==0.13.6
+django-filter==22.1
+jsonmodels==2.6.0
+djangorestframework-simplejwt==5.2.2
+sentry-sdk==1.14.0
+django-s3-storage==0.13.11
django-crum==0.7.9
django-guardian==2.4.0
dj_rest_auth==2.2.5
-google-auth==2.9.1
-google-api-python-client==2.55.0
-django-rq==2.5.1
+google-auth==2.16.0
+google-api-python-client==2.75.0
+django-rq==2.6.0
django-redis==5.2.0
-uvicorn==0.20.0
\ No newline at end of file
+uvicorn==0.20.0
+channels==4.0.0
\ No newline at end of file
diff --git a/apiserver/requirements/local.txt b/apiserver/requirements/local.txt
index 238fe63f20d..efd74a071bd 100644
--- a/apiserver/requirements/local.txt
+++ b/apiserver/requirements/local.txt
@@ -1,3 +1,3 @@
-r base.txt
-django-debug-toolbar==3.2.4
\ No newline at end of file
+django-debug-toolbar==3.8.1
\ No newline at end of file
diff --git a/apiserver/requirements/production.txt b/apiserver/requirements/production.txt
index 231d3c0a129..2547ce255fa 100644
--- a/apiserver/requirements/production.txt
+++ b/apiserver/requirements/production.txt
@@ -1,12 +1,12 @@
-r base.txt
-dj-database-url==0.5.0
+dj-database-url==1.2.0
gunicorn==20.1.0
-whitenoise==6.0.0
-django-storages==1.12.3
+whitenoise==6.3.0
+django-storages==1.13.2
boto==2.49.0
-django-anymail==8.5
-twilio==7.8.2
-django-debug-toolbar==3.2.4
+django-anymail==9.0
+twilio==7.16.2
+django-debug-toolbar==3.8.1
gevent==22.10.2
psycogreen==1.0.2
\ No newline at end of file
diff --git a/apiserver/runtime.txt b/apiserver/runtime.txt
index cd6f13073e4..2d4e05157e1 100644
--- a/apiserver/runtime.txt
+++ b/apiserver/runtime.txt
@@ -1 +1 @@
-python-3.11.1
\ No newline at end of file
+python-3.11.2
\ No newline at end of file
diff --git a/apiserver/templates/admin/base_site.html b/apiserver/templates/admin/base_site.html
index 4fdb5e19b54..fd1d890673b 100644
--- a/apiserver/templates/admin/base_site.html
+++ b/apiserver/templates/admin/base_site.html
@@ -17,7 +17,7 @@
color: #FFFFFF;
}
-