Merge branch 'develop' of github.com:makeplane/plane into feat/pagination

This commit is contained in:
rahulramesha 2024-03-27 14:15:49 +05:30
commit 3e55490bbd
1160 changed files with 14722 additions and 7913 deletions

View File

@@ -1,23 +0,0 @@
version = 1
exclude_patterns = [
"bin/**",
"**/node_modules/",
"**/*.min.js"
]
[[analyzers]]
name = "shell"
[[analyzers]]
name = "javascript"
[analyzers.meta]
plugins = ["react"]
environment = ["nodejs"]
[[analyzers]]
name = "python"
[analyzers.meta]
runtime_version = "3.x.x"

View File

@@ -2,27 +2,6 @@ name: Branch Build
on:
workflow_dispatch:
inputs:
build-web:
required: false
description: "Build Web"
type: boolean
default: false
build-space:
required: false
description: "Build Space"
type: boolean
default: false
build-api:
required: false
description: "Build API"
type: boolean
default: false
build-proxy:
required: false
description: "Build Proxy"
type: boolean
default: false
push:
branches:
- master
@ -95,7 +74,7 @@ jobs:
- nginx/**
branch_build_push_frontend:
if: ${{ needs.branch_build_setup.outputs.build_frontend == 'true' || github.event.inputs.build-web=='true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
if: ${{ needs.branch_build_setup.outputs.build_frontend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@ -147,7 +126,7 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_space:
if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event.inputs.build-space=='true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@ -199,7 +178,7 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_backend:
if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event.inputs.build-api=='true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@ -251,7 +230,7 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_proxy:
if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event.inputs.build-web=='true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'workflow_dispatch' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:

View File

@@ -1,13 +1,13 @@
name: "CodeQL"
on:
workflow_dispatch:
push:
branches: [ 'develop', 'preview', 'master' ]
branches: ["master"]
pull_request:
# The branches below must be a subset of the branches above
branches: [ 'develop', 'preview', 'master' ]
branches: ["develop", "preview", "master"]
schedule:
- cron: '53 19 * * 5'
- cron: "53 19 * * 5"
jobs:
analyze:
@ -21,45 +21,44 @@ jobs:
strategy:
fail-fast: false
matrix:
language: [ 'python', 'javascript' ]
language: ["python", "javascript"]
# CodeQL supports [ 'cpp', 'csharp', 'go', 'java', 'javascript', 'python', 'ruby' ]
# Use only 'java' to analyze code written in Java, Kotlin or both
# Use only 'javascript' to analyze code written in JavaScript, TypeScript or both
# Learn more about CodeQL language support at https://aka.ms/codeql-docs/language-support
steps:
- name: Checkout repository
uses: actions/checkout@v3
- name: Checkout repository
uses: actions/checkout@v3
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# Initializes the CodeQL tools for scanning.
- name: Initialize CodeQL
uses: github/codeql-action/init@v2
with:
languages: ${{ matrix.language }}
# If you wish to specify custom queries, you can do so here or in a config file.
# By default, queries listed here will override any specified in a config file.
# Prefix the list here with "+" to use these queries and those in the config file.
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# For more details on CodeQL's query packs, refer to: https://docs.github.com/en/code-security/code-scanning/automatically-scanning-your-code-for-vulnerabilities-and-errors/configuring-code-scanning#using-queries-in-ql-packs
# queries: security-extended,security-and-quality
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Autobuild attempts to build any compiled languages (C/C++, C#, Go, Java, or Swift).
# If this step fails, then you should remove it and run the build manually (see below)
- name: Autobuild
uses: github/codeql-action/autobuild@v2
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# Command-line programs to run using the OS shell.
# 📚 See https://docs.github.com/en/actions/using-workflows/workflow-syntax-for-github-actions#jobsjob_idstepsrun
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# If the Autobuild fails above, remove it and uncomment the following three lines.
# modify them (or add more) to build your code if your project, please refer to the EXAMPLE below for guidance.
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
# - run: |
# echo "Run, Build Application using script"
# ./location_of_script_within_repo/buildscript.sh
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
with:
category: "/language:${{matrix.language}}"
- name: Perform CodeQL Analysis
uses: github/codeql-action/analyze@v2
with:
category: "/language:${{matrix.language}}"

1
.gitignore vendored
View File

@ -51,6 +51,7 @@ staticfiles
mediafiles
.env
.DS_Store
logs/
node_modules/
assets/dist/

View File

@ -17,10 +17,10 @@
</p>
<p align="center">
<a href="http://www.plane.so"><b>Website</b></a>
<a href="https://github.com/makeplane/plane/releases"><b>Releases</b></a>
<a href="https://twitter.com/planepowers"><b>Twitter</b></a>
<a href="https://docs.plane.so/"><b>Documentation</b></a>
<a href="https://dub.sh/plane-website-readme"><b>Website</b></a>
<a href="https://git.new/releases"><b>Releases</b></a>
<a href="https://dub.sh/planepowershq"><b>Twitter</b></a>
<a href="https://dub.sh/planedocs"><b>Documentation</b></a>
</p>
<p>
@ -40,15 +40,15 @@
</a>
</p>
Meet [Plane](https://plane.so). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind. 🧘‍♀️
Meet [Plane](https://dub.sh/plane-website-readme). An open-source software development tool to manage issues, sprints, and product roadmaps with peace of mind. 🧘‍♀️
> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve on our upcoming releases.
> Plane is still in its early days, not everything will be perfect yet, and hiccups may happen. Please let us know of any suggestions, ideas, or bugs that you encounter on our [Discord](https://discord.com/invite/A92xrEGCge) or GitHub issues, and we will use your feedback to improve in our upcoming releases.
## ⚡ Installation
The easiest way to get started with Plane is by creating a [Plane Cloud](https://app.plane.so) account where we offer a hosted solution for users.
If you want more control over your data prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/docker-compose).
If you want more control over your data, prefer to self-host Plane, please refer to our [deployment documentation](https://docs.plane.so/docker-compose).
| Installation Methods | Documentation Link |
| -------------------- | ------------------------------------------------------------------------------------------------------------------------------------------------------------ |
@ -59,9 +59,9 @@ If you want more control over your data prefer to self-host Plane, please refer
## 🚀 Features
- **Issues**: Quickly create issues and add details using a powerful, rich text editor that supports file uploads. Add sub-properties and references to problems for better organization and tracking.
- **Issues**: Quickly create issues and add details using a powerful rich text editor that supports file uploads. Add sub-properties and references to problems for better organization and tracking.
- **Cycles**
- **Cycles**:
Keep up your team's momentum with Cycles. Gain insights into your project's progress with burn-down charts and other valuable features.
- **Modules**: Break down your large projects into smaller, more manageable modules. Assign modules between teams to track and plan your project's progress easily.
@ -74,11 +74,11 @@ If you want more control over your data prefer to self-host Plane, please refer
- **Drive** (_coming soon_): The drive helps you share documents, images, videos, or any other files that make sense to you or your team and align on the problem/solution.
## 🛠️ Contributors Quick Start
## 🛠️ Quick start for contributors
> Development system must have docker engine installed and running.
Setting up local environment is extremely easy and straight forward. Follow the below step and you will be ready to contribute
Setting up local environment is extremely easy and straight forward. Follow the below step and you will be ready to contribute -
1. Clone the code locally using:
```

View File

@ -44,4 +44,3 @@ WEB_URL="http://localhost"
# Gunicorn Workers
GUNICORN_WORKERS=2

View File

@ -48,8 +48,10 @@ USER root
RUN apk --no-cache add "bash~=5.2"
COPY ./bin ./bin/
RUN mkdir -p /code/plane/logs
RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
RUN chmod -R 777 /code
RUN chown -R captain:plane /code
USER captain

View File

@ -35,6 +35,7 @@ RUN addgroup -S plane && \
COPY . .
RUN mkdir -p /code/plane/logs
RUN chown -R captain.plane /code
RUN chmod -R +x /code/bin
RUN chmod -R 777 /code

View File

@ -182,7 +182,7 @@ def update_label_color():
labels = Label.objects.filter(color="")
updated_labels = []
for label in labels:
label.color = "#" + "%06x" % random.randint(0, 0xFFFFFF)
label.color = f"#{random.randint(0, 0xFFFFFF+1):06X}"
updated_labels.append(label)
Label.objects.bulk_update(updated_labels, ["color"], batch_size=100)

View File

@ -4,6 +4,7 @@ from plane.api.views.cycle import (
CycleAPIEndpoint,
CycleIssueAPIEndpoint,
TransferCycleIssueAPIEndpoint,
CycleArchiveUnarchiveAPIEndpoint,
)
urlpatterns = [
@ -32,4 +33,14 @@ urlpatterns = [
TransferCycleIssueAPIEndpoint.as_view(),
name="transfer-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/archive/",
CycleArchiveUnarchiveAPIEndpoint.as_view(),
name="cycle-archive-unarchive",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/",
CycleArchiveUnarchiveAPIEndpoint.as_view(),
name="cycle-archive-unarchive",
),
]

View File

@ -1,6 +1,10 @@
from django.urls import path
from plane.api.views import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
from plane.api.views import (
ModuleAPIEndpoint,
ModuleIssueAPIEndpoint,
ModuleArchiveUnarchiveAPIEndpoint,
)
urlpatterns = [
path(
@ -23,4 +27,14 @@ urlpatterns = [
ModuleIssueAPIEndpoint.as_view(),
name="module-issues",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:pk>/archive/",
ModuleArchiveUnarchiveAPIEndpoint.as_view(),
name="module-archive-unarchive",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/",
ModuleArchiveUnarchiveAPIEndpoint.as_view(),
name="module-archive-unarchive",
),
]

View File

@ -1,6 +1,9 @@
from django.urls import path
from plane.api.views import ProjectAPIEndpoint
from plane.api.views import (
ProjectAPIEndpoint,
ProjectArchiveUnarchiveAPIEndpoint,
)
urlpatterns = [
path(
@ -13,4 +16,9 @@ urlpatterns = [
ProjectAPIEndpoint.as_view(),
name="project",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archive/",
ProjectArchiveUnarchiveAPIEndpoint.as_view(),
name="project-archive-unarchive",
),
]

View File

@ -1,4 +1,4 @@
from .project import ProjectAPIEndpoint
from .project import ProjectAPIEndpoint, ProjectArchiveUnarchiveAPIEndpoint
from .state import StateAPIEndpoint
@ -14,8 +14,13 @@ from .cycle import (
CycleAPIEndpoint,
CycleIssueAPIEndpoint,
TransferCycleIssueAPIEndpoint,
CycleArchiveUnarchiveAPIEndpoint,
)
from .module import ModuleAPIEndpoint, ModuleIssueAPIEndpoint
from .module import (
ModuleAPIEndpoint,
ModuleIssueAPIEndpoint,
ModuleArchiveUnarchiveAPIEndpoint,
)
from .inbox import InboxIssueAPIEndpoint

View File

@ -1,26 +1,26 @@
# Python imports
import zoneinfo
from urllib.parse import urlparse
import zoneinfo
# Django imports
from django.conf import settings
from django.db import IntegrityError
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import IntegrityError
from django.utils import timezone
from rest_framework import status
from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
# Third party imports
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework.permissions import IsAuthenticated
from rest_framework import status
from sentry_sdk import capture_exception
# Module imports
from plane.api.middleware.api_authentication import APIKeyAuthentication
from plane.api.rate_limit import ApiKeyRateThrottle
from plane.utils.paginator import BasePaginator
from plane.bgtasks.webhook_task import send_webhook
from plane.utils.exception_logger import log_exception
from plane.utils.paginator import BasePaginator
class TimezoneMixin:
@ -106,27 +106,23 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
if isinstance(e, ValidationError):
return Response(
{
"error": "The provided payload is not valid please try with a valid payload"
},
{"error": "Please provide valid detail"},
status=status.HTTP_400_BAD_REQUEST,
)
if isinstance(e, ObjectDoesNotExist):
return Response(
{"error": "The required object does not exist."},
{"error": "The requested resource does not exist."},
status=status.HTTP_404_NOT_FOUND,
)
if isinstance(e, KeyError):
return Response(
{"error": " The required key does not exist."},
{"error": "The required key does not exist."},
status=status.HTTP_400_BAD_REQUEST,
)
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,

View File

@ -140,7 +140,9 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
def get(self, request, slug, project_id, pk=None):
if pk:
queryset = self.get_queryset().get(pk=pk)
queryset = (
self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
)
data = CycleSerializer(
queryset,
fields=self.fields,
@ -150,7 +152,9 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
data,
status=status.HTTP_200_OK,
)
queryset = self.get_queryset()
queryset = (
self.get_queryset().filter(archived_at__isnull=True)
)
cycle_view = request.GET.get("cycle_view", "all")
# Current Cycle
@ -291,6 +295,11 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
cycle = Cycle.objects.get(
workspace__slug=slug, project_id=project_id, pk=pk
)
if cycle.archived_at:
return Response(
{"error": "Archived cycle cannot be edited"},
status=status.HTTP_400_BAD_REQUEST,
)
request_data = request.data
@ -368,6 +377,139 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
return Response(status=status.HTTP_204_NO_CONTENT)
class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
    """Archive/unarchive endpoint for cycles.

    GET    -> paginated list of this project's archived cycles.
    POST   -> archive the cycle ``pk`` (stamps ``archived_at``).
    DELETE -> unarchive the cycle ``pk`` (clears ``archived_at``).
    """

    permission_classes = [
        ProjectEntityPermission,
    ]

    def get_queryset(self):
        # Archived cycles only (archived_at is set), restricted to active
        # members of the project, annotated with per-state issue counts
        # and estimate sums.
        return (
            Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
            .filter(
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
            )
            .filter(archived_at__isnull=False)
            .select_related("project")
            .select_related("workspace")
            .select_related("owned_by")
            # Count distinct issue ids rather than raw joined rows:
            # stacking several annotations multiplies join rows and would
            # overcount. Mirrors the CycleViewSet annotations elsewhere
            # in this codebase.
            .annotate(
                total_issues=Count(
                    "issue_cycle__issue__id",
                    distinct=True,
                    filter=Q(
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                completed_issues=Count(
                    "issue_cycle__issue__id",
                    distinct=True,
                    filter=Q(
                        issue_cycle__issue__state__group="completed",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                cancelled_issues=Count(
                    "issue_cycle__issue__id",
                    distinct=True,
                    filter=Q(
                        issue_cycle__issue__state__group="cancelled",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                started_issues=Count(
                    "issue_cycle__issue__id",
                    distinct=True,
                    filter=Q(
                        issue_cycle__issue__state__group="started",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                unstarted_issues=Count(
                    "issue_cycle__issue__id",
                    distinct=True,
                    filter=Q(
                        issue_cycle__issue__state__group="unstarted",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                backlog_issues=Count(
                    "issue_cycle__issue__id",
                    distinct=True,
                    filter=Q(
                        issue_cycle__issue__state__group="backlog",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            # NOTE(review): the Sum aggregates below are still exposed to
            # join multiplication when combined with the annotations
            # above — confirm estimate totals against expected values.
            .annotate(
                total_estimates=Sum("issue_cycle__issue__estimate_point")
            )
            .annotate(
                completed_estimates=Sum(
                    "issue_cycle__issue__estimate_point",
                    filter=Q(
                        issue_cycle__issue__state__group="completed",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                started_estimates=Sum(
                    "issue_cycle__issue__estimate_point",
                    filter=Q(
                        issue_cycle__issue__state__group="started",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .order_by(self.kwargs.get("order_by", "-created_at"))
            .distinct()
        )

    def get(self, request, slug, project_id):
        # Paginated listing of archived cycles.
        return self.paginate(
            request=request,
            queryset=(self.get_queryset()),
            on_results=lambda cycles: CycleSerializer(
                cycles,
                many=True,
                fields=self.fields,
                expand=self.expand,
            ).data,
        )

    def post(self, request, slug, project_id, pk):
        # Archive: stamp the current time on the cycle.
        cycle = Cycle.objects.get(
            pk=pk, project_id=project_id, workspace__slug=slug
        )
        cycle.archived_at = timezone.now()
        cycle.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def delete(self, request, slug, project_id, pk):
        # Unarchive: clear the timestamp so the cycle is active again.
        cycle = Cycle.objects.get(
            pk=pk, project_id=project_id, workspace__slug=slug
        )
        cycle.archived_at = None
        cycle.save()
        return Response(status=status.HTTP_204_NO_CONTENT)
class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
"""
This viewset automatically provides `list`, `create`,

View File

@ -357,6 +357,7 @@ class LabelAPIEndpoint(BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(project__archived_at__isnull=True)
.select_related("project")
.select_related("workspace")
.select_related("parent")
@ -489,6 +490,7 @@ class IssueLinkAPIEndpoint(BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(project__archived_at__isnull=True)
.order_by(self.kwargs.get("order_by", "-created_at"))
.distinct()
)
@ -618,6 +620,7 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(project__archived_at__isnull=True)
.select_related("workspace", "project", "issue", "actor")
.annotate(
is_member=Exists(
@ -793,6 +796,7 @@ class IssueActivityAPIEndpoint(BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(project__archived_at__isnull=True)
.select_related("actor", "workspace", "issue", "project")
).order_by(request.GET.get("order_by", "created_at"))

View File

@ -67,6 +67,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
),
)
.annotate(
@ -77,6 +78,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
)
)
.annotate(
@ -87,6 +89,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
)
)
.annotate(
@ -97,6 +100,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
)
)
.annotate(
@ -107,6 +111,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
)
)
.annotate(
@ -117,6 +122,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
)
)
.order_by(self.kwargs.get("order_by", "-created_at"))
@ -165,6 +171,11 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
module = Module.objects.get(
pk=pk, project_id=project_id, workspace__slug=slug
)
if module.archived_at:
return Response(
{"error": "Archived module cannot be edited"},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = ModuleSerializer(
module,
data=request.data,
@ -197,7 +208,9 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
def get(self, request, slug, project_id, pk=None):
if pk:
queryset = self.get_queryset().get(pk=pk)
queryset = (
self.get_queryset().filter(archived_at__isnull=True).get(pk=pk)
)
data = ModuleSerializer(
queryset,
fields=self.fields,
@ -209,7 +222,7 @@ class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
)
return self.paginate(
request=request,
queryset=(self.get_queryset()),
queryset=(self.get_queryset().filter(archived_at__isnull=True)),
on_results=lambda modules: ModuleSerializer(
modules,
many=True,
@ -279,6 +292,7 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(project__archived_at__isnull=True)
.select_related("project")
.select_related("workspace")
.select_related("module")
@ -446,3 +460,123 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
epoch=int(timezone.now().timestamp()),
)
return Response(status=status.HTTP_204_NO_CONTENT)
class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
    """Archive/unarchive endpoint for modules.

    GET    -> paginated list of this project's archived modules.
    POST   -> archive the module ``pk`` (stamps ``archived_at``).
    DELETE -> unarchive the module ``pk`` (clears ``archived_at``).
    """

    permission_classes = [
        ProjectEntityPermission,
    ]

    def get_queryset(self):
        # Archived modules only (archived_at is set), with related rows
        # prefetched and per-state issue counts annotated.
        return (
            Module.objects.filter(project_id=self.kwargs.get("project_id"))
            .filter(workspace__slug=self.kwargs.get("slug"))
            .filter(archived_at__isnull=False)
            .select_related("project")
            .select_related("workspace")
            .select_related("lead")
            .prefetch_related("members")
            .prefetch_related(
                Prefetch(
                    "link_module",
                    queryset=ModuleLink.objects.select_related(
                        "module", "created_by"
                    ),
                )
            )
            # Count distinct issue ids, not "state__group" values:
            # Count("...state__group", distinct=True) collapses to at
            # most one row per group, and without distinct the stacked
            # annotation joins multiply row counts. Mirrors the
            # CycleViewSet annotations elsewhere in this codebase.
            .annotate(
                total_issues=Count(
                    "issue_module__issue__id",
                    filter=Q(
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                    distinct=True,
                ),
            )
            .annotate(
                completed_issues=Count(
                    "issue_module__issue__id",
                    filter=Q(
                        issue_module__issue__state__group="completed",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                    distinct=True,
                )
            )
            .annotate(
                cancelled_issues=Count(
                    "issue_module__issue__id",
                    filter=Q(
                        issue_module__issue__state__group="cancelled",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                    distinct=True,
                )
            )
            .annotate(
                started_issues=Count(
                    "issue_module__issue__id",
                    filter=Q(
                        issue_module__issue__state__group="started",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                    distinct=True,
                )
            )
            .annotate(
                unstarted_issues=Count(
                    "issue_module__issue__id",
                    filter=Q(
                        issue_module__issue__state__group="unstarted",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                    distinct=True,
                )
            )
            .annotate(
                backlog_issues=Count(
                    "issue_module__issue__id",
                    filter=Q(
                        issue_module__issue__state__group="backlog",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                    distinct=True,
                )
            )
            .order_by(self.kwargs.get("order_by", "-created_at"))
        )

    def get(self, request, slug, project_id):
        # Paginated listing of archived modules.
        return self.paginate(
            request=request,
            queryset=(self.get_queryset()),
            on_results=lambda modules: ModuleSerializer(
                modules,
                many=True,
                fields=self.fields,
                expand=self.expand,
            ).data,
        )

    def post(self, request, slug, project_id, pk):
        # Archive: stamp the current time on the module.
        module = Module.objects.get(
            pk=pk, project_id=project_id, workspace__slug=slug
        )
        module.archived_at = timezone.now()
        module.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

    def delete(self, request, slug, project_id, pk):
        # Unarchive: clear the timestamp so the module is active again.
        module = Module.objects.get(
            pk=pk, project_id=project_id, workspace__slug=slug
        )
        module.archived_at = None
        module.save()
        return Response(status=status.HTTP_204_NO_CONTENT)

View File

@ -1,4 +1,5 @@
# Django imports
from django.utils import timezone
from django.db import IntegrityError
from django.db.models import Exists, OuterRef, Q, F, Func, Subquery, Prefetch
@ -39,7 +40,10 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
return (
Project.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(
Q(project_projectmember__member=self.request.user)
Q(
project_projectmember__member=self.request.user,
project_projectmember__is_active=True,
)
| Q(network=2)
)
.select_related(
@ -260,6 +264,12 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
workspace = Workspace.objects.get(slug=slug)
project = Project.objects.get(pk=project_id)
if project.archived_at:
return Response(
{"error": "Archived project cannot be updated"},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = ProjectSerializer(
project,
data={**request.data},
@ -316,3 +326,22 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
project = Project.objects.get(pk=project_id, workspace__slug=slug)
project.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
class ProjectArchiveUnarchiveAPIEndpoint(BaseAPIView):
    """Archive (POST) or restore (DELETE) a project.

    Both verbs answer 204 No Content and differ only in the value
    written to ``archived_at``: the current time to archive, ``None``
    to unarchive.
    """

    permission_classes = [
        ProjectBasePermission,
    ]

    def _set_archived_at(self, slug, project_id, value):
        # Shared lookup-and-write used by both verbs below.
        project = Project.objects.get(pk=project_id, workspace__slug=slug)
        project.archived_at = value
        project.save()

    def post(self, request, slug, project_id):
        self._set_archived_at(slug, project_id, timezone.now())
        return Response(status=status.HTTP_204_NO_CONTENT)

    def delete(self, request, slug, project_id):
        self._set_archived_at(slug, project_id, None)
        return Response(status=status.HTTP_204_NO_CONTENT)

View File

@ -28,6 +28,7 @@ class StateAPIEndpoint(BaseAPIView):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(project__archived_at__isnull=True)
.filter(~Q(name="Triage"))
.select_related("project")
.select_related("workspace")

View File

@ -31,6 +31,7 @@ class CycleWriteSerializer(BaseSerializer):
"workspace",
"project",
"owned_by",
"archived_at",
]

View File

@ -533,8 +533,8 @@ class IssueReactionLiteSerializer(DynamicBaseSerializer):
model = IssueReaction
fields = [
"id",
"actor_id",
"issue_id",
"actor",
"issue",
"reaction",
]

View File

@ -39,6 +39,7 @@ class ModuleWriteSerializer(BaseSerializer):
"updated_by",
"created_at",
"updated_at",
"archived_at",
]
def to_representation(self, instance):

View File

@ -8,6 +8,7 @@ from plane.app.views import (
CycleFavoriteViewSet,
TransferCycleIssueEndpoint,
CycleUserPropertiesEndpoint,
CycleArchiveUnarchiveEndpoint,
)
@ -90,4 +91,14 @@ urlpatterns = [
CycleUserPropertiesEndpoint.as_view(),
name="cycle-user-filters",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/archive/",
CycleArchiveUnarchiveEndpoint.as_view(),
name="cycle-archive-unarchive",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/",
CycleArchiveUnarchiveEndpoint.as_view(),
name="cycle-archive-unarchive",
),
]

View File

@ -7,6 +7,7 @@ from plane.app.views import (
ModuleLinkViewSet,
ModuleFavoriteViewSet,
ModuleUserPropertiesEndpoint,
ModuleArchiveUnarchiveEndpoint,
)
@ -110,4 +111,14 @@ urlpatterns = [
ModuleUserPropertiesEndpoint.as_view(),
name="cycle-user-filters",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/modules/<uuid:module_id>/archive/",
ModuleArchiveUnarchiveEndpoint.as_view(),
name="module-archive-unarchive",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/",
ModuleArchiveUnarchiveEndpoint.as_view(),
name="module-archive-unarchive",
),
]

View File

@ -14,6 +14,7 @@ from plane.app.views import (
ProjectPublicCoverImagesEndpoint,
ProjectDeployBoardViewSet,
UserProjectRolesEndpoint,
ProjectArchiveUnarchiveEndpoint,
)
@ -175,4 +176,9 @@ urlpatterns = [
),
name="project-deploy-board",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archive/",
ProjectArchiveUnarchiveEndpoint.as_view(),
name="project-archive-unarchive",
),
]

View File

@ -5,6 +5,7 @@ from .project.base import (
ProjectFavoritesViewSet,
ProjectPublicCoverImagesEndpoint,
ProjectDeployBoardViewSet,
ProjectArchiveUnarchiveEndpoint,
)
from .project.invite import (
@ -90,6 +91,7 @@ from .cycle.base import (
CycleDateCheckEndpoint,
CycleFavoriteViewSet,
TransferCycleIssueEndpoint,
CycleArchiveUnarchiveEndpoint,
CycleUserPropertiesEndpoint,
)
from .cycle.issue import (
@ -168,6 +170,7 @@ from .module.base import (
ModuleViewSet,
ModuleLinkViewSet,
ModuleFavoriteViewSet,
ModuleArchiveUnarchiveEndpoint,
ModuleUserPropertiesEndpoint,
)

View File

@ -2,11 +2,11 @@
import traceback
import zoneinfo
# Django imports
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import IntegrityError
# Django imports
from django.urls import resolve
from django.utils import timezone
from django_filters.rest_framework import DjangoFilterBackend
@ -19,11 +19,10 @@ from rest_framework.permissions import IsAuthenticated
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.viewsets import ModelViewSet
from sentry_sdk import capture_exception
from plane.bgtasks.webhook_task import send_webhook
# Module imports
from plane.bgtasks.webhook_task import send_webhook
from plane.utils.exception_logger import log_exception
from plane.utils.paginator import BasePaginator
@ -90,7 +89,7 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
try:
return self.model.objects.all()
except Exception as e:
capture_exception(e)
log_exception(e)
raise APIException(
"Please check the view", status.HTTP_400_BAD_REQUEST
)
@ -128,13 +127,13 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
)
if isinstance(e, KeyError):
capture_exception(e)
log_exception(e)
return Response(
{"error": "The required key does not exist."},
status=status.HTTP_400_BAD_REQUEST,
)
capture_exception(e)
log_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
@ -240,9 +239,7 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
status=status.HTTP_400_BAD_REQUEST,
)
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,

View File

@ -21,9 +21,9 @@ from django.db.models import (
)
from django.db.models.functions import Coalesce
from django.utils import timezone
from rest_framework import status
# Third party imports
from rest_framework import status
from rest_framework.response import Response
from plane.app.permissions import (
@ -82,6 +82,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(project__archived_at__isnull=True)
.select_related("project", "workspace", "owned_by")
.prefetch_related(
Prefetch(
@ -100,9 +101,20 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
)
.annotate(is_favorite=Exists(favorite_subquery))
.annotate(
total_issues=Count(
"issue_cycle__issue__id",
distinct=True,
filter=Q(
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
completed_issues=Count(
"issue_cycle__issue__state__group",
"issue_cycle__issue__id",
distinct=True,
filter=Q(
issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True,
@ -112,7 +124,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
.annotate(
cancelled_issues=Count(
"issue_cycle__issue__state__group",
"issue_cycle__issue__id",
distinct=True,
filter=Q(
issue_cycle__issue__state__group="cancelled",
issue_cycle__issue__archived_at__isnull=True,
@ -122,7 +135,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
.annotate(
started_issues=Count(
"issue_cycle__issue__state__group",
"issue_cycle__issue__id",
distinct=True,
filter=Q(
issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True,
@ -132,7 +146,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
.annotate(
unstarted_issues=Count(
"issue_cycle__issue__state__group",
"issue_cycle__issue__id",
distinct=True,
filter=Q(
issue_cycle__issue__state__group="unstarted",
issue_cycle__issue__archived_at__isnull=True,
@ -142,7 +157,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
.annotate(
backlog_issues=Count(
"issue_cycle__issue__state__group",
"issue_cycle__issue__id",
distinct=True,
filter=Q(
issue_cycle__issue__state__group="backlog",
issue_cycle__issue__archived_at__isnull=True,
@ -186,15 +202,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
)
def list(self, request, slug, project_id):
queryset = self.get_queryset().annotate(
total_issues=Count(
"issue_cycle",
filter=Q(
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
queryset = self.get_queryset().filter(archived_at__isnull=True)
cycle_view = request.GET.get("cycle_view", "all")
# Update the order by
@ -349,6 +357,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
"progress_snapshot",
# meta fields
"is_favorite",
"total_issues",
"cancelled_issues",
"completed_issues",
"started_issues",
@ -395,6 +404,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
# meta fields
"is_favorite",
"cancelled_issues",
"total_issues",
"completed_issues",
"started_issues",
"unstarted_issues",
@ -421,6 +431,11 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
workspace__slug=slug, project_id=project_id, pk=pk
)
cycle = queryset.first()
if cycle.archived_at:
return Response(
{"error": "Archived cycle cannot be updated"},
status=status.HTTP_400_BAD_REQUEST,
)
request_data = request.data
if (
@ -465,6 +480,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
"progress_snapshot",
# meta fields
"is_favorite",
"total_issues",
"cancelled_issues",
"completed_issues",
"started_issues",
@ -478,31 +494,11 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
def retrieve(self, request, slug, project_id, pk):
queryset = (
self.get_queryset()
.filter(pk=pk)
.annotate(
total_issues=Count(
"issue_cycle",
filter=Q(
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
self.get_queryset().filter(archived_at__isnull=True).filter(pk=pk)
)
data = (
self.get_queryset()
.filter(pk=pk)
.annotate(
total_issues=Issue.issue_objects.filter(
project_id=self.kwargs.get("project_id"),
parent__isnull=True,
issue_cycle__cycle_id=pk,
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
sub_issues=Issue.issue_objects.filter(
project_id=self.kwargs.get("project_id"),
@ -683,6 +679,197 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT)
class CycleArchiveUnarchiveEndpoint(BaseAPIView):
    """List archived cycles of a project (GET), archive a cycle (POST),
    and unarchive it (DELETE, clears the timestamp)."""

    permission_classes = [
        ProjectEntityPermission,
    ]

    def get_queryset(self):
        # Base queryset: archived cycles (archived_at set) of this project,
        # visible only to active project members, and only while the parent
        # project itself is not archived.
        favorite_subquery = CycleFavorite.objects.filter(
            user=self.request.user,
            cycle_id=OuterRef("pk"),
            project_id=self.kwargs.get("project_id"),
            workspace__slug=self.kwargs.get("slug"),
        )
        return (
            Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
            .filter(archived_at__isnull=False)
            .filter(
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
            )
            .filter(project__archived_at__isnull=True)
            .select_related("project", "workspace", "owned_by")
            .prefetch_related(
                Prefetch(
                    "issue_cycle__issue__assignees",
                    queryset=User.objects.only(
                        "avatar", "first_name", "id"
                    ).distinct(),
                )
            )
            .prefetch_related(
                Prefetch(
                    "issue_cycle__issue__labels",
                    queryset=Label.objects.only(
                        "name", "color", "id"
                    ).distinct(),
                )
            )
            .annotate(is_favorite=Exists(favorite_subquery))
            # Per-state issue counts; each count excludes archived issues and
            # drafts via the Q filter.
            .annotate(
                completed_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="completed",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                cancelled_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="cancelled",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                started_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="started",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                unstarted_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="unstarted",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .annotate(
                backlog_issues=Count(
                    "issue_cycle__issue__state__group",
                    filter=Q(
                        issue_cycle__issue__state__group="backlog",
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            # Derived lifecycle status from the date range; DRAFT both when no
            # dates are set and as the fallback default.
            .annotate(
                status=Case(
                    When(
                        Q(start_date__lte=timezone.now())
                        & Q(end_date__gte=timezone.now()),
                        then=Value("CURRENT"),
                    ),
                    When(
                        start_date__gt=timezone.now(), then=Value("UPCOMING")
                    ),
                    When(end_date__lt=timezone.now(), then=Value("COMPLETED")),
                    When(
                        Q(start_date__isnull=True) & Q(end_date__isnull=True),
                        then=Value("DRAFT"),
                    ),
                    default=Value("DRAFT"),
                    output_field=CharField(),
                )
            )
            # Distinct assignee ids across the cycle's issues, restricted to
            # active project members; Coalesce yields [] when there are none.
            .annotate(
                assignee_ids=Coalesce(
                    ArrayAgg(
                        "issue_cycle__issue__assignees__id",
                        distinct=True,
                        filter=~Q(
                            issue_cycle__issue__assignees__id__isnull=True
                        )
                        & Q(
                            issue_cycle__issue__assignees__member_project__is_active=True
                        ),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                )
            )
            .order_by("-is_favorite", "name")
            .distinct()
        )

    def get(self, request, slug, project_id):
        """Return archived cycles as plain dicts (``.values``), favorites first,
        newest first."""
        queryset = (
            self.get_queryset()
            # total_issues counts non-archived, non-draft issues in the cycle.
            .annotate(
                total_issues=Count(
                    "issue_cycle",
                    filter=Q(
                        issue_cycle__issue__archived_at__isnull=True,
                        issue_cycle__issue__is_draft=False,
                    ),
                )
            )
            .values(
                # necessary fields
                "id",
                "workspace_id",
                "project_id",
                # model fields
                "name",
                "description",
                "start_date",
                "end_date",
                "owned_by_id",
                "view_props",
                "sort_order",
                "external_source",
                "external_id",
                "progress_snapshot",
                # meta fields
                "total_issues",
                "is_favorite",
                "cancelled_issues",
                "completed_issues",
                "started_issues",
                "unstarted_issues",
                "backlog_issues",
                "assignee_ids",
                "status",
                "archived_at",
            )
        ).order_by("-is_favorite", "-created_at")
        return Response(queryset, status=status.HTTP_200_OK)

    def post(self, request, slug, project_id, cycle_id):
        """Archive the cycle by stamping ``archived_at`` with the current time.

        NOTE(review): no guard against re-archiving an already-archived cycle;
        a second POST simply refreshes the timestamp.
        """
        cycle = Cycle.objects.get(
            pk=cycle_id, project_id=project_id, workspace__slug=slug
        )
        cycle.archived_at = timezone.now()
        cycle.save()
        return Response(
            {"archived_at": str(cycle.archived_at)},
            status=status.HTTP_200_OK,
        )

    def delete(self, request, slug, project_id, cycle_id):
        """Unarchive the cycle by clearing ``archived_at``."""
        cycle = Cycle.objects.get(
            pk=cycle_id, project_id=project_id, workspace__slug=slug
        )
        cycle.archived_at = None
        cycle.save()
        return Response(status=status.HTTP_204_NO_CONTENT)
class CycleDateCheckEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,

View File

@ -81,6 +81,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(project__archived_at__isnull=True)
.filter(cycle_id=self.kwargs.get("cycle_id"))
.select_related("project")
.select_related("workspace")

View File

@ -48,6 +48,26 @@ from plane.utils.issue_filters import issue_filters
# Module imports
from .. import BaseAPIView
from plane.db.models import (
Issue,
IssueActivity,
ProjectMember,
Widget,
DashboardWidget,
Dashboard,
Project,
IssueLink,
IssueAttachment,
IssueRelation,
User,
)
from plane.app.serializers import (
IssueActivitySerializer,
IssueSerializer,
DashboardSerializer,
WidgetSerializer,
)
from plane.utils.issue_filters import issue_filters
def dashboard_overview_stats(self, request, slug):
@ -150,7 +170,8 @@ def dashboard_assigned_issues(self, request, slug):
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True),
filter=~Q(assignees__id__isnull=True)
& Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
@ -304,7 +325,8 @@ def dashboard_created_issues(self, request, slug):
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True),
filter=~Q(assignees__id__isnull=True)
& Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
@ -472,6 +494,7 @@ def dashboard_recent_activity(self, request, slug):
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
actor=request.user,
).select_related("actor", "workspace", "issue", "project")[:8]
@ -487,6 +510,7 @@ def dashboard_recent_projects(self, request, slug):
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
actor=request.user,
)
.values_list("project_id", flat=True)
@ -501,6 +525,7 @@ def dashboard_recent_projects(self, request, slug):
additional_projects = Project.objects.filter(
project_projectmember__member=request.user,
project_projectmember__is_active=True,
archived_at__isnull=True,
workspace__slug=slug,
).exclude(id__in=unique_project_ids)
@ -523,6 +548,7 @@ def dashboard_recent_collaborators(self, request, slug):
actor=OuterRef("member"),
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.values("actor")
.annotate(num_activities=Count("pk"))
@ -535,6 +561,7 @@ def dashboard_recent_collaborators(self, request, slug):
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.annotate(
num_activities=Coalesce(

View File

@ -29,7 +29,10 @@ class ExportIssuesEndpoint(BaseAPIView):
if provider in ["csv", "xlsx", "json"]:
if not project_ids:
project_ids = Project.objects.filter(
workspace__slug=slug
workspace__slug=slug,
project_projectmember__member=request.user,
project_projectmember__is_active=True,
archived_at__isnull=True,
).values_list("id", flat=True)
project_ids = [str(project_id) for project_id in project_ids]

View File

@ -146,7 +146,8 @@ class InboxIssueViewSet(BaseViewSet):
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True),
filter=~Q(assignees__id__isnull=True)
& Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),

View File

@ -44,6 +44,7 @@ class IssueActivityEndpoint(BaseAPIView):
~Q(field__in=["comment", "vote", "reaction", "draft"]),
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
workspace__slug=slug,
)
.filter(**filters)
@ -54,6 +55,7 @@ class IssueActivityEndpoint(BaseAPIView):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
workspace__slug=slug,
)
.filter(**filters)

View File

@ -1,25 +1,27 @@
# Python imports
import json
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import (
Exists,
F,
Func,
OuterRef,
Prefetch,
Q,
)
# Django imports
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from rest_framework import status
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import UUIDField
from django.db.models.functions import Coalesce
# Third Party imports
from rest_framework.response import Response
from rest_framework import status
# Module imports
from .. import BaseViewSet, BaseAPIView, WebhookMixin
from plane.app.serializers import (
IssuePropertySerializer,
IssueSerializer,
IssueCreateSerializer,
IssueDetailSerializer,
)
from plane.app.permissions import (
ProjectEntityPermission,
ProjectLitePermission,

View File

@ -48,6 +48,7 @@ class IssueCommentViewSet(WebhookMixin, BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.select_related("project")
.select_related("workspace")
@ -163,6 +164,7 @@ class CommentReactionViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.order_by("-created_at")
.distinct()

View File

@ -1,15 +1,24 @@
# Python imports
import json
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import (
Case,
CharField,
Exists,
F,
Func,
Max,
OuterRef,
Prefetch,
Q,
UUIDField,
Value,
When,
)
from django.db.models.functions import Coalesce
# Django imports
from django.utils import timezone

View File

@ -87,7 +87,7 @@ class BulkCreateIssueLabelsEndpoint(BaseAPIView):
Label(
name=label.get("name", "Migrated"),
description=label.get("description", "Migrated Issue"),
color="#" + "%06x" % random.randint(0, 0xFFFFFF),
color=f"#{random.randint(0, 0xFFFFFF+1):06X}",
project_id=project_id,
workspace_id=project.workspace_id,
created_by=request.user,

View File

@ -35,6 +35,7 @@ class IssueLinkViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.order_by("-created_at")
.distinct()

View File

@ -34,6 +34,7 @@ class IssueReactionViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.order_by("-created_at")
.distinct()

View File

@ -41,6 +41,7 @@ class IssueRelationViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.select_related("project")
.select_related("workspace")

View File

@ -83,7 +83,8 @@ class SubIssuesEndpoint(BaseAPIView):
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True),
filter=~Q(assignees__id__isnull=True)
& Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),

View File

@ -54,6 +54,7 @@ class IssueSubscriberViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.order_by("-created_at")
.distinct()

View File

@ -1,6 +1,19 @@
# Python imports
import json
# Django Imports
from django.utils import timezone
from django.db.models import (
Prefetch,
F,
OuterRef,
Exists,
Count,
Q,
Func,
Subquery,
IntegerField,
)
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import (
@ -88,6 +101,59 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
project_id=self.kwargs.get("project_id"),
workspace__slug=self.kwargs.get("slug"),
)
cancelled_issues = (
Issue.issue_objects.filter(
state__group="cancelled",
issue_module__module_id=OuterRef("pk"),
)
.values("issue_module__module_id")
.annotate(cnt=Count("pk"))
.values("cnt")
)
completed_issues = (
Issue.issue_objects.filter(
state__group="completed",
issue_module__module_id=OuterRef("pk"),
)
.values("issue_module__module_id")
.annotate(cnt=Count("pk"))
.values("cnt")
)
started_issues = (
Issue.issue_objects.filter(
state__group="started",
issue_module__module_id=OuterRef("pk"),
)
.values("issue_module__module_id")
.annotate(cnt=Count("pk"))
.values("cnt")
)
unstarted_issues = (
Issue.issue_objects.filter(
state__group="unstarted",
issue_module__module_id=OuterRef("pk"),
)
.values("issue_module__module_id")
.annotate(cnt=Count("pk"))
.values("cnt")
)
backlog_issues = (
Issue.issue_objects.filter(
state__group="backlog",
issue_module__module_id=OuterRef("pk"),
)
.values("issue_module__module_id")
.annotate(cnt=Count("pk"))
.values("cnt")
)
total_issues = (
Issue.issue_objects.filter(
issue_module__module_id=OuterRef("pk"),
)
.values("issue_module__module_id")
.annotate(cnt=Count("pk"))
.values("cnt")
)
return (
super()
.get_queryset()
@ -107,53 +173,39 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
)
)
.annotate(
completed_issues=Count(
"issue_module__issue__state__group",
filter=Q(
issue_module__issue__state__group="completed",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
completed_issues=Coalesce(
Subquery(completed_issues[:1]),
Value(0, output_field=IntegerField()),
)
)
.annotate(
cancelled_issues=Count(
"issue_module__issue__state__group",
filter=Q(
issue_module__issue__state__group="cancelled",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
cancelled_issues=Coalesce(
Subquery(cancelled_issues[:1]),
Value(0, output_field=IntegerField()),
)
)
.annotate(
started_issues=Count(
"issue_module__issue__state__group",
filter=Q(
issue_module__issue__state__group="started",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
started_issues=Coalesce(
Subquery(started_issues[:1]),
Value(0, output_field=IntegerField()),
)
)
.annotate(
unstarted_issues=Count(
"issue_module__issue__state__group",
filter=Q(
issue_module__issue__state__group="unstarted",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
unstarted_issues=Coalesce(
Subquery(unstarted_issues[:1]),
Value(0, output_field=IntegerField()),
)
)
.annotate(
backlog_issues=Count(
"issue_module__issue__state__group",
filter=Q(
issue_module__issue__state__group="backlog",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
backlog_issues=Coalesce(
Subquery(backlog_issues[:1]),
Value(0, output_field=IntegerField()),
)
)
.annotate(
total_issues=Coalesce(
Subquery(total_issues[:1]),
Value(0, output_field=IntegerField()),
)
)
.annotate(
@ -203,6 +255,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
"is_favorite",
"cancelled_issues",
"completed_issues",
"total_issues",
"started_issues",
"unstarted_issues",
"backlog_issues",
@ -214,7 +267,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def list(self, request, slug, project_id):
queryset = self.get_queryset()
queryset = self.get_queryset().filter(archived_at__isnull=True)
if self.fields:
modules = ModuleSerializer(
queryset,
@ -241,6 +294,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
"external_source",
"external_id",
# computed fields
"total_issues",
"is_favorite",
"cancelled_issues",
"completed_issues",
@ -255,17 +309,8 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
def retrieve(self, request, slug, project_id, pk):
queryset = (
self.get_queryset()
.filter(archived_at__isnull=True)
.filter(pk=pk)
.annotate(
total_issues=Issue.issue_objects.filter(
project_id=self.kwargs.get("project_id"),
parent__isnull=True,
issue_module__module_id=pk,
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
sub_issues=Issue.issue_objects.filter(
project_id=self.kwargs.get("project_id"),
@ -391,14 +436,20 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
)
def partial_update(self, request, slug, project_id, pk):
queryset = self.get_queryset().filter(pk=pk)
module = self.get_queryset().filter(pk=pk)
if module.first().archived_at:
return Response(
{"error": "Archived module cannot be updated"},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = ModuleWriteSerializer(
queryset.first(), data=request.data, partial=True
module.first(), data=request.data, partial=True
)
if serializer.is_valid():
serializer.save()
module = queryset.values(
module = module.values(
# Required fields
"id",
"workspace_id",
@ -422,6 +473,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
"cancelled_issues",
"completed_issues",
"started_issues",
"total_issues",
"unstarted_issues",
"backlog_issues",
"created_at",
@ -481,12 +533,174 @@ class ModuleLinkViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.order_by("-created_at")
.distinct()
)
class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
    """List archived modules of a project (GET), archive a module (POST),
    and unarchive it (DELETE, clears the timestamp)."""

    permission_classes = [
        ProjectEntityPermission,
    ]

    def get_queryset(self):
        # Base queryset: archived modules (archived_at set) of this project.
        # FIX: scope to the requested project and to active project members,
        # and exclude archived projects — mirroring
        # CycleArchiveUnarchiveEndpoint. Previously only workspace__slug was
        # filtered, so archived modules from every project in the workspace
        # were returned regardless of the caller's project membership.
        favorite_subquery = ModuleFavorite.objects.filter(
            user=self.request.user,
            module_id=OuterRef("pk"),
            project_id=self.kwargs.get("project_id"),
            workspace__slug=self.kwargs.get("slug"),
        )
        return (
            Module.objects.filter(workspace__slug=self.kwargs.get("slug"))
            .filter(project_id=self.kwargs.get("project_id"))
            .filter(
                project__project_projectmember__member=self.request.user,
                project__project_projectmember__is_active=True,
            )
            .filter(project__archived_at__isnull=True)
            .filter(archived_at__isnull=False)
            .annotate(is_favorite=Exists(favorite_subquery))
            .select_related("project")
            .select_related("workspace")
            .select_related("lead")
            .prefetch_related("members")
            .prefetch_related(
                Prefetch(
                    "link_module",
                    queryset=ModuleLink.objects.select_related(
                        "module", "created_by"
                    ),
                )
            )
            # total_issues and the per-state counts below all exclude archived
            # issues and drafts via the Q filter.
            .annotate(
                total_issues=Count(
                    "issue_module",
                    filter=Q(
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                    distinct=True,
                ),
            )
            .annotate(
                completed_issues=Count(
                    "issue_module__issue__state__group",
                    filter=Q(
                        issue_module__issue__state__group="completed",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                    distinct=True,
                )
            )
            .annotate(
                cancelled_issues=Count(
                    "issue_module__issue__state__group",
                    filter=Q(
                        issue_module__issue__state__group="cancelled",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                    distinct=True,
                )
            )
            .annotate(
                started_issues=Count(
                    "issue_module__issue__state__group",
                    filter=Q(
                        issue_module__issue__state__group="started",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                    distinct=True,
                )
            )
            .annotate(
                unstarted_issues=Count(
                    "issue_module__issue__state__group",
                    filter=Q(
                        issue_module__issue__state__group="unstarted",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                    distinct=True,
                )
            )
            .annotate(
                backlog_issues=Count(
                    "issue_module__issue__state__group",
                    filter=Q(
                        issue_module__issue__state__group="backlog",
                        issue_module__issue__archived_at__isnull=True,
                        issue_module__issue__is_draft=False,
                    ),
                    distinct=True,
                )
            )
            # Distinct member ids of the module; Coalesce yields [] when empty.
            .annotate(
                member_ids=Coalesce(
                    ArrayAgg(
                        "members__id",
                        distinct=True,
                        filter=~Q(members__id__isnull=True),
                    ),
                    Value([], output_field=ArrayField(UUIDField())),
                )
            )
            .order_by("-is_favorite", "-created_at")
        )

    def get(self, request, slug, project_id):
        """Return archived modules as plain dicts (``.values``)."""
        queryset = self.get_queryset()
        modules = queryset.values(  # Required fields
            "id",
            "workspace_id",
            "project_id",
            # Model fields
            "name",
            "description",
            "description_text",
            "description_html",
            "start_date",
            "target_date",
            "status",
            "lead_id",
            "member_ids",
            "view_props",
            "sort_order",
            "external_source",
            "external_id",
            # computed fields
            "total_issues",
            "is_favorite",
            "cancelled_issues",
            "completed_issues",
            "started_issues",
            "unstarted_issues",
            "backlog_issues",
            "created_at",
            "updated_at",
            "archived_at",
        )
        return Response(modules, status=status.HTTP_200_OK)

    def post(self, request, slug, project_id, module_id):
        """Archive the module by stamping ``archived_at`` with the current time."""
        module = Module.objects.get(
            pk=module_id, project_id=project_id, workspace__slug=slug
        )
        module.archived_at = timezone.now()
        module.save()
        return Response(
            {"archived_at": str(module.archived_at)},
            status=status.HTTP_200_OK,
        )

    def delete(self, request, slug, project_id, module_id):
        """Unarchive the module by clearing ``archived_at``."""
        module = Module.objects.get(
            pk=module_id, project_id=project_id, workspace__slug=slug
        )
        module.archived_at = None
        module.save()
        return Response(status=status.HTTP_204_NO_CONTENT)
class ModuleFavoriteViewSet(BaseViewSet):
serializer_class = ModuleFavoriteSerializer
model = ModuleFavorite

View File

@ -70,6 +70,7 @@ class PageViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.filter(parent__isnull=True)
.filter(Q(owned_by=self.request.user) | Q(access=0))

View File

@ -1,6 +1,7 @@
# Python imports
import boto3
from django.conf import settings
from django.utils import timezone
# Django imports
from django.db import IntegrityError
@ -13,11 +14,13 @@ from django.db.models import (
Q,
Subquery,
)
from rest_framework import serializers, status
from rest_framework.permissions import AllowAny
# Third Party imports
from rest_framework.response import Response
from rest_framework import serializers, status
from rest_framework.permissions import AllowAny
# Module imports
from plane.app.permissions import (
ProjectBasePermission,
@ -29,8 +32,6 @@ from plane.app.serializers import (
ProjectListSerializer,
ProjectSerializer,
)
# Module imports
from plane.app.views.base import BaseAPIView, BaseViewSet, WebhookMixin
from plane.db.models import (
Cycle,
@ -70,7 +71,10 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(
Q(project_projectmember__member=self.request.user)
Q(
project_projectmember__member=self.request.user,
project_projectmember__is_active=True,
)
| Q(network=2)
)
.select_related(
@ -175,6 +179,7 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
def retrieve(self, request, slug, pk):
project = (
self.get_queryset()
.filter(archived_at__isnull=True)
.filter(pk=pk)
.annotate(
total_issues=Issue.issue_objects.filter(
@ -362,6 +367,12 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
project = Project.objects.get(pk=pk)
if project.archived_at:
return Response(
{"error": "Archived projects cannot be updated"},
status=status.HTTP_400_BAD_REQUEST,
)
serializer = ProjectSerializer(
project,
data={**request.data},
@ -416,6 +427,28 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
)
class ProjectArchiveUnarchiveEndpoint(BaseAPIView):
    """Toggle a project's archived state.

    POST stamps ``archived_at`` with the current time; DELETE clears it so the
    project becomes active again.
    """

    permission_classes = [
        ProjectBasePermission,
    ]

    def _stamp_archived_at(self, slug, project_id, stamp):
        # Shared helper: load the project scoped to its workspace, persist the
        # new archive timestamp, and hand the instance back to the caller.
        project = Project.objects.get(pk=project_id, workspace__slug=slug)
        project.archived_at = stamp
        project.save()
        return project

    def post(self, request, slug, project_id):
        # Archive: record the moment the project was put away and echo it back.
        project = self._stamp_archived_at(slug, project_id, timezone.now())
        return Response(
            {"archived_at": str(project.archived_at)},
            status=status.HTTP_200_OK,
        )

    def delete(self, request, slug, project_id):
        # Unarchive: wipe the timestamp; nothing to report back.
        self._stamp_archived_at(slug, project_id, None)
        return Response(status=status.HTTP_204_NO_CONTENT)
class ProjectIdentifierEndpoint(BaseAPIView):
permission_classes = [
ProjectBasePermission,

View File

@ -50,6 +50,7 @@ class GlobalSearchEndpoint(BaseAPIView):
q,
project_projectmember__member=self.request.user,
project_projectmember__is_active=True,
archived_at__isnull=True,
workspace__slug=slug,
)
.distinct()
@ -72,6 +73,7 @@ class GlobalSearchEndpoint(BaseAPIView):
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
workspace__slug=slug,
)
@ -97,6 +99,7 @@ class GlobalSearchEndpoint(BaseAPIView):
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
workspace__slug=slug,
)
@ -121,6 +124,7 @@ class GlobalSearchEndpoint(BaseAPIView):
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
workspace__slug=slug,
)
@ -145,6 +149,7 @@ class GlobalSearchEndpoint(BaseAPIView):
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
workspace__slug=slug,
)
@ -169,6 +174,7 @@ class GlobalSearchEndpoint(BaseAPIView):
q,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
workspace__slug=slug,
)
@ -243,6 +249,7 @@ class IssueSearchEndpoint(BaseAPIView):
workspace__slug=slug,
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True
)
if workspace_search == "false":

View File

@ -33,6 +33,7 @@ class StateViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.filter(~Q(name="Triage"))
.select_related("project")

View File

@ -131,7 +131,8 @@ class GlobalViewIssuesViewSet(BaseViewSet):
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True),
filter=~Q(assignees__id__isnull=True)
& Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
@ -297,6 +298,7 @@ class IssueViewViewSet(BaseViewSet):
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.select_related("project")
.select_related("workspace")

View File

@ -1,49 +1,51 @@
# Python imports
from datetime import date
from dateutil.relativedelta import relativedelta
import csv
import io
from datetime import date
from dateutil.relativedelta import relativedelta
from django.db import IntegrityError
from django.db.models import (
Count,
F,
Func,
OuterRef,
Prefetch,
Q,
)
from django.db.models.fields import DateField
from django.db.models.functions import Cast, ExtractDay, ExtractWeek
# Django imports
from django.http import HttpResponse
from django.db import IntegrityError
from django.utils import timezone
from django.db.models import (
Prefetch,
OuterRef,
Func,
F,
Q,
Count,
)
from django.db.models.functions import ExtractWeek, Cast, ExtractDay
from django.db.models.fields import DateField
# Third party modules
from rest_framework import status
from rest_framework.response import Response
from plane.app.permissions import (
WorkSpaceAdminPermission,
WorkSpaceBasePermission,
WorkspaceEntityPermission,
)
# Module imports
from plane.app.serializers import (
WorkSpaceSerializer,
WorkspaceThemeSerializer,
)
from plane.app.views.base import BaseViewSet, BaseAPIView
from plane.app.views.base import BaseAPIView, BaseViewSet
from plane.db.models import (
Workspace,
IssueActivity,
Issue,
WorkspaceTheme,
IssueActivity,
Workspace,
WorkspaceMember,
)
from plane.app.permissions import (
WorkSpaceBasePermission,
WorkSpaceAdminPermission,
WorkspaceEntityPermission,
WorkspaceTheme,
)
from plane.utils.cache import cache_response, invalidate_cache
class WorkSpaceViewSet(BaseViewSet):
model = Workspace
serializer_class = WorkSpaceSerializer
@ -138,6 +140,7 @@ class WorkSpaceViewSet(BaseViewSet):
{"slug": "The workspace with the slug already exists"},
status=status.HTTP_410_GONE,
)
@cache_response(60 * 60 * 2)
def list(self, request, *args, **kwargs):
return super().list(request, *args, **kwargs)
@ -149,6 +152,7 @@ class WorkSpaceViewSet(BaseViewSet):
@invalidate_cache(path="/api/workspaces/", user=False)
@invalidate_cache(path="/api/users/me/workspaces/")
@invalidate_cache(path="/api/users/me/settings/")
def destroy(self, request, *args, **kwargs):
return super().destroy(request, *args, **kwargs)

View File

@ -25,15 +25,11 @@ class WorkspaceEstimatesEndpoint(BaseAPIView):
estimate_ids = Project.objects.filter(
workspace__slug=slug, estimate__isnull=False
).values_list("estimate_id", flat=True)
estimates = Estimate.objects.filter(
pk__in=estimate_ids
).prefetch_related(
Prefetch(
"points",
queryset=Project.objects.select_related(
"estimate", "workspace", "project"
),
)
estimates = (
Estimate.objects.filter(pk__in=estimate_ids, workspace__slug=slug)
.prefetch_related("points")
.select_related("workspace", "project")
)
serializer = WorkspaceEstimateSerializer(estimates, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)

View File

@ -20,6 +20,7 @@ class WorkspaceLabelsEndpoint(BaseAPIView):
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
serializer = LabelSerializer(labels, many=True).data
return Response(serializer, status=status.HTTP_200_OK)

View File

@ -1,41 +1,43 @@
# Django imports
from django.db.models import (
Q,
CharField,
Count,
Q,
)
from django.db.models.functions import Cast
from django.db.models import CharField
# Third party modules
from rest_framework import status
from rest_framework.response import Response
# Module imports
from plane.app.serializers import (
WorkSpaceMemberSerializer,
TeamSerializer,
UserLiteSerializer,
WorkspaceMemberAdminSerializer,
WorkspaceMemberMeSerializer,
ProjectMemberRoleSerializer,
)
from plane.app.views.base import BaseAPIView
from .. import BaseViewSet
from plane.db.models import (
User,
Workspace,
Team,
ProjectMember,
Project,
WorkspaceMember,
)
from plane.app.permissions import (
WorkSpaceAdminPermission,
WorkspaceEntityPermission,
WorkspaceUserPermission,
)
# Module imports
from plane.app.serializers import (
ProjectMemberRoleSerializer,
TeamSerializer,
UserLiteSerializer,
WorkspaceMemberAdminSerializer,
WorkspaceMemberMeSerializer,
WorkSpaceMemberSerializer,
)
from plane.app.views.base import BaseAPIView
from plane.db.models import (
Project,
ProjectMember,
Team,
User,
Workspace,
WorkspaceMember,
)
from plane.utils.cache import cache_response, invalidate_cache
from .. import BaseViewSet
class WorkSpaceMemberViewSet(BaseViewSet):
serializer_class = WorkspaceMemberAdminSerializer
@ -147,6 +149,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
@invalidate_cache(
path="/api/workspaces/:slug/members/", url_params=True, user=False
)
@invalidate_cache(path="/api/users/me/settings/")
def destroy(self, request, slug, pk):
# Check the user role who is deleting the user
workspace_member = WorkspaceMember.objects.get(
@ -214,6 +217,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
@invalidate_cache(
path="/api/workspaces/:slug/members/", url_params=True, user=False
)
@invalidate_cache(path="/api/users/me/settings/")
def leave(self, request, slug):
workspace_member = WorkspaceMember.objects.get(
workspace__slug=slug,

View File

@ -45,6 +45,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
),
)
.annotate(
@ -55,6 +56,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
)
)
.annotate(
@ -65,6 +67,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
)
)
.annotate(
@ -75,6 +78,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
)
)
.annotate(
@ -85,6 +89,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
)
)
.annotate(
@ -95,6 +100,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
)
)
.order_by(self.kwargs.get("order_by", "-created_at"))

View File

@ -20,6 +20,7 @@ class WorkspaceStatesEndpoint(BaseAPIView):
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
serializer = StateSerializer(states, many=True).data
return Response(serializer, status=status.HTTP_200_OK)

View File

@ -125,7 +125,7 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
| Q(issue_subscribers__subscriber_id=user_id),
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True
project__project_projectmember__is_active=True,
)
.filter(**filters)
.select_related("workspace", "project", "state", "parent")
@ -166,7 +166,8 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
ArrayAgg(
"assignees__id",
distinct=True,
filter=~Q(assignees__id__isnull=True),
filter=~Q(assignees__id__isnull=True)
& Q(assignees__member_project__is_active=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
@ -300,6 +301,7 @@ class WorkspaceUserProfileEndpoint(BaseAPIView):
workspace__slug=slug,
project_projectmember__member=request.user,
project_projectmember__is_active=True,
archived_at__isnull=True,
)
.annotate(
created_issues=Count(
@ -388,6 +390,7 @@ class WorkspaceUserActivityEndpoint(BaseAPIView):
workspace__slug=slug,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
actor=user_id,
).select_related("actor", "workspace", "issue", "project")
@ -500,6 +503,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
subscriber_id=user_id,
project__project_projectmember__member=request.user,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.filter(**filters)
.count()

View File

@ -1,22 +1,22 @@
# Python imports
import csv
import io
import logging
# Third party imports
from celery import shared_task
# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.conf import settings
# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
# Module imports
from plane.db.models import Issue
from plane.utils.analytics_plot import build_graph_plot
from plane.utils.issue_filters import issue_filters
from plane.license.utils.instance_value import get_email_configuration
from plane.utils.analytics_plot import build_graph_plot
from plane.utils.exception_logger import log_exception
from plane.utils.issue_filters import issue_filters
row_mapping = {
"state__name": "State",
@ -55,6 +55,7 @@ def send_export_email(email, slug, csv_buffer, rows):
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@ -64,6 +65,7 @@ def send_export_email(email, slug, csv_buffer, rows):
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@ -210,9 +212,9 @@ def generate_segmented_rows(
None,
)
if assignee:
generated_row[
0
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
generated_row[0] = (
f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
)
if x_axis == LABEL_ID:
label = next(
@ -279,9 +281,9 @@ def generate_segmented_rows(
None,
)
if assignee:
row_zero[
index + 2
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
row_zero[index + 2] = (
f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
)
if segmented == LABEL_ID:
for index, segm in enumerate(row_zero[2:]):
@ -366,9 +368,9 @@ def generate_non_segmented_rows(
None,
)
if assignee:
row[
0
] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
row[0] = (
f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
)
if x_axis == LABEL_ID:
label = next(
@ -504,10 +506,8 @@ def analytic_export_task(email, data, slug):
csv_buffer = generate_csv_from_rows(rows)
send_export_email(email, slug, csv_buffer, rows)
logging.getLogger("plane").info("Email sent succesfully.")
return
except Exception as e:
print(e)
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return

View File

@ -1,21 +1,22 @@
import logging
from datetime import datetime
from bs4 import BeautifulSoup
# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
# Django imports
from django.utils import timezone
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.conf import settings
# Module imports
from plane.db.models import EmailNotificationLog, User, Issue
from plane.db.models import EmailNotificationLog, Issue, User
from plane.license.utils.instance_value import get_email_configuration
from plane.settings.redis import redis_instance
from plane.utils.exception_logger import log_exception
# acquire and delete redis lock
@ -69,7 +70,9 @@ def stack_email_notification():
receiver_notification.get("entity_identifier"), {}
).setdefault(
str(receiver_notification.get("triggered_by_id")), []
).append(receiver_notification.get("data"))
).append(
receiver_notification.get("data")
)
# append processed notifications
processed_notifications.append(receiver_notification.get("id"))
email_notification_ids.append(receiver_notification.get("id"))
@ -182,6 +185,7 @@ def send_email_notification(
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@ -285,6 +289,7 @@ def send_email_notification(
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@ -296,7 +301,9 @@ def send_email_notification(
)
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email Sent Successfully")
# Update the logs
EmailNotificationLog.objects.filter(
pk__in=email_notification_ids
).update(sent_at=timezone.now())
@ -305,15 +312,20 @@ def send_email_notification(
release_lock(lock_id=lock_id)
return
except Exception as e:
capture_exception(e)
log_exception(e)
# release the lock
release_lock(lock_id=lock_id)
return
else:
print("Duplicate task recived. Skipping...")
logging.getLogger("plane").info(
"Duplicate email received skipping"
)
return
except (Issue.DoesNotExist, User.DoesNotExist) as e:
if settings.DEBUG:
print(e)
log_exception(e)
release_lock(lock_id=lock_id)
return
except Exception as e:
log_exception(e)
release_lock(lock_id=lock_id)
return

View File

@ -1,13 +1,13 @@
import uuid
import os
import uuid
# third party imports
from celery import shared_task
from sentry_sdk import capture_exception
from posthog import Posthog
# module imports
from plane.license.utils.instance_value import get_configuration_value
from plane.utils.exception_logger import log_exception
def posthogConfiguration():
@ -51,7 +51,8 @@ def auth_events(user, email, user_agent, ip, event_name, medium, first_time):
},
)
except Exception as e:
capture_exception(e)
log_exception(e)
return
@shared_task
@ -77,4 +78,5 @@ def workspace_invite_event(
},
)
except Exception as e:
capture_exception(e)
log_exception(e)
return

View File

@ -2,21 +2,22 @@
import csv
import io
import json
import boto3
import zipfile
import boto3
from botocore.client import Config
# Third party imports
from celery import shared_task
# Django imports
from django.conf import settings
from django.utils import timezone
# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
from botocore.client import Config
from openpyxl import Workbook
# Module imports
from plane.db.models import Issue, ExporterHistory
from plane.db.models import ExporterHistory, Issue
from plane.utils.exception_logger import log_exception
def dateTimeConverter(time):
@ -303,6 +304,7 @@ def issue_export_task(
project_id__in=project_ids,
project__project_projectmember__member=exporter_instance.initiated_by_id,
project__project_projectmember__is_active=True,
project__archived_at__isnull=True,
)
.select_related(
"project", "workspace", "state", "parent", "created_by"
@ -403,8 +405,5 @@ def issue_export_task(
exporter_instance.status = "failed"
exporter_instance.reason = str(e)
exporter_instance.save(update_fields=["status", "reason"])
# Print logs if in DEBUG mode
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return

View File

@ -1,17 +1,17 @@
# Python import
# Python imports
import logging
# Third party imports
from celery import shared_task
# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.conf import settings
# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
# Module imports
from plane.license.utils.instance_value import get_email_configuration
from plane.utils.exception_logger import log_exception
@shared_task
@ -26,6 +26,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@ -49,6 +50,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@ -60,10 +62,8 @@ def forgot_password(first_name, email, uidb64, token, current_site):
)
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email sent successfully")
return
except Exception as e:
# Print logs if in DEBUG mode
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return

View File

@ -1,34 +1,36 @@
# Python imports
import json
import requests
# Third Party imports
from celery import shared_task
# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.utils import timezone
# Third Party imports
from celery import shared_task
from sentry_sdk import capture_exception
from plane.app.serializers import IssueActivitySerializer
from plane.bgtasks.notification_task import notifications
# Module imports
from plane.db.models import (
User,
Issue,
Project,
Label,
IssueActivity,
State,
Cycle,
Module,
IssueReaction,
CommentReaction,
Cycle,
Issue,
IssueActivity,
IssueComment,
IssueReaction,
IssueSubscriber,
Label,
Module,
Project,
State,
User,
)
from plane.app.serializers import IssueActivitySerializer
from plane.bgtasks.notification_task import notifications
from plane.settings.redis import redis_instance
from plane.utils.exception_logger import log_exception
# Track Changes in name
@ -1647,7 +1649,7 @@ def issue_activity(
headers=headers,
)
except Exception as e:
capture_exception(e)
log_exception(e)
if notification:
notifications.delay(
@ -1668,8 +1670,5 @@ def issue_activity(
return
except Exception as e:
# Print logs if in DEBUG mode
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return

View File

@ -2,18 +2,17 @@
import json
from datetime import timedelta
# Django imports
from django.utils import timezone
from django.db.models import Q
from django.conf import settings
# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
from django.db.models import Q
# Django imports
from django.utils import timezone
# Module imports
from plane.db.models import Issue, Project, State
from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import Issue, Project, State
from plane.utils.exception_logger import log_exception
@shared_task
@ -96,9 +95,7 @@ def archive_old_issues():
]
return
except Exception as e:
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return
@ -179,7 +176,5 @@ def close_old_issues():
]
return
except Exception as e:
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return

View File

@ -1,17 +1,17 @@
# Python imports
import logging
# Third party imports
from celery import shared_task
# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.conf import settings
# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
# Module imports
from plane.license.utils.instance_value import get_email_configuration
from plane.utils.exception_logger import log_exception
@shared_task
@ -23,6 +23,7 @@ def magic_link(email, key, token, current_site):
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@ -41,6 +42,7 @@ def magic_link(email, key, token, current_site):
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@ -52,11 +54,8 @@ def magic_link(email, key, token, current_site):
)
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email sent successfully.")
return
except Exception as e:
print(e)
capture_exception(e)
# Print logs if in DEBUG mode
if settings.DEBUG:
print(e)
log_exception(e)
return

View File

@ -1,4 +1,8 @@
# Python import
# Python imports
import logging
# Third party imports
from celery import shared_task
# Django imports
# Third party imports
@ -7,11 +11,11 @@ from django.conf import settings
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from sentry_sdk import capture_exception
# Module imports
from plane.db.models import Project, ProjectMemberInvite, User
from plane.license.utils.instance_value import get_email_configuration
from plane.utils.exception_logger import log_exception
@shared_task
@ -51,6 +55,7 @@ def project_invitation(email, project_id, token, current_site, invitor):
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@ -60,6 +65,7 @@ def project_invitation(email, project_id, token, current_site, invitor):
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@ -72,12 +78,10 @@ def project_invitation(email, project_id, token, current_site, invitor):
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email sent successfully.")
return
except (Project.DoesNotExist, ProjectMemberInvite.DoesNotExist):
return
except Exception as e:
# Print logs if in DEBUG mode
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return

View File

@ -1,44 +1,45 @@
import requests
import uuid
import hashlib
import json
import hmac
import json
import logging
import uuid
# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
import requests
# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
from plane.db.models import (
Webhook,
WebhookLog,
Project,
Issue,
Cycle,
Module,
ModuleIssue,
CycleIssue,
IssueComment,
User,
)
from plane.api.serializers import (
ProjectSerializer,
CycleSerializer,
ModuleSerializer,
CycleIssueSerializer,
ModuleIssueSerializer,
IssueCommentSerializer,
IssueExpandSerializer,
)
# Django imports
from django.conf import settings
from django.core.mail import EmailMultiAlternatives, get_connection
from django.core.serializers.json import DjangoJSONEncoder
from django.template.loader import render_to_string
from django.utils.html import strip_tags
# Module imports
from plane.api.serializers import (
CycleIssueSerializer,
CycleSerializer,
IssueCommentSerializer,
IssueExpandSerializer,
ModuleIssueSerializer,
ModuleSerializer,
ProjectSerializer,
)
from plane.db.models import (
Cycle,
CycleIssue,
Issue,
IssueComment,
Module,
ModuleIssue,
Project,
User,
Webhook,
WebhookLog,
)
from plane.license.utils.instance_value import get_email_configuration
from plane.utils.exception_logger import log_exception
SERIALIZER_MAPPER = {
"project": ProjectSerializer,
@ -174,7 +175,7 @@ def webhook_task(self, webhook, slug, event, event_data, action, current_site):
except Exception as e:
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return
@ -241,7 +242,7 @@ def send_webhook(event, payload, kw, action, slug, bulk, current_site):
except Exception as e:
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return
@ -256,6 +257,7 @@ def send_webhook_deactivation_email(
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@ -284,6 +286,7 @@ def send_webhook_deactivation_email(
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@ -295,8 +298,8 @@ def send_webhook_deactivation_email(
)
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email sent successfully.")
return
except Exception as e:
print(e)
log_exception(e)
return

View File

@ -1,18 +1,18 @@
# Python imports
import logging
# Third party imports
from celery import shared_task
# Django imports
from django.core.mail import EmailMultiAlternatives, get_connection
from django.template.loader import render_to_string
from django.utils.html import strip_tags
from django.conf import settings
# Third party imports
from celery import shared_task
from sentry_sdk import capture_exception
# Module imports
from plane.db.models import Workspace, WorkspaceMemberInvite, User
from plane.db.models import User, Workspace, WorkspaceMemberInvite
from plane.license.utils.instance_value import get_email_configuration
from plane.utils.exception_logger import log_exception
@shared_task
@ -37,6 +37,7 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@ -65,6 +66,7 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
use_ssl=EMAIL_USE_SSL == "1",
)
msg = EmailMultiAlternatives(
@ -76,14 +78,12 @@ def workspace_invitation(email, workspace_id, token, current_site, invitor):
)
msg.attach_alternative(html_content, "text/html")
msg.send()
logging.getLogger("plane").info("Email sent succesfully")
return
except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist):
print("Workspace or WorkspaceMember Invite Does not exists")
except (Workspace.DoesNotExist, WorkspaceMemberInvite.DoesNotExist) as e:
log_exception(e)
return
except Exception as e:
# Print logs if in DEBUG mode
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return

View File

@ -23,6 +23,7 @@ class Command(BaseCommand):
EMAIL_HOST_PASSWORD,
EMAIL_PORT,
EMAIL_USE_TLS,
EMAIL_USE_SSL,
EMAIL_FROM,
) = get_email_configuration()
@ -32,6 +33,7 @@ class Command(BaseCommand):
username=EMAIL_HOST_USER,
password=EMAIL_HOST_PASSWORD,
use_tls=EMAIL_USE_TLS == "1",
use_ssl=EMAIL_USE_SSL == "1",
timeout=30,
)
# Prepare email details

View File

@ -0,0 +1,41 @@
# Generated by Django 4.2.7 on 2024-03-19 08:28
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add soft-archive support to Cycle, Module and Project.

    A ``null`` value in ``archived_at`` means "not archived"; a timestamp
    records when the row was soft-archived.  Also re-declares the
    ``SocialLoginConnection.medium`` field with its provider choices.
    """

    dependencies = [
        # Quote style normalized to double quotes for consistency with the
        # operations list below (mixed quoting in the original).
        ("db", "0061_project_logo_props"),
    ]

    operations = [
        migrations.AddField(
            model_name="cycle",
            name="archived_at",
            field=models.DateTimeField(null=True),
        ),
        migrations.AddField(
            model_name="module",
            name="archived_at",
            field=models.DateTimeField(null=True),
        ),
        migrations.AddField(
            model_name="project",
            name="archived_at",
            field=models.DateTimeField(null=True),
        ),
        # Restrict the social-login medium to the supported providers;
        # choices are (human label, stored value) pairs as in the model.
        migrations.AlterField(
            model_name="socialloginconnection",
            name="medium",
            field=models.CharField(
                choices=[
                    ("Google", "google"),
                    ("Github", "github"),
                    ("Jira", "jira"),
                ],
                default=None,
                max_length=20,
            ),
        ),
    ]

View File

@ -69,6 +69,7 @@ class Cycle(ProjectBaseModel):
external_source = models.CharField(max_length=255, null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
progress_snapshot = models.JSONField(default=dict)
archived_at = models.DateTimeField(null=True)
class Meta:
verbose_name = "Cycle"

View File

@ -91,6 +91,7 @@ class IssueManager(models.Manager):
| models.Q(issue_inbox__isnull=True)
)
.exclude(archived_at__isnull=False)
.exclude(project__archived_at__isnull=False)
.exclude(is_draft=True)
)

View File

@ -92,6 +92,7 @@ class Module(ProjectBaseModel):
sort_order = models.FloatField(default=65535)
external_source = models.CharField(max_length=255, null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True)
archived_at = models.DateTimeField(null=True)
class Meta:
unique_together = ["name", "project"]

View File

@ -114,6 +114,7 @@ class Project(BaseModel):
null=True,
related_name="default_state",
)
archived_at = models.DateTimeField(null=True)
def __str__(self):
"""Return name of the project"""

View File

@ -1,16 +1,17 @@
# Python imports
import uuid
import string
import random
import string
import uuid
import pytz
from django.contrib.auth.models import (
AbstractBaseUser,
PermissionsMixin,
UserManager,
)
# Django imports
from django.db import models
from django.contrib.auth.models import (
AbstractBaseUser,
UserManager,
PermissionsMixin,
)
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.utils import timezone

View File

@ -64,6 +64,10 @@ def get_email_configuration():
"key": "EMAIL_USE_TLS",
"default": os.environ.get("EMAIL_USE_TLS", "1"),
},
{
"key": "EMAIL_USE_SSL",
"default": os.environ.get("EMAIL_USE_SSL", "0"),
},
{
"key": "EMAIL_FROM",
"default": os.environ.get(

View File

@ -3,19 +3,20 @@
# Python imports
import os
import ssl
import certifi
from datetime import timedelta
from urllib.parse import urlparse
# Django imports
from django.core.management.utils import get_random_secret_key
import certifi
# Third party imports
import dj_database_url
import sentry_sdk
# Django imports
from django.core.management.utils import get_random_secret_key
from sentry_sdk.integrations.celery import CeleryIntegration
from sentry_sdk.integrations.django import DjangoIntegration
from sentry_sdk.integrations.redis import RedisIntegration
from sentry_sdk.integrations.celery import CeleryIntegration
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
@ -23,7 +24,7 @@ BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SECRET_KEY = os.environ.get("SECRET_KEY", get_random_secret_key())
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = False
DEBUG = int(os.environ.get("DEBUG", "0"))
# Allowed Hosts
ALLOWED_HOSTS = ["*"]

View File

@ -7,8 +7,8 @@ from .common import * # noqa
DEBUG = True
# Debug Toolbar settings
INSTALLED_APPS += ("debug_toolbar",)
MIDDLEWARE += ("debug_toolbar.middleware.DebugToolbarMiddleware",)
INSTALLED_APPS += ("debug_toolbar",) # noqa
MIDDLEWARE += ("debug_toolbar.middleware.DebugToolbarMiddleware",) # noqa
DEBUG_TOOLBAR_PATCH_SETTINGS = False
@ -18,7 +18,7 @@ EMAIL_BACKEND = "django.core.mail.backends.console.EmailBackend"
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": REDIS_URL,
"LOCATION": REDIS_URL, # noqa
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
},
@ -28,7 +28,7 @@ CACHES = {
INTERNAL_IPS = ("127.0.0.1",)
MEDIA_URL = "/uploads/"
MEDIA_ROOT = os.path.join(BASE_DIR, "uploads")
MEDIA_ROOT = os.path.join(BASE_DIR, "uploads") # noqa
CORS_ALLOWED_ORIGINS = [
"http://localhost:3000",
@ -36,3 +36,38 @@ CORS_ALLOWED_ORIGINS = [
"http://localhost:4000",
"http://127.0.0.1:4000",
]
# Local development logging: everything goes to the console at DEBUG level.
# The logs directory is created eagerly so file-based handlers (if added
# later) have somewhere to write.
LOG_DIR = os.path.join(BASE_DIR, "logs") # noqa
if not os.path.exists(LOG_DIR):
    os.makedirs(LOG_DIR)
LOGGING = {
    "version": 1,
    # Keep third-party/library loggers active alongside the ones below.
    "disable_existing_loggers": False,
    "formatters": {
        "verbose": {
            "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
            "style": "{",
        },
    },
    "handlers": {
        "console": {
            "level": "DEBUG",
            "class": "logging.StreamHandler",
            "formatter": "verbose",
        },
    },
    "loggers": {
        # Request/response errors from Django itself.
        "django.request": {
            "handlers": ["console"],
            "level": "DEBUG",
            "propagate": False,
        },
        # Application logger used via logging.getLogger("plane") in tasks/views.
        "plane": {
            "handlers": ["console"],
            "level": "DEBUG",
            "propagate": False,
        },
    },
}

View File

@ -1,15 +1,16 @@
"""Production settings"""
import os
from .common import * # noqa
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = int(os.environ.get("DEBUG", 0)) == 1
DEBUG = True
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
INSTALLED_APPS += ("scout_apm.django",)
INSTALLED_APPS += ("scout_apm.django",) # noqa
# Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
@ -18,3 +19,62 @@ SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SCOUT_MONITOR = os.environ.get("SCOUT_MONITOR", False)
SCOUT_KEY = os.environ.get("SCOUT_KEY", "")
SCOUT_NAME = "Plane"
# Production logging: console (human-readable) plus a rotating JSON log file.
# The logs directory must exist before the file handler opens its target.
LOG_DIR = os.path.join(BASE_DIR, "logs") # noqa
if not os.path.exists(LOG_DIR):
    os.makedirs(LOG_DIR)
LOGGING = {
    "version": 1,
    # Keep third-party/library loggers active alongside the ones below.
    "disable_existing_loggers": False,
    "formatters": {
        "verbose": {
            "format": "{levelname} {asctime} {module} {process:d} {thread:d} {message}",
            "style": "{",
        },
        # Structured output for log aggregation (python-json-logger).
        "json": {
            "()": "pythonjsonlogger.jsonlogger.JsonFormatter",
            "fmt": "%(levelname)s %(asctime)s %(module)s %(name)s %(message)s",
        },
    },
    "handlers": {
        "console": {
            "class": "logging.StreamHandler",
            "formatter": "verbose",
            "level": "INFO",
        },
        # Custom project handler; presumably rotates on both size (maxBytes)
        # and time ("when"/"interval") — confirm against
        # plane.utils.logging.SizedTimedRotatingFileHandler.
        "file": {
            "class": "plane.utils.logging.SizedTimedRotatingFileHandler",
            # Debug builds log everything to plane-debug.log; production
            # builds only capture errors in plane-error.log.
            "filename": (
                os.path.join(BASE_DIR, "logs", "plane-debug.log") # noqa
                if DEBUG
                else os.path.join(BASE_DIR, "logs", "plane-error.log") # noqa
            ),
            "when": "s",
            "maxBytes": 1024 * 1024 * 1,
            "interval": 1,
            "backupCount": 5,
            "formatter": "json",
            "level": "DEBUG" if DEBUG else "ERROR",
        },
    },
    "loggers": {
        # Django framework messages.
        "django": {
            "handlers": ["console", "file"],
            "level": "INFO",
            "propagate": True,
        },
        # Request/response errors from Django itself.
        "django.request": {
            "handlers": ["console", "file"],
            "level": "INFO",
            "propagate": False,
        },
        # Application logger used via logging.getLogger("plane") in tasks/views.
        "plane": {
            "level": "DEBUG" if DEBUG else "ERROR",
            "handlers": ["console", "file"],
            "propagate": False,
        },
    },
}

View File

@ -7,6 +7,6 @@ DEBUG = True
# Send it in a dummy outbox
EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
INSTALLED_APPS.append(
INSTALLED_APPS.append( # noqa
"plane.tests",
)

View File

@ -1,25 +1,25 @@
# Python imports
import zoneinfo
from django.conf import settings
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django.db import IntegrityError
# Django imports
from django.urls import resolve
from django.conf import settings
from django.utils import timezone
from django.db import IntegrityError
from django.core.exceptions import ObjectDoesNotExist, ValidationError
from django_filters.rest_framework import DjangoFilterBackend
# Third party imports
from rest_framework import status
from rest_framework.viewsets import ModelViewSet
from rest_framework.response import Response
from rest_framework.exceptions import APIException
from rest_framework.views import APIView
from rest_framework.filters import SearchFilter
from rest_framework.permissions import IsAuthenticated
from sentry_sdk import capture_exception
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework.response import Response
from rest_framework.views import APIView
from rest_framework.viewsets import ModelViewSet
# Module imports
from plane.utils.exception_logger import log_exception
from plane.utils.paginator import BasePaginator
@ -57,7 +57,7 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
try:
return self.model.objects.all()
except Exception as e:
capture_exception(e)
log_exception(e)
raise APIException(
"Please check the view", status.HTTP_400_BAD_REQUEST
)
@ -90,14 +90,13 @@ class BaseViewSet(TimezoneMixin, ModelViewSet, BasePaginator):
)
if isinstance(e, KeyError):
capture_exception(e)
log_exception(e)
return Response(
{"error": "The required key does not exist."},
status=status.HTTP_400_BAD_REQUEST,
)
print(e) if settings.DEBUG else print("Server Error")
capture_exception(e)
log_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,
@ -185,9 +184,7 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
status=status.HTTP_400_BAD_REQUEST,
)
if settings.DEBUG:
print(e)
capture_exception(e)
log_exception(e)
return Response(
{"error": "Something went wrong please try again later"},
status=status.HTTP_500_INTERNAL_SERVER_ERROR,

View File

@ -0,0 +1,15 @@
# Python imports
import logging
# Third party imports
from sentry_sdk import capture_exception
def log_exception(e):
    """Record an exception on the "plane" logger and forward it to Sentry.

    The Sentry call is a no-op unless the SDK was initialised elsewhere in
    the application, so this is safe to call unconditionally.
    """
    logging.getLogger("plane").error(e)
    capture_exception(e)

View File

@ -0,0 +1,46 @@
import logging.handlers as handlers
import time
class SizedTimedRotatingFileHandler(handlers.TimedRotatingFileHandler):
    """Rotating file handler that rolls over on EITHER condition:

    * the current log file would exceed ``maxBytes`` after the next record, or
    * the time-based rollover point of ``TimedRotatingFileHandler`` is reached.
    """

    def __init__(
        self,
        filename,
        maxBytes=0,
        backupCount=0,
        encoding=None,
        delay=0,
        when="h",
        interval=1,
        utc=False,
    ):
        # Delegate all time-based rotation bookkeeping to the parent class;
        # we only add the size threshold on top.
        super().__init__(
            filename,
            when=when,
            interval=interval,
            backupCount=backupCount,
            encoding=encoding,
            delay=delay,
            utc=utc,
        )
        self.maxBytes = maxBytes

    def shouldRollover(self, record):
        """Return 1 when the supplied record should trigger a rollover.

        Checks the size limit first, then falls back to the parent's
        time-based schedule (``self.rolloverAt``).
        """
        if self.stream is None:
            # delay was set — the file has not been opened yet
            self.stream = self._open()
        if self.maxBytes > 0:
            pending = "%s\n" % self.format(record)
            # Explicit seek-to-end: tell() alone is unreliable on Windows
            # (non-POSIX append semantics).
            self.stream.seek(0, 2)
            if self.stream.tell() + len(pending) >= self.maxBytes:
                return 1
        return 1 if int(time.time()) >= self.rolloverAt else 0

View File

@ -1,6 +1,6 @@
# base requirements
Django==4.2.10
Django==4.2.11
psycopg==3.1.12
djangorestframework==3.14.0
redis==4.6.0
@ -27,6 +27,7 @@ psycopg-binary==3.1.12
psycopg-c==3.1.12
scout-apm==2.26.1
openpyxl==3.1.2
python-json-logger==2.0.7
beautifulsoup4==4.12.2
dj-database-url==2.1.0
posthog==3.0.2

View File

@ -1,82 +0,0 @@
# 1-Click Self-Hosting
In this guide, we will walk you through the process of setting up a 1-click self-hosted environment. Self-hosting allows you to have full control over your applications and data. It's a great way to ensure privacy, control, and customization.
Let's get started!
## Installing Plane
Installing Plane is a very easy and minimal step process.
### Prerequisite
- Operating System (latest): Debian / Ubuntu / Centos
- Supported CPU Architecture: AMD64 / ARM64 / x86_64 / aarch64
### Downloading Latest Stable Release
```
curl -fsSL https://raw.githubusercontent.com/makeplane/plane/master/deploy/1-click/install.sh | sh -
```
<details>
<summary>Downloading Preview Release</summary>
```
export BRANCH=preview
curl -fsSL https://raw.githubusercontent.com/makeplane/plane/preview/deploy/1-click/install.sh | sh -
```
NOTE: `Preview` builds do not support ARM64/AARCH64 CPU architecture
</details>
---
Expect this after a successful install
![Install Output](images/install.png)
Access the application on a browser via http://server-ip-address
---
### Get Control of your Plane Server Setup
Plane App is available via the command `plane-app`. Running the command `plane-app --help` helps you to manage Plane
![Plane Help](images/help.png)
<ins>Basic Operations</ins>:
1. Start Server using `plane-app start`
1. Stop Server using `plane-app stop`
1. Restart Server using `plane-app restart`
<ins>Advanced Operations</ins>:
1. Configure Plane using `plane-app --configure`. This will give you options to modify
- NGINX Port (default 80)
- Domain Name (default is the local server public IP address)
- File Upload Size (default 5MB)
- External Postgres DB Url (optional - default empty)
- External Redis URL (optional - default empty)
- AWS S3 Bucket (optional - to be configured only in case the user wants to use an S3 Bucket)
1. Upgrade Plane using `plane-app --upgrade`. This will get the latest stable version of Plane files (docker-compose.yaml, .env, and docker images)
1. Updating Plane App installer using `plane-app --update-installer` will update the `plane-app` utility.
1. Uninstall Plane using `plane-app --uninstall`. This will uninstall the Plane application and all Docker containers from the server, but it does not remove the data stored in Postgres, Redis, and Minio.
1. Plane App can be reinstalled using `plane-app --install`.
<ins>Application Data is stored in the mentioned folders</ins>:
1. DB Data: /opt/plane/data/postgres
1. Redis Data: /opt/plane/data/redis
1. Minio Data: /opt/plane/data/minio

Binary file not shown.

Before

Width:  |  Height:  |  Size: 109 KiB

Binary file not shown.

Before

Width:  |  Height:  |  Size: 173 KiB

View File

@ -1,20 +0,0 @@
#!/bin/bash

export GIT_REPO=makeplane/plane

# Fetch the plane-app management utility into /usr/local/bin, preferring curl
# and falling back to wget. The ?token=$(date +%s) query string is a
# cache-buster against GitHub's raw-content CDN.
if command -v curl &> /dev/null; then
    sudo curl -sSL \
        -o /usr/local/bin/plane-app \
        https://raw.githubusercontent.com/$GIT_REPO/${BRANCH:-master}/deploy/1-click/plane-app?token=$(date +%s)
else
    sudo wget -q \
        -O /usr/local/bin/plane-app \
        https://raw.githubusercontent.com/$GIT_REPO/${BRANCH:-master}/deploy/1-click/plane-app?token=$(date +%s)
fi

sudo chmod +x /usr/local/bin/plane-app

# Bake the branch and repo used at install time into the downloaded utility so
# later self-updates keep tracking the same source.
sudo sed -i 's@export DEPLOY_BRANCH=${BRANCH:-master}@export DEPLOY_BRANCH='${BRANCH:-master}'@' /usr/local/bin/plane-app
sudo sed -i 's@CODE_REPO=${GIT_REPO:-makeplane/plane}@CODE_REPO='$GIT_REPO'@' /usr/local/bin/plane-app

# Kick off a full install (-i); run `plane-app --help` for other options.
plane-app -i #--help

View File

@ -1,791 +0,0 @@
#!/bin/bash
function print_header() {
clear
cat <<"EOF"
---------------------------------------
____ _
| _ \| | __ _ _ __ ___
| |_) | |/ _` | '_ \ / _ \
| __/| | (_| | | | | __/
|_| |_|\__,_|_| |_|\___|
---------------------------------------
Project management tool from the future
---------------------------------------
EOF
}
function update_env_file() {
    # Insert or update a KEY=VALUE pair in an env-style file (sudo-owned).
    #   $1 - config file path, $2 - key, $3 - value
    config_file=$1
    key=$2
    value=$3

    # Create the file when missing so the grep/awk below have something to read
    if [ ! -f "$config_file" ]; then
        echo "Config file not found. Creating a new one..." >&2
        sudo touch "$config_file"
    fi

    # -q keeps the matched line off stdout (the bare grep used to print it)
    if sudo grep -q "^$key=" "$config_file"; then
        # Rebuild the whole line instead of reassigning awk field 2 so values
        # that themselves contain '=' (URLs, connection strings) survive intact
        sudo awk -v key="$key" -v value="$value" -F '=' '{if ($1 == key) print key "=" value; else print}' "$config_file" | sudo tee "$config_file.tmp" > /dev/null
        sudo mv "$config_file.tmp" "$config_file" &> /dev/null
    else
        echo -e "$key=$value" | sudo tee -a "$config_file" > /dev/null
    fi
}
function read_env_file() {
    # Print the value stored for a key in an env-style file, or "" if absent.
    #   $1 - config file path, $2 - key
    config_file=$1
    key=$2

    # Create the file when missing so subsequent reads/writes behave uniformly
    if [ ! -f "$config_file" ]; then
        echo "Config file not found. Creating a new one..." >&2
        sudo touch "$config_file"
    fi

    if sudo grep -q "^$key=" "$config_file"; then
        # Strip only up to the FIRST '=' so values containing '=' themselves
        # (e.g. DATABASE_URL query strings) are returned whole; the old
        # `print $2` truncated them at the second '='.
        value=$(sudo awk -v key="$key" -F '=' '{if ($1 == key) {sub(/^[^=]*=/, ""); print}}' "$config_file")
        echo "$value"
    else
        echo ""
    fi
}
function update_config() {
    # Persist an installer-level setting in config.env.
    config_file="$PLANE_INSTALL_DIR/config.env"
    # Quote the arguments: values with spaces (e.g. `update_config
    # "UPGRADE_DATE" "$(date)"`) were previously word-split into extra args.
    update_env_file "$config_file" "$1" "$2"
}

function read_config() {
    # Read an installer-level setting from config.env.
    config_file="$PLANE_INSTALL_DIR/config.env"
    read_env_file "$config_file" "$1"
}

function update_env() {
    # Persist an application setting in the runtime .env file.
    config_file="$PLANE_INSTALL_DIR/.env"
    update_env_file "$config_file" "$1" "$2"
}

function read_env() {
    # Read an application setting from the runtime .env file.
    config_file="$PLANE_INSTALL_DIR/.env"
    read_env_file "$config_file" "$1"
}
function show_message() {
    # Append $1 to the global PROGRESS_MSG array — or overwrite its last entry
    # when $2 == "replace_last_line" — then redraw the banner plus every
    # accumulated message, giving a simple in-place progress display.
    # NOTE(review): PROGRESS_MSG[-1] needs bash 4.3+ — confirm target distros.
    print_header
    if [ "$2" == "replace_last_line" ]; then
        PROGRESS_MSG[-1]="$1"
    else
        PROGRESS_MSG+=("$1")
    fi
    for statement in "${PROGRESS_MSG[@]}"; do
        echo "$statement"
    done
}
function prepare_environment() {
show_message "Prepare Environment..." >&2
show_message "- Updating OS with required tools ✋" >&2
sudo "$PACKAGE_MANAGER" update -y
# sudo "$PACKAGE_MANAGER" upgrade -y
local required_tools=("curl" "awk" "wget" "nano" "dialog" "git" "uidmap" "jq")
for tool in "${required_tools[@]}"; do
if ! command -v $tool &> /dev/null; then
sudo "$PACKAGE_MANAGER" install -y $tool
fi
done
show_message "- OS Updated ✅" "replace_last_line" >&2
# Install Docker if not installed
if ! command -v docker &> /dev/null; then
show_message "- Installing Docker ✋" >&2
# curl -o- https://get.docker.com | bash -
if [ "$PACKAGE_MANAGER" == "yum" ]; then
sudo $PACKAGE_MANAGER install -y yum-utils
sudo yum-config-manager --add-repo https://download.docker.com/linux/centos/docker-ce.repo &> /dev/null
elif [ "$PACKAGE_MANAGER" == "apt-get" ]; then
# Add Docker's official GPG key:
sudo $PACKAGE_MANAGER update
sudo $PACKAGE_MANAGER install ca-certificates curl &> /dev/null
sudo install -m 0755 -d /etc/apt/keyrings &> /dev/null
sudo curl -fsSL https://download.docker.com/linux/ubuntu/gpg -o /etc/apt/keyrings/docker.asc &> /dev/null
sudo chmod a+r /etc/apt/keyrings/docker.asc &> /dev/null
# Add the repository to Apt sources:
echo \
"deb [arch=$(dpkg --print-architecture) signed-by=/etc/apt/keyrings/docker.asc] https://download.docker.com/linux/ubuntu \
$(. /etc/os-release && echo "$VERSION_CODENAME") stable" | \
sudo tee /etc/apt/sources.list.d/docker.list > /dev/null
sudo $PACKAGE_MANAGER update
fi
sudo $PACKAGE_MANAGER install docker-ce docker-ce-cli containerd.io docker-buildx-plugin docker-compose-plugin -y
show_message "- Docker Installed ✅" "replace_last_line" >&2
else
show_message "- Docker is already installed ✅" >&2
fi
update_config "PLANE_ARCH" "$CPU_ARCH"
update_config "DOCKER_VERSION" "$(docker -v | awk '{print $3}' | sed 's/,//g')"
update_config "PLANE_DATA_DIR" "$DATA_DIR"
update_config "PLANE_LOG_DIR" "$LOG_DIR"
# echo "TRUE"
echo "Environment prepared successfully ✅"
show_message "Environment prepared successfully ✅" >&2
show_message "" >&2
return 0
}
function download_plane() {
# Download Docker Compose File from github url
show_message "Downloading Plane Setup Files ✋" >&2
sudo curl -H 'Cache-Control: no-cache, no-store' \
-s -o $PLANE_INSTALL_DIR/docker-compose.yaml \
https://raw.githubusercontent.com/$CODE_REPO/$DEPLOY_BRANCH/deploy/selfhost/docker-compose.yml?token=$(date +%s)
sudo curl -H 'Cache-Control: no-cache, no-store' \
-s -o $PLANE_INSTALL_DIR/variables-upgrade.env \
https://raw.githubusercontent.com/$CODE_REPO/$DEPLOY_BRANCH/deploy/selfhost/variables.env?token=$(date +%s)
# if .env does not exists rename variables-upgrade.env to .env
if [ ! -f "$PLANE_INSTALL_DIR/.env" ]; then
sudo mv $PLANE_INSTALL_DIR/variables-upgrade.env $PLANE_INSTALL_DIR/.env
fi
show_message "Plane Setup Files Downloaded ✅" "replace_last_line" >&2
show_message "" >&2
echo "PLANE_DOWNLOADED"
return 0
}
function printUsageInstructions() {
show_message "" >&2
show_message "----------------------------------" >&2
show_message "Usage Instructions" >&2
show_message "----------------------------------" >&2
show_message "" >&2
show_message "To use the Plane Setup utility, use below commands" >&2
show_message "" >&2
show_message "Usage: plane-app [OPTION]" >&2
show_message "" >&2
show_message " start Start Server" >&2
show_message " stop Stop Server" >&2
show_message " restart Restart Server" >&2
show_message "" >&2
show_message "other options" >&2
show_message " -i, --install Install Plane" >&2
show_message " -c, --configure Configure Plane" >&2
show_message " -up, --upgrade Upgrade Plane" >&2
show_message " -un, --uninstall Uninstall Plane" >&2
show_message " -ui, --update-installer Update Plane Installer" >&2
show_message " -h, --help Show help" >&2
show_message "" >&2
show_message "" >&2
show_message "Application Data is stored in mentioned folders" >&2
show_message " - DB Data: $DATA_DIR/postgres" >&2
show_message " - Redis Data: $DATA_DIR/redis" >&2
show_message " - Minio Data: $DATA_DIR/minio" >&2
show_message "" >&2
show_message "" >&2
show_message "----------------------------------" >&2
show_message "" >&2
}
function build_local_image() {
show_message "- Downloading Plane Source Code ✋" >&2
REPO=https://github.com/$CODE_REPO.git
CURR_DIR=$PWD
PLANE_TEMP_CODE_DIR=$PLANE_INSTALL_DIR/temp
sudo rm -rf $PLANE_TEMP_CODE_DIR > /dev/null
sudo git clone $REPO $PLANE_TEMP_CODE_DIR --branch $DEPLOY_BRANCH --single-branch -q > /dev/null
sudo cp $PLANE_TEMP_CODE_DIR/deploy/selfhost/build.yml $PLANE_TEMP_CODE_DIR/build.yml
show_message "- Plane Source Code Downloaded ✅" "replace_last_line" >&2
show_message "- Building Docker Images ✋" >&2
sudo docker compose --env-file=$PLANE_INSTALL_DIR/.env -f $PLANE_TEMP_CODE_DIR/build.yml build --no-cache
}
function check_for_docker_images() {
show_message "" >&2
# show_message "Building Plane Images" >&2
CURR_DIR=$(pwd)
if [ "$DEPLOY_BRANCH" == "master" ]; then
update_env "APP_RELEASE" "latest"
export APP_RELEASE=latest
else
update_env "APP_RELEASE" "$DEPLOY_BRANCH"
export APP_RELEASE=$DEPLOY_BRANCH
fi
if [ $USE_GLOBAL_IMAGES == 1 ]; then
# show_message "Building Plane Images for $CPU_ARCH is not required. Skipping... ✅" "replace_last_line" >&2
export DOCKERHUB_USER=makeplane
update_env "DOCKERHUB_USER" "$DOCKERHUB_USER"
update_env "PULL_POLICY" "always"
echo "Building Plane Images for $CPU_ARCH is not required. Skipping..."
else
export DOCKERHUB_USER=myplane
show_message "Building Plane Images for $CPU_ARCH " >&2
update_env "DOCKERHUB_USER" "$DOCKERHUB_USER"
update_env "PULL_POLICY" "never"
build_local_image
sudo rm -rf $PLANE_INSTALL_DIR/temp > /dev/null
show_message "- Docker Images Built ✅" "replace_last_line" >&2
sudo cd $CURR_DIR
fi
sudo sed -i "s|- pgdata:|- $DATA_DIR/postgres:|g" $PLANE_INSTALL_DIR/docker-compose.yaml
sudo sed -i "s|- redisdata:|- $DATA_DIR/redis:|g" $PLANE_INSTALL_DIR/docker-compose.yaml
sudo sed -i "s|- uploads:|- $DATA_DIR/minio:|g" $PLANE_INSTALL_DIR/docker-compose.yaml
show_message "Downloading Plane Images for $CPU_ARCH ✋" >&2
sudo docker compose -f $PLANE_INSTALL_DIR/docker-compose.yaml --env-file=$PLANE_INSTALL_DIR/.env pull
show_message "Plane Images Downloaded ✅" "replace_last_line" >&2
}
function configure_plane() {
show_message "" >&2
show_message "Configuring Plane" >&2
show_message "" >&2
exec 3>&1
nginx_port=$(read_env "NGINX_PORT")
domain_name=$(read_env "DOMAIN_NAME")
upload_limit=$(read_env "FILE_SIZE_LIMIT")
NGINX_SETTINGS=$(dialog \
--ok-label "Next" \
--cancel-label "Skip" \
--backtitle "Plane Configuration" \
--title "Nginx Settings" \
--form "" \
0 0 0 \
"Port:" 1 1 "${nginx_port:-80}" 1 10 50 0 \
"Domain:" 2 1 "${domain_name:-localhost}" 2 10 50 0 \
"Upload Limit:" 3 1 "${upload_limit:-5242880}" 3 10 15 0 \
2>&1 1>&3)
save_nginx_settings=0
if [ $? -eq 0 ]; then
save_nginx_settings=1
nginx_port=$(echo "$NGINX_SETTINGS" | sed -n 1p)
domain_name=$(echo "$NGINX_SETTINGS" | sed -n 2p)
upload_limit=$(echo "$NGINX_SETTINGS" | sed -n 3p)
fi
# smtp_host=$(read_env "EMAIL_HOST")
# smtp_user=$(read_env "EMAIL_HOST_USER")
# smtp_password=$(read_env "EMAIL_HOST_PASSWORD")
# smtp_port=$(read_env "EMAIL_PORT")
# smtp_from=$(read_env "EMAIL_FROM")
# smtp_tls=$(read_env "EMAIL_USE_TLS")
# smtp_ssl=$(read_env "EMAIL_USE_SSL")
# SMTP_SETTINGS=$(dialog \
# --ok-label "Next" \
# --cancel-label "Skip" \
# --backtitle "Plane Configuration" \
# --title "SMTP Settings" \
# --form "" \
# 0 0 0 \
# "Host:" 1 1 "$smtp_host" 1 10 80 0 \
# "User:" 2 1 "$smtp_user" 2 10 80 0 \
# "Password:" 3 1 "$smtp_password" 3 10 80 0 \
# "Port:" 4 1 "${smtp_port:-587}" 4 10 5 0 \
# "From:" 5 1 "${smtp_from:-Mailer <mailer@example.com>}" 5 10 80 0 \
# "TLS:" 6 1 "${smtp_tls:-1}" 6 10 1 1 \
# "SSL:" 7 1 "${smtp_ssl:-0}" 7 10 1 1 \
# 2>&1 1>&3)
# save_smtp_settings=0
# if [ $? -eq 0 ]; then
# save_smtp_settings=1
# smtp_host=$(echo "$SMTP_SETTINGS" | sed -n 1p)
# smtp_user=$(echo "$SMTP_SETTINGS" | sed -n 2p)
# smtp_password=$(echo "$SMTP_SETTINGS" | sed -n 3p)
# smtp_port=$(echo "$SMTP_SETTINGS" | sed -n 4p)
# smtp_from=$(echo "$SMTP_SETTINGS" | sed -n 5p)
# smtp_tls=$(echo "$SMTP_SETTINGS" | sed -n 6p)
# fi
external_pgdb_url=$(dialog \
--backtitle "Plane Configuration" \
--title "Using External Postgres Database ?" \
--ok-label "Next" \
--cancel-label "Skip" \
--inputbox "Enter your external database url" \
8 60 3>&1 1>&2 2>&3)
external_redis_url=$(dialog \
--backtitle "Plane Configuration" \
--title "Using External Redis Database ?" \
--ok-label "Next" \
--cancel-label "Skip" \
--inputbox "Enter your external redis url" \
8 60 3>&1 1>&2 2>&3)
aws_region=$(read_env "AWS_REGION")
aws_access_key=$(read_env "AWS_ACCESS_KEY_ID")
aws_secret_key=$(read_env "AWS_SECRET_ACCESS_KEY")
aws_bucket=$(read_env "AWS_S3_BUCKET_NAME")
AWS_S3_SETTINGS=$(dialog \
--ok-label "Next" \
--cancel-label "Skip" \
--backtitle "Plane Configuration" \
--title "AWS S3 Bucket Configuration" \
--form "" \
0 0 0 \
"Region:" 1 1 "$aws_region" 1 10 50 0 \
"Access Key:" 2 1 "$aws_access_key" 2 10 50 0 \
"Secret Key:" 3 1 "$aws_secret_key" 3 10 50 0 \
"Bucket:" 4 1 "$aws_bucket" 4 10 50 0 \
2>&1 1>&3)
save_aws_settings=0
if [ $? -eq 0 ]; then
save_aws_settings=1
aws_region=$(echo "$AWS_S3_SETTINGS" | sed -n 1p)
aws_access_key=$(echo "$AWS_S3_SETTINGS" | sed -n 2p)
aws_secret_key=$(echo "$AWS_S3_SETTINGS" | sed -n 3p)
aws_bucket=$(echo "$AWS_S3_SETTINGS" | sed -n 4p)
fi
# display dialogbox asking for confirmation to continue
CONFIRM_CONFIG=$(dialog \
--title "Confirm Configuration" \
--backtitle "Plane Configuration" \
--yes-label "Confirm" \
--no-label "Cancel" \
--yesno \
"
save_ngnix_settings: $save_nginx_settings
nginx_port: $nginx_port
domain_name: $domain_name
upload_limit: $upload_limit
save_aws_settings: $save_aws_settings
aws_region: $aws_region
aws_access_key: $aws_access_key
aws_secret_key: $aws_secret_key
aws_bucket: $aws_bucket
pdgb_url: $external_pgdb_url
redis_url: $external_redis_url
" \
0 0 3>&1 1>&2 2>&3)
if [ $? -eq 0 ]; then
if [ $save_nginx_settings == 1 ]; then
update_env "NGINX_PORT" "$nginx_port"
update_env "DOMAIN_NAME" "$domain_name"
update_env "WEB_URL" "http://$domain_name"
update_env "CORS_ALLOWED_ORIGINS" "http://$domain_name"
update_env "FILE_SIZE_LIMIT" "$upload_limit"
fi
# check enable smpt settings value
# if [ $save_smtp_settings == 1 ]; then
# update_env "EMAIL_HOST" "$smtp_host"
# update_env "EMAIL_HOST_USER" "$smtp_user"
# update_env "EMAIL_HOST_PASSWORD" "$smtp_password"
# update_env "EMAIL_PORT" "$smtp_port"
# update_env "EMAIL_FROM" "$smtp_from"
# update_env "EMAIL_USE_TLS" "$smtp_tls"
# update_env "EMAIL_USE_SSL" "$smtp_ssl"
# fi
# check enable aws settings value
if [[ $save_aws_settings == 1 && $aws_access_key != "" && $aws_secret_key != "" ]] ; then
update_env "USE_MINIO" "0"
update_env "AWS_REGION" "$aws_region"
update_env "AWS_ACCESS_KEY_ID" "$aws_access_key"
update_env "AWS_SECRET_ACCESS_KEY" "$aws_secret_key"
update_env "AWS_S3_BUCKET_NAME" "$aws_bucket"
elif [[ -z $aws_access_key || -z $aws_secret_key ]] ; then
update_env "USE_MINIO" "1"
update_env "AWS_REGION" ""
update_env "AWS_ACCESS_KEY_ID" ""
update_env "AWS_SECRET_ACCESS_KEY" ""
update_env "AWS_S3_BUCKET_NAME" "uploads"
fi
if [ "$external_pgdb_url" != "" ]; then
update_env "DATABASE_URL" "$external_pgdb_url"
fi
if [ "$external_redis_url" != "" ]; then
update_env "REDIS_URL" "$external_redis_url"
fi
fi
exec 3>&-
}
function upgrade_configuration() {
upg_env_file="$PLANE_INSTALL_DIR/variables-upgrade.env"
# Check if the file exists
if [ -f "$upg_env_file" ]; then
# Read each line from the file
while IFS= read -r line; do
# Skip comments and empty lines
if [[ "$line" =~ ^\s*#.*$ ]] || [[ -z "$line" ]]; then
continue
fi
# Split the line into key and value
key=$(echo "$line" | cut -d'=' -f1)
value=$(echo "$line" | cut -d'=' -f2-)
current_value=$(read_env "$key")
if [ -z "$current_value" ]; then
update_env "$key" "$value"
fi
done < "$upg_env_file"
fi
}
function install() {
show_message ""
if [ "$(uname)" == "Linux" ]; then
OS="linux"
OS_NAME=$(sudo awk -F= '/^ID=/{print $2}' /etc/os-release)
OS_NAME=$(echo "$OS_NAME" | tr -d '"')
print_header
if [ "$OS_NAME" == "ubuntu" ] || [ "$OS_NAME" == "debian" ] ||
[ "$OS_NAME" == "centos" ] || [ "$OS_NAME" == "amazon" ]; then
OS_SUPPORTED=true
show_message "******** Installing Plane ********"
show_message ""
prepare_environment
if [ $? -eq 0 ]; then
download_plane
if [ $? -eq 0 ]; then
# create_service
check_for_docker_images
last_installed_on=$(read_config "INSTALLATION_DATE")
# if [ "$last_installed_on" == "" ]; then
# configure_plane
# fi
update_env "NGINX_PORT" "80"
update_env "DOMAIN_NAME" "$MY_IP"
update_env "WEB_URL" "http://$MY_IP"
update_env "CORS_ALLOWED_ORIGINS" "http://$MY_IP"
update_config "INSTALLATION_DATE" "$(date '+%Y-%m-%d')"
show_message "Plane Installed Successfully ✅"
show_message ""
else
show_message "Download Failed ❌"
exit 1
fi
else
show_message "Initialization Failed ❌"
exit 1
fi
else
OS_SUPPORTED=false
PROGRESS_MSG="❌❌ Unsupported OS Varient Detected : $OS_NAME ❌❌"
show_message ""
exit 1
fi
else
PROGRESS_MSG="❌❌❌ Unsupported OS Detected : $(uname) ❌❌❌"
show_message ""
exit 1
fi
}
function upgrade() {
print_header
if [ "$(uname)" == "Linux" ]; then
OS="linux"
OS_NAME=$(sudo awk -F= '/^ID=/{print $2}' /etc/os-release)
OS_NAME=$(echo "$OS_NAME" | tr -d '"')
if [ "$OS_NAME" == "ubuntu" ] || [ "$OS_NAME" == "debian" ] ||
[ "$OS_NAME" == "centos" ] || [ "$OS_NAME" == "amazon" ]; then
OS_SUPPORTED=true
show_message "******** Upgrading Plane ********"
show_message ""
prepare_environment
if [ $? -eq 0 ]; then
stop_server
download_plane
if [ $? -eq 0 ]; then
check_for_docker_images
upgrade_configuration
update_config "UPGRADE_DATE" "$(date)"
start_server
show_message ""
show_message "Plane Upgraded Successfully ✅"
show_message ""
printUsageInstructions
else
show_message "Download Failed ❌"
exit 1
fi
else
show_message "Initialization Failed ❌"
exit 1
fi
else
PROGRESS_MSG="❌❌ Unsupported OS Varient Detected : $OS_NAME ❌❌"
show_message ""
exit 1
fi
else
PROGRESS_MSG="❌❌❌ Unsupported OS Detected : $(uname) ❌❌❌"
show_message ""
exit 1
fi
}
function uninstall() {
print_header
if [ "$(uname)" == "Linux" ]; then
OS="linux"
OS_NAME=$(awk -F= '/^ID=/{print $2}' /etc/os-release)
OS_NAME=$(echo "$OS_NAME" | tr -d '"')
if [ "$OS_NAME" == "ubuntu" ] || [ "$OS_NAME" == "debian" ] ||
[ "$OS_NAME" == "centos" ] || [ "$OS_NAME" == "amazon" ]; then
OS_SUPPORTED=true
show_message "******** Uninstalling Plane ********"
show_message ""
stop_server
if ! [ -x "$(command -v docker)" ]; then
echo "DOCKER_NOT_INSTALLED" &> /dev/null
else
# Ask of user input to confirm uninstall docker ?
CONFIRM_DOCKER_PURGE=$(dialog --title "Uninstall Docker" --defaultno --yesno "Are you sure you want to uninstall docker ?" 8 60 3>&1 1>&2 2>&3)
if [ $? -eq 0 ]; then
show_message "- Uninstalling Docker ✋"
sudo docker images -q | xargs -r sudo docker rmi -f &> /dev/null
sudo "$PACKAGE_MANAGER" remove -y docker-engine docker docker.io docker-ce docker-ce-cli docker-compose-plugin &> /dev/null
sudo "$PACKAGE_MANAGER" autoremove -y docker-engine docker docker.io docker-ce docker-compose-plugin &> /dev/null
show_message "- Docker Uninstalled ✅" "replace_last_line" >&2
fi
fi
sudo rm $PLANE_INSTALL_DIR/.env &> /dev/null
sudo rm $PLANE_INSTALL_DIR/variables-upgrade.env &> /dev/null
sudo rm $PLANE_INSTALL_DIR/config.env &> /dev/null
sudo rm $PLANE_INSTALL_DIR/docker-compose.yaml &> /dev/null
# rm -rf $PLANE_INSTALL_DIR &> /dev/null
show_message "- Configuration Cleaned ✅"
show_message ""
show_message "******** Plane Uninstalled ********"
show_message ""
show_message ""
show_message "Plane Configuration Cleaned with some exceptions"
show_message "- DB Data: $DATA_DIR/postgres"
show_message "- Redis Data: $DATA_DIR/redis"
show_message "- Minio Data: $DATA_DIR/minio"
show_message ""
show_message ""
show_message "Thank you for using Plane. We hope to see you again soon."
show_message ""
show_message ""
else
PROGRESS_MSG="❌❌ Unsupported OS Varient Detected : $OS_NAME ❌❌"
show_message ""
exit 1
fi
else
PROGRESS_MSG="❌❌❌ Unsupported OS Detected : $(uname) ❌❌❌"
show_message ""
exit 1
fi
}
function start_server() {
docker_compose_file="$PLANE_INSTALL_DIR/docker-compose.yaml"
env_file="$PLANE_INSTALL_DIR/.env"
# check if both the files exits
if [ -f "$docker_compose_file" ] && [ -f "$env_file" ]; then
show_message "Starting Plane Server ($APP_RELEASE) ✋"
sudo docker compose -f $docker_compose_file --env-file=$env_file up -d
# Wait for containers to be running
echo "Waiting for containers to start..."
while ! sudo docker compose -f "$docker_compose_file" --env-file="$env_file" ps --services --filter "status=running" --quiet | grep -q "."; do
sleep 1
done
# wait for migrator container to exit with status 0 before starting the application
migrator_container_id=$(sudo docker container ls -aq -f "name=plane-migrator")
# if migrator container is running, wait for it to exit
if [ -n "$migrator_container_id" ]; then
while sudo docker inspect --format='{{.State.Status}}' $migrator_container_id | grep -q "running"; do
show_message "Waiting for Plane Server ($APP_RELEASE) to start...✋ (Migrator in progress)" "replace_last_line" >&2
sleep 1
done
fi
# if migrator exit status is not 0, show error message and exit
if [ -n "$migrator_container_id" ]; then
migrator_exit_code=$(sudo docker inspect --format='{{.State.ExitCode}}' $migrator_container_id)
if [ $migrator_exit_code -ne 0 ]; then
# show_message "Migrator failed with exit code $migrator_exit_code ❌" "replace_last_line" >&2
show_message "Plane Server failed to start ❌" "replace_last_line" >&2
stop_server
exit 1
fi
fi
api_container_id=$(sudo docker container ls -q -f "name=plane-api")
while ! sudo docker logs $api_container_id 2>&1 | grep -i "Application startup complete";
do
show_message "Waiting for Plane Server ($APP_RELEASE) to start...✋ (API starting)" "replace_last_line" >&2
sleep 1
done
show_message "Plane Server Started ($APP_RELEASE) ✅" "replace_last_line" >&2
show_message "---------------------------------------------------------------" >&2
show_message "Access the Plane application at http://$MY_IP" >&2
show_message "---------------------------------------------------------------" >&2
else
show_message "Plane Server not installed. Please install Plane first ❌" "replace_last_line" >&2
fi
}
function stop_server() {
    # Bring the Plane docker-compose stack down; no-op (with a note) when the
    # compose file or env file is missing, i.e. Plane was never installed.
    docker_compose_file="$PLANE_INSTALL_DIR/docker-compose.yaml"
    env_file="$PLANE_INSTALL_DIR/.env"
    if [ ! -f "$docker_compose_file" ] || [ ! -f "$env_file" ]; then
        show_message "Plane Server not installed [Skipping] ✅" "replace_last_line" >&2
        return
    fi
    show_message "Stopping Plane Server ($APP_RELEASE) ✋"
    sudo docker compose -f "$docker_compose_file" --env-file="$env_file" down
    show_message "Plane Server Stopped ($APP_RELEASE) ✅" "replace_last_line" >&2
}
function restart_server() {
    # Restart the running Plane stack in place; requires an existing install.
    docker_compose_file="$PLANE_INSTALL_DIR/docker-compose.yaml"
    env_file="$PLANE_INSTALL_DIR/.env"
    if [ ! -f "$docker_compose_file" ] || [ ! -f "$env_file" ]; then
        show_message "Plane Server not installed. Please install Plane first ❌" "replace_last_line" >&2
        return
    fi
    show_message "Restarting Plane Server ($APP_RELEASE) ✋"
    sudo docker compose -f "$docker_compose_file" --env-file="$env_file" restart
    show_message "Plane Server Restarted ($APP_RELEASE) ✅" "replace_last_line" >&2
}
function show_help() {
# print_header
show_message "Usage: plane-app [OPTION]" >&2
show_message "" >&2
show_message " start Start Server" >&2
show_message " stop Stop Server" >&2
show_message " restart Restart Server" >&2
show_message "" >&2
show_message "other options" >&2
show_message " -i, --install Install Plane" >&2
show_message " -c, --configure Configure Plane" >&2
show_message " -up, --upgrade Upgrade Plane" >&2
show_message " -un, --uninstall Uninstall Plane" >&2
show_message " -ui, --update-installer Update Plane Installer" >&2
show_message " -h, --help Show help" >&2
show_message "" >&2
exit 1
}
function update_installer() {
    # Re-download the plane-app utility itself from the configured repo and
    # deploy branch, replacing the running copy in /usr/local/bin.
    show_message "Updating Plane Installer ✋" >&2
    # ?token=$(date +%s) busts GitHub's raw-content cache
    sudo curl -H 'Cache-Control: no-cache, no-store' \
        -s -o /usr/local/bin/plane-app \
        https://raw.githubusercontent.com/$CODE_REPO/$DEPLOY_BRANCH/deploy/1-click/plane-app?token=$(date +%s)
    sudo chmod +x /usr/local/bin/plane-app > /dev/null&> /dev/null
    show_message "Plane Installer Updated ✅" "replace_last_line" >&2
}
export DEPLOY_BRANCH=${BRANCH:-master}
export APP_RELEASE=$DEPLOY_BRANCH
export DOCKERHUB_USER=makeplane
export PULL_POLICY=always
if [ "$DEPLOY_BRANCH" == "master" ]; then
export APP_RELEASE=latest
fi
PLANE_INSTALL_DIR=/opt/plane
DATA_DIR=$PLANE_INSTALL_DIR/data
LOG_DIR=$PLANE_INSTALL_DIR/logs
CODE_REPO=${GIT_REPO:-makeplane/plane}
OS_SUPPORTED=false
CPU_ARCH=$(uname -m)
PROGRESS_MSG=""
USE_GLOBAL_IMAGES=0
PACKAGE_MANAGER=""
MY_IP=$(curl -s ifconfig.me)
if [[ $CPU_ARCH == "amd64" || $CPU_ARCH == "x86_64" || ( $DEPLOY_BRANCH == "master" && ( $CPU_ARCH == "arm64" || $CPU_ARCH == "aarch64" ) ) ]]; then
USE_GLOBAL_IMAGES=1
fi
sudo mkdir -p $PLANE_INSTALL_DIR/{data,log}
if command -v apt-get &> /dev/null; then
PACKAGE_MANAGER="apt-get"
elif command -v yum &> /dev/null; then
PACKAGE_MANAGER="yum"
elif command -v apk &> /dev/null; then
PACKAGE_MANAGER="apk"
fi
if [ "$1" == "start" ]; then
start_server
elif [ "$1" == "stop" ]; then
stop_server
elif [ "$1" == "restart" ]; then
restart_server
elif [ "$1" == "--install" ] || [ "$1" == "-i" ]; then
install
start_server
show_message "" >&2
show_message "To view help, use plane-app --help " >&2
elif [ "$1" == "--configure" ] || [ "$1" == "-c" ]; then
configure_plane
printUsageInstructions
elif [ "$1" == "--upgrade" ] || [ "$1" == "-up" ]; then
upgrade
elif [ "$1" == "--uninstall" ] || [ "$1" == "-un" ]; then
uninstall
elif [ "$1" == "--update-installer" ] || [ "$1" == "-ui" ]; then
update_installer
elif [ "$1" == "--help" ] || [ "$1" == "-h" ]; then
show_help
else
show_help
fi

View File

@ -70,6 +70,8 @@ services:
command: ./bin/takeoff
deploy:
replicas: ${API_REPLICAS:-1}
volumes:
- logs_api:/code/plane/logs
depends_on:
- plane-db
- plane-redis
@ -80,6 +82,8 @@ services:
pull_policy: ${PULL_POLICY:-always}
restart: unless-stopped
command: ./bin/worker
volumes:
- logs_worker:/code/plane/logs
depends_on:
- api
- plane-db
@ -91,6 +95,8 @@ services:
pull_policy: ${PULL_POLICY:-always}
restart: unless-stopped
command: ./bin/beat
volumes:
- logs_beat-worker:/code/plane/logs
depends_on:
- api
- plane-db
@ -104,6 +110,8 @@ services:
command: >
sh -c "python manage.py wait_for_db &&
python manage.py migrate"
volumes:
- logs_migrator:/code/plane/logs
depends_on:
- plane-db
- plane-redis
@ -149,3 +157,7 @@ volumes:
pgdata:
redisdata:
uploads:
logs_api:
logs_worker:
logs_beat-worker:
logs_migrator:

View File

@ -4,18 +4,18 @@ import { findTableAncestor } from "src/lib/utils";
import { UploadImage } from "src/types/upload-image";
export const toggleHeadingOne = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 1 }).run();
else editor.chain().focus().toggleHeading({ level: 1 }).run();
if (range) editor.chain().focus().deleteRange(range).clearNodes().setNode("heading", { level: 1 }).run();
else editor.chain().focus().clearNodes().toggleHeading({ level: 1 }).run();
};
export const toggleHeadingTwo = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 2 }).run();
else editor.chain().focus().toggleHeading({ level: 2 }).run();
if (range) editor.chain().focus().deleteRange(range).clearNodes().setNode("heading", { level: 2 }).run();
else editor.chain().focus().clearNodes().toggleHeading({ level: 2 }).run();
};
export const toggleHeadingThree = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 3 }).run();
else editor.chain().focus().toggleHeading({ level: 3 }).run();
if (range) editor.chain().focus().deleteRange(range).clearNodes().setNode("heading", { level: 3 }).run();
else editor.chain().focus().clearNodes().toggleHeading({ level: 3 }).run();
};
export const toggleBold = (editor: Editor, range?: Range) => {
@ -37,10 +37,10 @@ export const toggleCodeBlock = (editor: Editor, range?: Range) => {
// Check if code block is active then toggle code block
if (editor.isActive("codeBlock")) {
if (range) {
editor.chain().focus().deleteRange(range).toggleCodeBlock().run();
editor.chain().focus().deleteRange(range).clearNodes().toggleCodeBlock().run();
return;
}
editor.chain().focus().toggleCodeBlock().run();
editor.chain().focus().clearNodes().toggleCodeBlock().run();
return;
}
@ -49,32 +49,32 @@ export const toggleCodeBlock = (editor: Editor, range?: Range) => {
if (isSelectionEmpty) {
if (range) {
editor.chain().focus().deleteRange(range).toggleCodeBlock().run();
editor.chain().focus().deleteRange(range).clearNodes().toggleCodeBlock().run();
return;
}
editor.chain().focus().toggleCodeBlock().run();
editor.chain().focus().clearNodes().toggleCodeBlock().run();
} else {
if (range) {
editor.chain().focus().deleteRange(range).toggleCode().run();
editor.chain().focus().deleteRange(range).clearNodes().toggleCode().run();
return;
}
editor.chain().focus().toggleCode().run();
editor.chain().focus().clearNodes().toggleCode().run();
}
};
export const toggleOrderedList = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).toggleOrderedList().run();
else editor.chain().focus().toggleOrderedList().run();
if (range) editor.chain().focus().deleteRange(range).clearNodes().toggleOrderedList().run();
else editor.chain().focus().clearNodes().toggleOrderedList().run();
};
export const toggleBulletList = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).toggleBulletList().run();
else editor.chain().focus().toggleBulletList().run();
if (range) editor.chain().focus().deleteRange(range).clearNodes().toggleBulletList().run();
else editor.chain().focus().clearNodes().toggleBulletList().run();
};
export const toggleTaskList = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).toggleTaskList().run();
else editor.chain().focus().toggleTaskList().run();
if (range) editor.chain().focus().deleteRange(range).clearNodes().toggleTaskList().run();
else editor.chain().focus().clearNodes().toggleTaskList().run();
};
export const toggleStrike = (editor: Editor, range?: Range) => {
@ -83,8 +83,8 @@ export const toggleStrike = (editor: Editor, range?: Range) => {
};
export const toggleBlockquote = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).toggleBlockquote().run();
else editor.chain().focus().toggleBlockquote().run();
if (range) editor.chain().focus().deleteRange(range).clearNodes().toggleBlockquote().run();
else editor.chain().focus().clearNodes().toggleBlockquote().run();
};
export const insertTableCommand = (editor: Editor, range?: Range) => {
@ -97,8 +97,8 @@ export const insertTableCommand = (editor: Editor, range?: Range) => {
}
}
}
if (range) editor.chain().focus().deleteRange(range).insertTable({ rows: 3, cols: 3 }).run();
else editor.chain().focus().insertTable({ rows: 3, cols: 3 }).run();
if (range) editor.chain().focus().deleteRange(range).clearNodes().insertTable({ rows: 3, cols: 3 }).run();
else editor.chain().focus().clearNodes().insertTable({ rows: 3, cols: 3 }).run();
};
export const unsetLinkEditor = (editor: Editor) => {

View File

@ -7,6 +7,7 @@ import { AlertLabel } from "src/ui/components/alert-label";
import { IVerticalDropdownItemProps, VerticalDropdownMenu } from "src/ui/components/vertical-dropdown-menu";
import { SummaryPopover } from "src/ui/components/summary-popover";
import { InfoPopover } from "src/ui/components/info-popover";
import { getDate } from "src/utils/date-utils";
interface IEditorHeader {
editor: Editor;
@ -72,7 +73,7 @@ export const EditorHeader = (props: IEditorHeader) => {
Icon={Archive}
backgroundColor="bg-blue-500/20"
textColor="text-blue-500"
label={`Archived at ${new Date(archivedAt).toLocaleString()}`}
label={`Archived at ${getDate(archivedAt)?.toLocaleString()}`}
/>
)}

View File

@ -3,13 +3,15 @@ import { usePopper } from "react-popper";
import { Calendar, History, Info } from "lucide-react";
// types
import { DocumentDetails } from "src/types/editor-types";
//utils
import { getDate } from "src/utils/date-utils";
type Props = {
documentDetails: DocumentDetails;
};
// function to render a Date in the format- 25 May 2023 at 2:53PM
const renderDate = (date: Date): string => {
const renderDate = (date: Date | undefined): string => {
const options: Intl.DateTimeFormatOptions = {
day: "numeric",
month: "long",
@ -52,14 +54,14 @@ export const InfoPopover: React.FC<Props> = (props) => {
<h6 className="text-xs text-custom-text-400">Last updated on</h6>
<h5 className="flex items-center gap-1 text-sm">
<History className="h-3 w-3" />
{renderDate(new Date(documentDetails.last_updated_at))}
{renderDate(getDate(documentDetails?.last_updated_at))}
</h5>
</div>
<div className="space-y-1.5">
<h6 className="text-xs text-custom-text-400">Created on</h6>
<h5 className="flex items-center gap-1 text-sm">
<Calendar className="h-3 w-3" />
{renderDate(new Date(documentDetails.created_on))}
{renderDate(getDate(documentDetails?.created_on))}
</h5>
</div>
</div>

View File

@ -0,0 +1,26 @@
/**
 * Type-guard for numeric values.
 * NOTE: like the plain `typeof` check it wraps, this returns true for NaN
 * (`typeof NaN === "number"`), so callers that need a usable number must
 * additionally reject NaN.
 *
 * @param value - candidate value of unknown type
 * @returns true when `value` is of primitive type number
 */
function isNumber(value: unknown): value is number {
  return typeof value === "number";
}
/**
 * This method returns a date from string of type yyyy-mm-dd
 * This method is recommended to use instead of new Date() as this does not introduce any timezone offsets
 * @param date - a `yyyy-mm-dd` string, an existing Date (returned as-is), or nullish
 * @returns date or undefined when the input is missing or not parseable
 */
export const getDate = (date: string | Date | undefined | null): Date | undefined => {
  try {
    if (!date || date === "") return;
    // Already a Date instance — pass it through unchanged.
    if (typeof date !== "string" && !(date instanceof String)) return date;
    const [yearString, monthString, dayString] = date.substring(0, 10).split("-");
    const year = parseInt(yearString, 10);
    const month = parseInt(monthString, 10);
    const day = parseInt(dayString, 10);
    // BUGFIX: parseInt yields NaN for non-numeric parts, and
    // `typeof NaN === "number"`, so the previous `isNumber` check never
    // rejected bad input and this function could return an Invalid Date.
    // An explicit NaN check is required to honor the "date or undefined"
    // contract documented above.
    if (Number.isNaN(year) || Number.isNaN(month) || Number.isNaN(day)) return undefined;
    // Month is 0-based in the Date constructor.
    return new Date(year, month - 1, day);
  } catch (e) {
    return undefined;
  }
};

View File

@ -85,7 +85,10 @@ const getSuggestionItems =
searchTerms: ["p", "paragraph"],
icon: <CaseSensitive className="h-3.5 w-3.5" />,
command: ({ editor, range }: CommandProps) => {
editor.chain().focus().deleteRange(range).toggleNode("paragraph", "paragraph").run();
if (range) {
editor.chain().focus().deleteRange(range).clearNodes().run();
}
editor.chain().focus().clearNodes().run();
},
},
{

View File

@ -25,16 +25,20 @@ type EditorBubbleMenuProps = Omit<BubbleMenuProps, "children">;
export const EditorBubbleMenu: FC<EditorBubbleMenuProps> = (props: any) => {
const items: BubbleMenuItem[] = [
BoldItem(props.editor),
ItalicItem(props.editor),
UnderLineItem(props.editor),
StrikeThroughItem(props.editor),
...(props.editor.isActive("code")
? []
: [
BoldItem(props.editor),
ItalicItem(props.editor),
UnderLineItem(props.editor),
StrikeThroughItem(props.editor),
]),
CodeItem(props.editor),
];
const bubbleMenuProps: EditorBubbleMenuProps = {
...props,
shouldShow: ({ view, state, editor }) => {
shouldShow: ({ state, editor }) => {
const { selection } = state;
const { empty } = selection;
@ -64,6 +68,7 @@ export const EditorBubbleMenu: FC<EditorBubbleMenuProps> = (props: any) => {
const [isLinkSelectorOpen, setIsLinkSelectorOpen] = useState(false);
const [isSelecting, setIsSelecting] = useState(false);
useEffect(() => {
function handleMouseDown() {
function handleMouseMove() {
@ -108,14 +113,16 @@ export const EditorBubbleMenu: FC<EditorBubbleMenuProps> = (props: any) => {
}}
/>
)}
<LinkSelector
editor={props.editor!!}
isOpen={isLinkSelectorOpen}
setIsOpen={() => {
setIsLinkSelectorOpen(!isLinkSelectorOpen);
setIsNodeSelectorOpen(false);
}}
/>
{!props.editor.isActive("code") && (
<LinkSelector
editor={props.editor}
isOpen={isLinkSelectorOpen}
setIsOpen={() => {
setIsLinkSelectorOpen(!isLinkSelectorOpen);
setIsNodeSelectorOpen(false);
}}
/>
)}
<div className="flex">
{items.map((item) => (
<button

View File

@ -84,8 +84,8 @@ export const LinkSelector: FC<LinkSelectorProps> = ({ editor, isOpen, setIsOpen
className="flex items-center rounded-sm p-1 text-custom-text-300 transition-all hover:bg-custom-background-90"
type="button"
onClick={(e) => {
e.stopPropagation();
onLinkSubmit();
e.stopPropagation();
}}
>
<Check className="h-4 w-4" />

View File

@ -26,7 +26,7 @@ export const NodeSelector: FC<NodeSelectorProps> = ({ editor, isOpen, setIsOpen
{
name: "Text",
icon: TextIcon,
command: () => editor.chain().focus().toggleNode("paragraph", "paragraph").run(),
command: () => editor.chain().focus().clearNodes().run(),
isActive: () => editor.isActive("paragraph") && !editor.isActive("bulletList") && !editor.isActive("orderedList"),
},
HeadingOneItem(editor),

View File

@ -31,6 +31,7 @@ export interface ICycle {
unstarted_issues: number;
updated_at: Date;
updated_by: string;
archived_at: string | null;
assignee_ids: string[];
view_props: {
filters: IIssueFilterOptions;

View File

@ -13,6 +13,11 @@ export type TCycleFilters = {
status?: string[] | null;
};
export type TCycleFiltersByState = {
default: TCycleFilters;
archived: TCycleFilters;
};
export type TCycleStoredFilters = {
display_filters?: TCycleDisplayFilters;
filters?: TCycleFilters;

View File

@ -1,7 +1,7 @@
export * from "./github-importer";
export * from "./jira-importer";
import { IProjectLite } from "../projects";
import { IProjectLite } from "../project";
// types
import { IUserLite } from "../users";

View File

@ -1,5 +1,5 @@
import { TIssue } from "../issues/base";
import type { IProjectLite } from "../projects";
import type { IProjectLite } from "../project";
export type TInboxIssueExtended = {
completed_at: string | null;

Some files were not shown because too many files have changed in this diff Show More