Mirror of https://github.com/makeplane/plane (synced 2024-06-14 14:31:34 +00:00)
Merge branch 'develop' into chore/page-transactions
commit 8bd7737d7b
4 .github/ISSUE_TEMPLATE/--bug-report.yaml vendored
@ -2,7 +2,7 @@ name: Bug report
description: Create a bug report to help us improve Plane
title: "[bug]: "
labels: [🐛bug]
assignees: [srinivaspendem, pushya-plane]
assignees: [srinivaspendem, pushya22]
body:
- type: markdown
attributes:
@ -45,7 +45,7 @@ body:
- Deploy preview
validations:
required: true
type: dropdown
- type: dropdown
id: browser
attributes:
label: Browser
@ -2,7 +2,7 @@ name: Feature request
description: Suggest a feature to improve Plane
title: "[feature]: "
labels: [✨feature]
assignees: [srinivaspendem, pushya-plane]
assignees: [srinivaspendem, pushya22]
body:
- type: markdown
attributes:
71 .github/workflows/build-branch.yml vendored
@ -23,12 +23,16 @@ jobs:
gh_buildx_version: ${{ steps.set_env_variables.outputs.BUILDX_VERSION }}
gh_buildx_platforms: ${{ steps.set_env_variables.outputs.BUILDX_PLATFORMS }}
gh_buildx_endpoint: ${{ steps.set_env_variables.outputs.BUILDX_ENDPOINT }}
build_frontend: ${{ steps.changed_files.outputs.frontend_any_changed }}
build_space: ${{ steps.changed_files.outputs.space_any_changed }}
build_backend: ${{ steps.changed_files.outputs.backend_any_changed }}
build_proxy: ${{ steps.changed_files.outputs.proxy_any_changed }}

steps:
- id: set_env_variables
name: Set Environment Variables
run: |
if [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
if [ "${{ env.TARGET_BRANCH }}" == "master" ] || [ "${{ github.event_name }}" == "release" ]; then
echo "BUILDX_DRIVER=cloud" >> $GITHUB_OUTPUT
echo "BUILDX_VERSION=lab:latest" >> $GITHUB_OUTPUT
echo "BUILDX_PLATFORMS=linux/amd64,linux/arm64" >> $GITHUB_OUTPUT
@ -41,7 +45,36 @@ jobs:
fi
echo "TARGET_BRANCH=${{ env.TARGET_BRANCH }}" >> $GITHUB_OUTPUT

- id: checkout_files
name: Checkout Files
uses: actions/checkout@v4

- name: Get changed files
id: changed_files
uses: tj-actions/changed-files@v42
with:
files_yaml: |
frontend:
- web/**
- packages/**
- 'package.json'
- 'yarn.lock'
- 'tsconfig.json'
- 'turbo.json'
space:
- space/**
- packages/**
- 'package.json'
- 'yarn.lock'
- 'tsconfig.json'
- 'turbo.json'
backend:
- apiserver/**
proxy:
- nginx/**

branch_build_push_frontend:
if: ${{ needs.branch_build_setup.outputs.build_frontend == 'true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@ -54,10 +87,10 @@ jobs:
steps:
- name: Set Frontend Docker Tag
run: |
if [ "${{ env.TARGET_BRANCH }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }}
if [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:stable
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend:latest
else
TAG=${{ env.FRONTEND_TAG }}
fi
@ -77,7 +110,7 @@ jobs:
endpoint: ${{ env.BUILDX_ENDPOINT }}

- name: Check out the repo
uses: actions/checkout@v4.1.1
uses: actions/checkout@v4

- name: Build and Push Frontend to Docker Container Registry
uses: docker/build-push-action@v5.1.0
@ -93,6 +126,7 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

branch_build_push_space:
if: ${{ needs.branch_build_setup.outputs.build_space == 'true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@ -105,10 +139,10 @@ jobs:
steps:
- name: Set Space Docker Tag
run: |
if [ "${{ env.TARGET_BRANCH }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }}
if [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-space:${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:stable
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-space:latest
else
TAG=${{ env.SPACE_TAG }}
fi
@ -128,7 +162,7 @@ jobs:
endpoint: ${{ env.BUILDX_ENDPOINT }}

- name: Check out the repo
uses: actions/checkout@v4.1.1
uses: actions/checkout@v4

- name: Build and Push Space to Docker Hub
uses: docker/build-push-action@v5.1.0
@ -144,6 +178,7 @@ jobs:
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}

branch_build_push_backend:
if: ${{ needs.branch_build_setup.outputs.build_backend == 'true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@ -156,10 +191,10 @@ jobs:
steps:
- name: Set Backend Docker Tag
run: |
if [ "${{ env.TARGET_BRANCH }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }}
if [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:stable
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-backend:latest
else
TAG=${{ env.BACKEND_TAG }}
fi
@ -179,7 +214,7 @@ jobs:
endpoint: ${{ env.BUILDX_ENDPOINT }}

- name: Check out the repo
uses: actions/checkout@v4.1.1
uses: actions/checkout@v4

- name: Build and Push Backend to Docker Hub
uses: docker/build-push-action@v5.1.0
@ -194,8 +229,8 @@ jobs:
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}


branch_build_push_proxy:
if: ${{ needs.branch_build_setup.outputs.build_proxy == 'true' || github.event_name == 'release' || needs.branch_build_setup.outputs.gh_branch_name == 'master' }}
runs-on: ubuntu-20.04
needs: [branch_build_setup]
env:
@ -208,10 +243,10 @@ jobs:
steps:
- name: Set Proxy Docker Tag
run: |
if [ "${{ env.TARGET_BRANCH }}" == "master" ] && [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }}
if [ "${{ github.event_name }}" == "release" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable,${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:${{ github.event.release.tag_name }}
elif [ "${{ env.TARGET_BRANCH }}" == "master" ]; then
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:stable
TAG=${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy:latest
else
TAG=${{ env.PROXY_TAG }}
fi
@ -231,7 +266,7 @@ jobs:
endpoint: ${{ env.BUILDX_ENDPOINT }}

- name: Check out the repo
uses: actions/checkout@v4.1.1
uses: actions/checkout@v4

- name: Build and Push Plane-Proxy to Docker Hub
uses: docker/build-push-action@v5.1.0
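Note: the same tag-selection shell logic is repeated above for the frontend, space, backend, and proxy images, with the new behavior being that release events now publish "stable" plus the release tag while master builds publish "latest". The following Python sketch restates that decision table for illustration only; it is not part of the workflow, and the image names and branch tag shown are placeholders.

def docker_tags(image, event_name, target_branch, release_tag, branch_tag):
    # Release events publish a "stable" tag plus the release tag name.
    if event_name == "release":
        return f"{image}:stable,{image}:{release_tag}"
    # Builds from master publish the rolling "latest" tag.
    if target_branch == "master":
        return f"{image}:latest"
    # Any other branch falls back to its branch-specific tag.
    return branch_tag

# Example (hypothetical values): a release build of the frontend image.
print(docker_tags("example/plane-frontend", "release", "master", "v0.16.0",
                  "example/plane-frontend:develop"))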
@ -21,7 +21,6 @@ jobs:
uses: actions/setup-node@v2
with:
node-version: 18.x
cache: "yarn"

- name: Get changed files
id: changed-files
25 .github/workflows/create-sync-pr.yml vendored
@ -2,7 +2,7 @@ name: Create Sync Action

on:
workflow_dispatch:
push:
push:
branches:
- preview

@ -17,7 +17,7 @@ jobs:
contents: read
steps:
- name: Checkout Code
uses: actions/checkout@v4.1.1
uses: actions/checkout@v4.1.1
with:
persist-credentials: false
fetch-depth: 0
@ -31,14 +31,25 @@ jobs:
sudo apt update
sudo apt install gh -y

- name: Push Changes to Target Repo
- name: Push Changes to Target Repo A
env:
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
run: |
TARGET_REPO="${{ secrets.SYNC_TARGET_REPO_NAME }}"
TARGET_BRANCH="${{ secrets.SYNC_TARGET_BRANCH_NAME }}"
TARGET_REPO="${{ secrets.TARGET_REPO_A }}"
TARGET_BRANCH="${{ secrets.TARGET_REPO_A_BRANCH_NAME }}"
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"

git checkout $SOURCE_BRANCH
git remote add target-origin "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
git push target-origin $SOURCE_BRANCH:$TARGET_BRANCH
git remote add target-origin-a "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
git push target-origin-a $SOURCE_BRANCH:$TARGET_BRANCH

- name: Push Changes to Target Repo B
env:
GH_TOKEN: ${{ secrets.ACCESS_TOKEN }}
run: |
TARGET_REPO="${{ secrets.TARGET_REPO_B }}"
TARGET_BRANCH="${{ secrets.TARGET_REPO_B_BRANCH_NAME }}"
SOURCE_BRANCH="${{ env.SOURCE_BRANCH_NAME }}"

git remote add target-origin-b "https://$GH_TOKEN@github.com/$TARGET_REPO.git"
git push target-origin-b $SOURCE_BRANCH:$TARGET_BRANCH
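Note: the sync workflow now runs the same remote-and-push step twice, once per downstream repository. A minimal Python sketch of that step, assuming the token, repository slug, and branch names are supplied exactly as the secrets above suggest (the values below are placeholders), could look like this:

import subprocess

def push_to_mirror(token, target_repo, source_branch, target_branch, remote_name):
    """Add an authenticated remote and push source_branch to target_branch."""
    remote_url = f"https://{token}@github.com/{target_repo}.git"
    subprocess.run(["git", "remote", "add", remote_name, remote_url], check=True)
    subprocess.run(
        ["git", "push", remote_name, f"{source_branch}:{target_branch}"],
        check=True,
    )

# Illustrative only: repo slugs and branches here are hypothetical.
# push_to_mirror(token, "org/mirror-a", "preview", "preview", "target-origin-a")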
@ -1,4 +1,4 @@
{
"name": "plane-api",
"version": "0.15.1"
"version": "0.16.0"
}
@ -45,7 +45,10 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
return (
Cycle.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(project__project_projectmember__member=self.request.user)
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.select_related("project")
.select_related("workspace")
.select_related("owned_by")
@ -390,7 +393,10 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
)
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(project__project_projectmember__member=self.request.user)
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(cycle_id=self.kwargs.get("cycle_id"))
.select_related("project")
.select_related("workspace")
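Note: this change, repeated across the API querysets in this commit, replaces the single membership filter with one that also requires the membership to be active. A minimal Django-style sketch of the pattern, assuming a Cycle model with the project_projectmember relation shown in the diff, is below. Keeping both conditions in one .filter() call matters for multi-valued relations: chained .filter() calls may match one ProjectMember row for the member check and a different row for is_active.

# Sketch only: model and relation names are taken from the diff above.
from plane.db.models import Cycle  # import path as used elsewhere in this commit

def visible_cycles(user, slug, project_id):
    return (
        Cycle.objects.filter(workspace__slug=slug)
        .filter(project_id=project_id)
        # The user must be a member of the project AND that membership
        # must still be active; both conditions bind to the same row.
        .filter(
            project__project_projectmember__member=user,
            project__project_projectmember__is_active=True,
        )
        .select_related("project", "workspace", "owned_by")
    )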
@ -352,7 +352,10 @@ class LabelAPIEndpoint(BaseAPIView):
return (
Label.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(project__project_projectmember__member=self.request.user)
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.select_related("project")
.select_related("workspace")
.select_related("parent")
@ -481,7 +484,10 @@ class IssueLinkAPIEndpoint(BaseAPIView):
IssueLink.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(issue_id=self.kwargs.get("issue_id"))
.filter(project__project_projectmember__member=self.request.user)
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.order_by(self.kwargs.get("order_by", "-created_at"))
.distinct()
)
@ -607,11 +613,11 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
)
.filter(project_id=self.kwargs.get("project_id"))
.filter(issue_id=self.kwargs.get("issue_id"))
.filter(project__project_projectmember__member=self.request.user)
.select_related("project")
.select_related("workspace")
.select_related("issue")
.select_related("actor")
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.select_related("workspace", "project", "issue", "actor")
.annotate(
is_member=Exists(
ProjectMember.objects.filter(
@ -647,6 +653,33 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
)

def post(self, request, slug, project_id, issue_id):

# Validation check if the issue already exists
if (
request.data.get("external_id")
and request.data.get("external_source")
and IssueComment.objects.filter(
project_id=project_id,
workspace__slug=slug,
external_source=request.data.get("external_source"),
external_id=request.data.get("external_id"),
).exists()
):
issue_comment = IssueComment.objects.filter(
workspace__slug=slug,
project_id=project_id,
external_id=request.data.get("external_id"),
external_source=request.data.get("external_source"),
).first()
return Response(
{
"error": "Issue Comment with the same external id and external source already exists",
"id": str(issue_comment.id),
},
status=status.HTTP_409_CONFLICT,
)


serializer = IssueCommentSerializer(data=request.data)
if serializer.is_valid():
serializer.save(
@ -680,6 +713,29 @@ class IssueCommentAPIEndpoint(WebhookMixin, BaseAPIView):
IssueCommentSerializer(issue_comment).data,
cls=DjangoJSONEncoder,
)

# Validation check if the issue already exists
if (
request.data.get("external_id")
and (issue_comment.external_id != str(request.data.get("external_id")))
and IssueComment.objects.filter(
project_id=project_id,
workspace__slug=slug,
external_source=request.data.get(
"external_source", issue_comment.external_source
),
external_id=request.data.get("external_id"),
).exists()
):
return Response(
{
"error": "Issue Comment with the same external id and external source already exists",
"id": str(issue_comment.id),
},
status=status.HTTP_409_CONFLICT,
)


serializer = IssueCommentSerializer(
issue_comment, data=request.data, partial=True
)
@ -734,6 +790,7 @@ class IssueActivityAPIEndpoint(BaseAPIView):
.filter(
~Q(field__in=["comment", "vote", "reaction", "draft"]),
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.select_related("actor", "workspace", "issue", "project")
).order_by(request.GET.get("order_by", "created_at"))
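Note: the new post and patch handlers above both guard against duplicate external references before saving. A condensed, hedged sketch of that guard, generalized over any model that carries external_id and external_source fields (the helper name is hypothetical), is:

from rest_framework import status
from rest_framework.response import Response

def reject_duplicate_external(model, request, slug, project_id):
    """Return a 409 response when an object with the same external reference
    already exists, mirroring the duplicate check added in this commit."""
    external_id = request.data.get("external_id")
    external_source = request.data.get("external_source")
    if not (external_id and external_source):
        return None
    existing = model.objects.filter(
        workspace__slug=slug,
        project_id=project_id,
        external_id=external_id,
        external_source=external_source,
    ).first()
    if existing is None:
        return None
    return Response(
        {
            "error": f"{model.__name__} with the same external id and external source already exists",
            "id": str(existing.id),
        },
        status=status.HTTP_409_CONFLICT,
    )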
@ -273,7 +273,10 @@ class ModuleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(module_id=self.kwargs.get("module_id"))
.filter(project__project_projectmember__member=self.request.user)
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.select_related("project")
.select_related("workspace")
.select_related("module")
@ -1,7 +1,5 @@
# Python imports
from itertools import groupby

# Django imports
from django.db import IntegrityError
from django.db.models import Q

# Third party imports
@ -26,7 +24,10 @@ class StateAPIEndpoint(BaseAPIView):
return (
State.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(project__project_projectmember__member=self.request.user)
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(~Q(name="Triage"))
.select_related("project")
.select_related("workspace")
@ -34,37 +35,51 @@ class StateAPIEndpoint(BaseAPIView):
)

def post(self, request, slug, project_id):
serializer = StateSerializer(
data=request.data, context={"project_id": project_id}
)
if serializer.is_valid():
if (
request.data.get("external_id")
and request.data.get("external_source")
and State.objects.filter(
project_id=project_id,
workspace__slug=slug,
external_source=request.data.get("external_source"),
external_id=request.data.get("external_id"),
).exists()
):
state = State.objects.filter(
workspace__slug=slug,
project_id=project_id,
external_id=request.data.get("external_id"),
external_source=request.data.get("external_source"),
).first()
return Response(
{
"error": "State with the same external id and external source already exists",
"id": str(state.id),
},
status=status.HTTP_409_CONFLICT,
)
try:
serializer = StateSerializer(
data=request.data, context={"project_id": project_id}
)
if serializer.is_valid():
if (
request.data.get("external_id")
and request.data.get("external_source")
and State.objects.filter(
project_id=project_id,
workspace__slug=slug,
external_source=request.data.get("external_source"),
external_id=request.data.get("external_id"),
).exists()
):
state = State.objects.filter(
workspace__slug=slug,
project_id=project_id,
external_id=request.data.get("external_id"),
external_source=request.data.get("external_source"),
).first()
return Response(
{
"error": "State with the same external id and external source already exists",
"id": str(state.id),
},
status=status.HTTP_409_CONFLICT,
)

serializer.save(project_id=project_id)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
serializer.save(project_id=project_id)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError as e:
state = State.objects.filter(
workspace__slug=slug,
project_id=project_id,
name=request.data.get("name"),
).first()
return Response(
{
"error": "State with the same name already exists in the project",
"id": str(state.id),
},
status=status.HTTP_409_CONFLICT,
)

def get(self, request, slug, project_id, state_id=None):
if state_id:
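Note: StateAPIEndpoint.post is now wrapped in try/except IntegrityError so a name collision returns 409 with the id of the conflicting state instead of surfacing as a server error. A small sketch of that shape, assuming a uniqueness constraint over the project and state name as the error message implies (the standalone function is illustrative, not the actual view method):

from django.db import IntegrityError
from rest_framework import status
from rest_framework.response import Response

def create_state(serializer_class, model, request, slug, project_id):
    """Validate and save; map unique-name violations to a 409 that points
    at the existing row, mirroring the change in this commit."""
    try:
        serializer = serializer_class(
            data=request.data, context={"project_id": project_id}
        )
        if serializer.is_valid():
            serializer.save(project_id=project_id)
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
    except IntegrityError:
        existing = model.objects.filter(
            workspace__slug=slug,
            project_id=project_id,
            name=request.data.get("name"),
        ).first()
        return Response(
            {
                "error": "State with the same name already exists in the project",
                "id": str(existing.id),
            },
            status=status.HTTP_409_CONFLICT,
        )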
@ -69,9 +69,13 @@ from .issue import (
|
||||
RelatedIssueSerializer,
|
||||
IssuePublicSerializer,
|
||||
IssueDetailSerializer,
|
||||
IssueReactionLiteSerializer,
|
||||
IssueAttachmentLiteSerializer,
|
||||
IssueLinkLiteSerializer,
|
||||
)
|
||||
|
||||
from .module import (
|
||||
ModuleDetailSerializer,
|
||||
ModuleWriteSerializer,
|
||||
ModuleSerializer,
|
||||
ModuleIssueSerializer,
|
||||
|
@ -58,9 +58,12 @@ class DynamicBaseSerializer(BaseSerializer):
|
||||
IssueSerializer,
|
||||
LabelSerializer,
|
||||
CycleIssueSerializer,
|
||||
IssueFlatSerializer,
|
||||
IssueLiteSerializer,
|
||||
IssueRelationSerializer,
|
||||
InboxIssueLiteSerializer
|
||||
InboxIssueLiteSerializer,
|
||||
IssueReactionLiteSerializer,
|
||||
IssueAttachmentLiteSerializer,
|
||||
IssueLinkLiteSerializer,
|
||||
)
|
||||
|
||||
# Expansion mapper
|
||||
@ -79,12 +82,34 @@ class DynamicBaseSerializer(BaseSerializer):
|
||||
"assignees": UserLiteSerializer,
|
||||
"labels": LabelSerializer,
|
||||
"issue_cycle": CycleIssueSerializer,
|
||||
"parent": IssueSerializer,
|
||||
"parent": IssueLiteSerializer,
|
||||
"issue_relation": IssueRelationSerializer,
|
||||
"issue_inbox" : InboxIssueLiteSerializer,
|
||||
"issue_inbox": InboxIssueLiteSerializer,
|
||||
"issue_reactions": IssueReactionLiteSerializer,
|
||||
"issue_attachment": IssueAttachmentLiteSerializer,
|
||||
"issue_link": IssueLinkLiteSerializer,
|
||||
"sub_issues": IssueLiteSerializer,
|
||||
}
|
||||
|
||||
self.fields[field] = expansion[field](many=True if field in ["members", "assignees", "labels", "issue_cycle", "issue_relation", "issue_inbox"] else False)
|
||||
|
||||
self.fields[field] = expansion[field](
|
||||
many=(
|
||||
True
|
||||
if field
|
||||
in [
|
||||
"members",
|
||||
"assignees",
|
||||
"labels",
|
||||
"issue_cycle",
|
||||
"issue_relation",
|
||||
"issue_inbox",
|
||||
"issue_reactions",
|
||||
"issue_attachment",
|
||||
"issue_link",
|
||||
"sub_issues",
|
||||
]
|
||||
else False
|
||||
)
|
||||
)
|
||||
|
||||
return self.fields
|
||||
|
||||
@ -105,7 +130,11 @@ class DynamicBaseSerializer(BaseSerializer):
|
||||
LabelSerializer,
|
||||
CycleIssueSerializer,
|
||||
IssueRelationSerializer,
|
||||
InboxIssueLiteSerializer
|
||||
InboxIssueLiteSerializer,
|
||||
IssueLiteSerializer,
|
||||
IssueReactionLiteSerializer,
|
||||
IssueAttachmentLiteSerializer,
|
||||
IssueLinkLiteSerializer,
|
||||
)
|
||||
|
||||
# Expansion mapper
|
||||
@ -124,9 +153,13 @@ class DynamicBaseSerializer(BaseSerializer):
|
||||
"assignees": UserLiteSerializer,
|
||||
"labels": LabelSerializer,
|
||||
"issue_cycle": CycleIssueSerializer,
|
||||
"parent": IssueSerializer,
|
||||
"parent": IssueLiteSerializer,
|
||||
"issue_relation": IssueRelationSerializer,
|
||||
"issue_inbox" : InboxIssueLiteSerializer,
|
||||
"issue_inbox": InboxIssueLiteSerializer,
|
||||
"issue_reactions": IssueReactionLiteSerializer,
|
||||
"issue_attachment": IssueAttachmentLiteSerializer,
|
||||
"issue_link": IssueLinkLiteSerializer,
|
||||
"sub_issues": IssueLiteSerializer,
|
||||
}
|
||||
# Check if field in expansion then expand the field
|
||||
if expand in expansion:
|
||||
|
@ -3,10 +3,7 @@ from rest_framework import serializers
|
||||
|
||||
# Module imports
|
||||
from .base import BaseSerializer
|
||||
from .user import UserLiteSerializer
|
||||
from .issue import IssueStateSerializer
|
||||
from .workspace import WorkspaceLiteSerializer
|
||||
from .project import ProjectLiteSerializer
|
||||
from plane.db.models import (
|
||||
Cycle,
|
||||
CycleIssue,
|
||||
@ -14,7 +11,6 @@ from plane.db.models import (
|
||||
CycleUserProperties,
|
||||
)
|
||||
|
||||
|
||||
class CycleWriteSerializer(BaseSerializer):
|
||||
def validate(self, data):
|
||||
if (
|
||||
@ -30,60 +26,6 @@ class CycleWriteSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = Cycle
|
||||
fields = "__all__"
|
||||
|
||||
|
||||
class CycleSerializer(BaseSerializer):
|
||||
is_favorite = serializers.BooleanField(read_only=True)
|
||||
total_issues = serializers.IntegerField(read_only=True)
|
||||
cancelled_issues = serializers.IntegerField(read_only=True)
|
||||
completed_issues = serializers.IntegerField(read_only=True)
|
||||
started_issues = serializers.IntegerField(read_only=True)
|
||||
unstarted_issues = serializers.IntegerField(read_only=True)
|
||||
backlog_issues = serializers.IntegerField(read_only=True)
|
||||
assignees = serializers.SerializerMethodField(read_only=True)
|
||||
total_estimates = serializers.IntegerField(read_only=True)
|
||||
completed_estimates = serializers.IntegerField(read_only=True)
|
||||
started_estimates = serializers.IntegerField(read_only=True)
|
||||
workspace_detail = WorkspaceLiteSerializer(
|
||||
read_only=True, source="workspace"
|
||||
)
|
||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||
status = serializers.CharField(read_only=True)
|
||||
|
||||
def validate(self, data):
|
||||
if (
|
||||
data.get("start_date", None) is not None
|
||||
and data.get("end_date", None) is not None
|
||||
and data.get("start_date", None) > data.get("end_date", None)
|
||||
):
|
||||
raise serializers.ValidationError(
|
||||
"Start date cannot exceed end date"
|
||||
)
|
||||
return data
|
||||
|
||||
def get_assignees(self, obj):
|
||||
members = [
|
||||
{
|
||||
"avatar": assignee.avatar,
|
||||
"display_name": assignee.display_name,
|
||||
"id": assignee.id,
|
||||
}
|
||||
for issue_cycle in obj.issue_cycle.prefetch_related(
|
||||
"issue__assignees"
|
||||
).all()
|
||||
for assignee in issue_cycle.issue.assignees.all()
|
||||
]
|
||||
# Use a set comprehension to return only the unique objects
|
||||
unique_objects = {frozenset(item.items()) for item in members}
|
||||
|
||||
# Convert the set back to a list of dictionaries
|
||||
unique_list = [dict(item) for item in unique_objects]
|
||||
|
||||
return unique_list
|
||||
|
||||
class Meta:
|
||||
model = Cycle
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
@ -91,6 +33,52 @@ class CycleSerializer(BaseSerializer):
|
||||
]
|
||||
|
||||
|
||||
class CycleSerializer(BaseSerializer):
|
||||
# favorite
|
||||
is_favorite = serializers.BooleanField(read_only=True)
|
||||
total_issues = serializers.IntegerField(read_only=True)
|
||||
# state group wise distribution
|
||||
cancelled_issues = serializers.IntegerField(read_only=True)
|
||||
completed_issues = serializers.IntegerField(read_only=True)
|
||||
started_issues = serializers.IntegerField(read_only=True)
|
||||
unstarted_issues = serializers.IntegerField(read_only=True)
|
||||
backlog_issues = serializers.IntegerField(read_only=True)
|
||||
|
||||
# active | draft | upcoming | completed
|
||||
status = serializers.CharField(read_only=True)
|
||||
|
||||
|
||||
class Meta:
|
||||
model = Cycle
|
||||
fields = [
|
||||
# necessary fields
|
||||
"id",
|
||||
"workspace_id",
|
||||
"project_id",
|
||||
# model fields
|
||||
"name",
|
||||
"description",
|
||||
"start_date",
|
||||
"end_date",
|
||||
"owned_by_id",
|
||||
"view_props",
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
"progress_snapshot",
|
||||
# meta fields
|
||||
"is_favorite",
|
||||
"total_issues",
|
||||
"cancelled_issues",
|
||||
"completed_issues",
|
||||
"started_issues",
|
||||
"unstarted_issues",
|
||||
"backlog_issues",
|
||||
"status",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class CycleIssueSerializer(BaseSerializer):
|
||||
issue_detail = IssueStateSerializer(read_only=True, source="issue")
|
||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||
|
@ -444,6 +444,22 @@ class IssueLinkSerializer(BaseSerializer):
|
||||
return IssueLink.objects.create(**validated_data)
|
||||
|
||||
|
||||
class IssueLinkLiteSerializer(BaseSerializer):
|
||||
|
||||
class Meta:
|
||||
model = IssueLink
|
||||
fields = [
|
||||
"id",
|
||||
"issue_id",
|
||||
"title",
|
||||
"url",
|
||||
"metadata",
|
||||
"created_by_id",
|
||||
"created_at",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class IssueAttachmentSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = IssueAttachment
|
||||
@ -459,6 +475,21 @@ class IssueAttachmentSerializer(BaseSerializer):
|
||||
]
|
||||
|
||||
|
||||
class IssueAttachmentLiteSerializer(DynamicBaseSerializer):
|
||||
|
||||
class Meta:
|
||||
model = IssueAttachment
|
||||
fields = [
|
||||
"id",
|
||||
"asset",
|
||||
"attributes",
|
||||
"issue_id",
|
||||
"updated_at",
|
||||
"updated_by_id",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class IssueReactionSerializer(BaseSerializer):
|
||||
actor_detail = UserLiteSerializer(read_only=True, source="actor")
|
||||
|
||||
@ -473,6 +504,18 @@ class IssueReactionSerializer(BaseSerializer):
|
||||
]
|
||||
|
||||
|
||||
class IssueReactionLiteSerializer(DynamicBaseSerializer):
|
||||
|
||||
class Meta:
|
||||
model = IssueReaction
|
||||
fields = [
|
||||
"id",
|
||||
"actor_id",
|
||||
"issue_id",
|
||||
"reaction",
|
||||
]
|
||||
|
||||
|
||||
class CommentReactionSerializer(BaseSerializer):
|
||||
class Meta:
|
||||
model = CommentReaction
|
||||
@ -503,9 +546,7 @@ class IssueCommentSerializer(BaseSerializer):
|
||||
workspace_detail = WorkspaceLiteSerializer(
|
||||
read_only=True, source="workspace"
|
||||
)
|
||||
comment_reactions = CommentReactionSerializer(
|
||||
read_only=True, many=True
|
||||
)
|
||||
comment_reactions = CommentReactionSerializer(read_only=True, many=True)
|
||||
is_member = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
@ -558,18 +599,17 @@ class IssueStateSerializer(DynamicBaseSerializer):
|
||||
|
||||
class IssueSerializer(DynamicBaseSerializer):
|
||||
# ids
|
||||
project_id = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
state_id = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
parent_id = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
cycle_id = serializers.PrimaryKeyRelatedField(read_only=True)
|
||||
module_ids = serializers.SerializerMethodField()
|
||||
module_ids = serializers.ListField(
|
||||
child=serializers.UUIDField(), required=False,
|
||||
)
|
||||
|
||||
# Many to many
|
||||
label_ids = serializers.PrimaryKeyRelatedField(
|
||||
read_only=True, many=True, source="labels"
|
||||
label_ids = serializers.ListField(
|
||||
child=serializers.UUIDField(), required=False,
|
||||
)
|
||||
assignee_ids = serializers.PrimaryKeyRelatedField(
|
||||
read_only=True, many=True, source="assignees"
|
||||
assignee_ids = serializers.ListField(
|
||||
child=serializers.UUIDField(), required=False,
|
||||
)
|
||||
|
||||
# Count items
|
||||
@ -577,9 +617,6 @@ class IssueSerializer(DynamicBaseSerializer):
|
||||
attachment_count = serializers.IntegerField(read_only=True)
|
||||
link_count = serializers.IntegerField(read_only=True)
|
||||
|
||||
# is_subscribed
|
||||
is_subscribed = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta:
|
||||
model = Issue
|
||||
fields = [
|
||||
@ -606,57 +643,45 @@ class IssueSerializer(DynamicBaseSerializer):
|
||||
"updated_by",
|
||||
"attachment_count",
|
||||
"link_count",
|
||||
"is_subscribed",
|
||||
"is_draft",
|
||||
"archived_at",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
def get_module_ids(self, obj):
|
||||
# Access the prefetched modules and extract module IDs
|
||||
return [module for module in obj.issue_module.values_list("module_id", flat=True)]
|
||||
|
||||
|
||||
class IssueDetailSerializer(IssueSerializer):
|
||||
description_html = serializers.CharField()
|
||||
description_html = serializers.CharField()
|
||||
is_subscribed = serializers.BooleanField(read_only=True)
|
||||
|
||||
class Meta(IssueSerializer.Meta):
|
||||
fields = IssueSerializer.Meta.fields + ['description_html']
|
||||
fields = IssueSerializer.Meta.fields + [
|
||||
"description_html",
|
||||
"is_subscribed",
|
||||
]
|
||||
|
||||
|
||||
class IssueLiteSerializer(DynamicBaseSerializer):
|
||||
workspace_detail = WorkspaceLiteSerializer(
|
||||
read_only=True, source="workspace"
|
||||
)
|
||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||
state_detail = StateLiteSerializer(read_only=True, source="state")
|
||||
label_details = LabelLiteSerializer(
|
||||
read_only=True, source="labels", many=True
|
||||
)
|
||||
assignee_details = UserLiteSerializer(
|
||||
read_only=True, source="assignees", many=True
|
||||
)
|
||||
sub_issues_count = serializers.IntegerField(read_only=True)
|
||||
cycle_id = serializers.UUIDField(read_only=True)
|
||||
module_id = serializers.UUIDField(read_only=True)
|
||||
attachment_count = serializers.IntegerField(read_only=True)
|
||||
link_count = serializers.IntegerField(read_only=True)
|
||||
issue_reactions = IssueReactionSerializer(read_only=True, many=True)
|
||||
|
||||
class Meta:
|
||||
model = Issue
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"start_date",
|
||||
"target_date",
|
||||
"completed_at",
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
fields = [
|
||||
"id",
|
||||
"sequence_id",
|
||||
"project_id",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class IssueDetailSerializer(IssueSerializer):
|
||||
description_html = serializers.CharField()
|
||||
is_subscribed = serializers.BooleanField()
|
||||
|
||||
class Meta(IssueSerializer.Meta):
|
||||
fields = IssueSerializer.Meta.fields + [
|
||||
"description_html",
|
||||
"is_subscribed",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
class IssuePublicSerializer(BaseSerializer):
|
||||
|
@ -5,7 +5,6 @@ from rest_framework import serializers
|
||||
from .base import BaseSerializer, DynamicBaseSerializer
|
||||
from .user import UserLiteSerializer
|
||||
from .project import ProjectLiteSerializer
|
||||
from .workspace import WorkspaceLiteSerializer
|
||||
|
||||
from plane.db.models import (
|
||||
User,
|
||||
@ -19,17 +18,18 @@ from plane.db.models import (
|
||||
|
||||
|
||||
class ModuleWriteSerializer(BaseSerializer):
|
||||
members = serializers.ListField(
|
||||
lead_id = serializers.PrimaryKeyRelatedField(
|
||||
source="lead",
|
||||
queryset=User.objects.all(),
|
||||
required=False,
|
||||
allow_null=True,
|
||||
)
|
||||
member_ids = serializers.ListField(
|
||||
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
|
||||
write_only=True,
|
||||
required=False,
|
||||
)
|
||||
|
||||
project_detail = ProjectLiteSerializer(source="project", read_only=True)
|
||||
workspace_detail = WorkspaceLiteSerializer(
|
||||
source="workspace", read_only=True
|
||||
)
|
||||
|
||||
class Meta:
|
||||
model = Module
|
||||
fields = "__all__"
|
||||
@ -44,7 +44,9 @@ class ModuleWriteSerializer(BaseSerializer):
|
||||
|
||||
def to_representation(self, instance):
|
||||
data = super().to_representation(instance)
|
||||
data["members"] = [str(member.id) for member in instance.members.all()]
|
||||
data["member_ids"] = [
|
||||
str(member.id) for member in instance.members.all()
|
||||
]
|
||||
return data
|
||||
|
||||
def validate(self, data):
|
||||
@ -59,12 +61,10 @@ class ModuleWriteSerializer(BaseSerializer):
|
||||
return data
|
||||
|
||||
def create(self, validated_data):
|
||||
members = validated_data.pop("members", None)
|
||||
|
||||
members = validated_data.pop("member_ids", None)
|
||||
project = self.context["project"]
|
||||
|
||||
module = Module.objects.create(**validated_data, project=project)
|
||||
|
||||
if members is not None:
|
||||
ModuleMember.objects.bulk_create(
|
||||
[
|
||||
@ -85,7 +85,7 @@ class ModuleWriteSerializer(BaseSerializer):
|
||||
return module
|
||||
|
||||
def update(self, instance, validated_data):
|
||||
members = validated_data.pop("members", None)
|
||||
members = validated_data.pop("member_ids", None)
|
||||
|
||||
if members is not None:
|
||||
ModuleMember.objects.filter(module=instance).delete()
|
||||
@ -142,7 +142,6 @@ class ModuleIssueSerializer(BaseSerializer):
|
||||
|
||||
|
||||
class ModuleLinkSerializer(BaseSerializer):
|
||||
created_by_detail = UserLiteSerializer(read_only=True, source="created_by")
|
||||
|
||||
class Meta:
|
||||
model = ModuleLink
|
||||
@ -170,12 +169,9 @@ class ModuleLinkSerializer(BaseSerializer):
|
||||
|
||||
|
||||
class ModuleSerializer(DynamicBaseSerializer):
|
||||
project_detail = ProjectLiteSerializer(read_only=True, source="project")
|
||||
lead_detail = UserLiteSerializer(read_only=True, source="lead")
|
||||
members_detail = UserLiteSerializer(
|
||||
read_only=True, many=True, source="members"
|
||||
member_ids = serializers.ListField(
|
||||
child=serializers.UUIDField(), required=False, allow_null=True
|
||||
)
|
||||
link_module = ModuleLinkSerializer(read_only=True, many=True)
|
||||
is_favorite = serializers.BooleanField(read_only=True)
|
||||
total_issues = serializers.IntegerField(read_only=True)
|
||||
cancelled_issues = serializers.IntegerField(read_only=True)
|
||||
@ -186,15 +182,46 @@ class ModuleSerializer(DynamicBaseSerializer):
|
||||
|
||||
class Meta:
|
||||
model = Module
|
||||
fields = "__all__"
|
||||
read_only_fields = [
|
||||
"workspace",
|
||||
"project",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
fields = [
|
||||
# Required fields
|
||||
"id",
|
||||
"workspace_id",
|
||||
"project_id",
|
||||
# Model fields
|
||||
"name",
|
||||
"description",
|
||||
"description_text",
|
||||
"description_html",
|
||||
"start_date",
|
||||
"target_date",
|
||||
"status",
|
||||
"lead_id",
|
||||
"member_ids",
|
||||
"view_props",
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
# computed fields
|
||||
"is_favorite",
|
||||
"total_issues",
|
||||
"cancelled_issues",
|
||||
"completed_issues",
|
||||
"started_issues",
|
||||
"unstarted_issues",
|
||||
"backlog_issues",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
]
|
||||
read_only_fields = fields
|
||||
|
||||
|
||||
|
||||
class ModuleDetailSerializer(ModuleSerializer):
|
||||
|
||||
link_module = ModuleLinkSerializer(read_only=True, many=True)
|
||||
|
||||
class Meta(ModuleSerializer.Meta):
|
||||
fields = ModuleSerializer.Meta.fields + ['link_module']
|
||||
|
||||
|
||||
class ModuleFavoriteSerializer(BaseSerializer):
|
||||
|
@ -2,6 +2,7 @@ from django.urls import path
|
||||
|
||||
|
||||
from plane.app.views import (
|
||||
IssueListEndpoint,
|
||||
IssueViewSet,
|
||||
LabelViewSet,
|
||||
BulkCreateIssueLabelsEndpoint,
|
||||
@ -25,6 +26,11 @@ from plane.app.views import (
|
||||
|
||||
|
||||
urlpatterns = [
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/list/",
|
||||
IssueListEndpoint.as_view(),
|
||||
name="project-issue",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
|
||||
IssueViewSet.as_view(
|
||||
@ -84,11 +90,13 @@ urlpatterns = [
|
||||
BulkImportIssuesEndpoint.as_view(),
|
||||
name="project-issues-bulk",
|
||||
),
|
||||
# deprecated endpoint TODO: remove once confirmed
|
||||
path(
|
||||
"workspaces/<str:slug>/my-issues/",
|
||||
UserWorkSpaceIssues.as_view(),
|
||||
name="workspace-issues",
|
||||
),
|
||||
##
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:issue_id>/sub-issues/",
|
||||
SubIssuesEndpoint.as_view(),
|
||||
@ -251,23 +259,15 @@ urlpatterns = [
|
||||
name="project-issue-archive",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-issues/<uuid:pk>/",
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/archive/",
|
||||
IssueArchiveViewSet.as_view(
|
||||
{
|
||||
"get": "retrieve",
|
||||
"delete": "destroy",
|
||||
"post": "archive",
|
||||
"delete": "unarchive",
|
||||
}
|
||||
),
|
||||
name="project-issue-archive",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/projects/<uuid:project_id>/unarchive/<uuid:pk>/",
|
||||
IssueArchiveViewSet.as_view(
|
||||
{
|
||||
"post": "unarchive",
|
||||
}
|
||||
),
|
||||
name="project-issue-archive",
|
||||
name="project-issue-archive-unarchive",
|
||||
),
|
||||
## End Issue Archives
|
||||
## Issue Relation
|
||||
|
@ -22,6 +22,8 @@ from plane.app.views import (
|
||||
WorkspaceUserPropertiesEndpoint,
|
||||
WorkspaceStatesEndpoint,
|
||||
WorkspaceEstimatesEndpoint,
|
||||
WorkspaceModulesEndpoint,
|
||||
WorkspaceCyclesEndpoint,
|
||||
)
|
||||
|
||||
|
||||
@ -219,4 +221,14 @@ urlpatterns = [
|
||||
WorkspaceEstimatesEndpoint.as_view(),
|
||||
name="workspace-estimate",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/modules/",
|
||||
WorkspaceModulesEndpoint.as_view(),
|
||||
name="workspace-modules",
|
||||
),
|
||||
path(
|
||||
"workspaces/<str:slug>/cycles/",
|
||||
WorkspaceCyclesEndpoint.as_view(),
|
||||
name="workspace-cycles",
|
||||
),
|
||||
]
|
||||
|
@ -49,6 +49,8 @@ from .workspace import (
|
||||
WorkspaceUserPropertiesEndpoint,
|
||||
WorkspaceStatesEndpoint,
|
||||
WorkspaceEstimatesEndpoint,
|
||||
WorkspaceModulesEndpoint,
|
||||
WorkspaceCyclesEndpoint,
|
||||
)
|
||||
from .state import StateViewSet
|
||||
from .view import (
|
||||
@ -67,6 +69,7 @@ from .cycle import (
|
||||
)
|
||||
from .asset import FileAssetEndpoint, UserAssetsEndpoint, FileAssetViewSet
|
||||
from .issue import (
|
||||
IssueListEndpoint,
|
||||
IssueViewSet,
|
||||
WorkSpaceIssuesEndpoint,
|
||||
IssueActivityEndpoint,
|
||||
|
@ -1,6 +1,7 @@
|
||||
# Django imports
|
||||
from django.db.models import Count, Sum, F, Q
|
||||
from django.db.models.functions import ExtractMonth
|
||||
from django.utils import timezone
|
||||
|
||||
# Third party imports
|
||||
from rest_framework import status
|
||||
@ -331,8 +332,9 @@ class DefaultAnalyticsEndpoint(BaseAPIView):
|
||||
.order_by("state_group")
|
||||
)
|
||||
|
||||
current_year = timezone.now().year
|
||||
issue_completed_month_wise = (
|
||||
base_issues.filter(completed_at__isnull=False)
|
||||
base_issues.filter(completed_at__year=current_year)
|
||||
.annotate(month=ExtractMonth("completed_at"))
|
||||
.values("month")
|
||||
.annotate(count=Count("*"))
|
||||
|
@ -66,15 +66,15 @@ class ConfigurationEndpoint(BaseAPIView):
|
||||
},
|
||||
{
|
||||
"key": "SLACK_CLIENT_ID",
|
||||
"default": os.environ.get("SLACK_CLIENT_ID", "1"),
|
||||
"default": os.environ.get("SLACK_CLIENT_ID", None),
|
||||
},
|
||||
{
|
||||
"key": "POSTHOG_API_KEY",
|
||||
"default": os.environ.get("POSTHOG_API_KEY", "1"),
|
||||
"default": os.environ.get("POSTHOG_API_KEY", None),
|
||||
},
|
||||
{
|
||||
"key": "POSTHOG_HOST",
|
||||
"default": os.environ.get("POSTHOG_HOST", "1"),
|
||||
"default": os.environ.get("POSTHOG_HOST", None),
|
||||
},
|
||||
{
|
||||
"key": "UNSPLASH_ACCESS_KEY",
|
||||
@ -181,11 +181,11 @@ class MobileConfigurationEndpoint(BaseAPIView):
|
||||
},
|
||||
{
|
||||
"key": "POSTHOG_API_KEY",
|
||||
"default": os.environ.get("POSTHOG_API_KEY", "1"),
|
||||
"default": os.environ.get("POSTHOG_API_KEY", None),
|
||||
},
|
||||
{
|
||||
"key": "POSTHOG_HOST",
|
||||
"default": os.environ.get("POSTHOG_HOST", "1"),
|
||||
"default": os.environ.get("POSTHOG_HOST", None),
|
||||
},
|
||||
{
|
||||
"key": "UNSPLASH_ACCESS_KEY",
|
||||
|
@ -20,7 +20,10 @@ from django.core import serializers
|
||||
from django.utils import timezone
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.gzip import gzip_page
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db.models import Value, UUIDField
|
||||
from django.db.models.functions import Coalesce
|
||||
|
||||
# Third party imports
|
||||
from rest_framework.response import Response
|
||||
@ -33,7 +36,6 @@ from plane.app.serializers import (
|
||||
CycleIssueSerializer,
|
||||
CycleFavoriteSerializer,
|
||||
IssueSerializer,
|
||||
IssueStateSerializer,
|
||||
CycleWriteSerializer,
|
||||
CycleUserPropertiesSerializer,
|
||||
)
|
||||
@ -51,7 +53,6 @@ from plane.db.models import (
|
||||
IssueAttachment,
|
||||
Label,
|
||||
CycleUserProperties,
|
||||
IssueSubscriber,
|
||||
)
|
||||
from plane.bgtasks.issue_activites_task import issue_activity
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
@ -73,7 +74,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
)
|
||||
|
||||
def get_queryset(self):
|
||||
subquery = CycleFavorite.objects.filter(
|
||||
favorite_subquery = CycleFavorite.objects.filter(
|
||||
user=self.request.user,
|
||||
cycle_id=OuterRef("pk"),
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
@ -84,11 +85,28 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
.get_queryset()
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.select_related("owned_by")
|
||||
.annotate(is_favorite=Exists(subquery))
|
||||
.filter(
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
)
|
||||
.select_related("project", "workspace", "owned_by")
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_cycle__issue__assignees",
|
||||
queryset=User.objects.only(
|
||||
"avatar", "first_name", "id"
|
||||
).distinct(),
|
||||
)
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_cycle__issue__labels",
|
||||
queryset=Label.objects.only(
|
||||
"name", "color", "id"
|
||||
).distinct(),
|
||||
)
|
||||
)
|
||||
.annotate(is_favorite=Exists(favorite_subquery))
|
||||
.annotate(
|
||||
total_issues=Count(
|
||||
"issue_cycle",
|
||||
@ -148,29 +166,6 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
total_estimates=Sum("issue_cycle__issue__estimate_point")
|
||||
)
|
||||
.annotate(
|
||||
completed_estimates=Sum(
|
||||
"issue_cycle__issue__estimate_point",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="completed",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
started_estimates=Sum(
|
||||
"issue_cycle__issue__estimate_point",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="started",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
status=Case(
|
||||
When(
|
||||
@ -190,20 +185,16 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
output_field=CharField(),
|
||||
)
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_cycle__issue__assignees",
|
||||
queryset=User.objects.only(
|
||||
"avatar", "first_name", "id"
|
||||
).distinct(),
|
||||
)
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_cycle__issue__labels",
|
||||
queryset=Label.objects.only(
|
||||
"name", "color", "id"
|
||||
).distinct(),
|
||||
.annotate(
|
||||
assignee_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"issue_cycle__issue__assignees__id",
|
||||
distinct=True,
|
||||
filter=~Q(
|
||||
issue_cycle__issue__assignees__id__isnull=True
|
||||
),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
)
|
||||
)
|
||||
.order_by("-is_favorite", "name")
|
||||
@ -213,12 +204,8 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
def list(self, request, slug, project_id):
|
||||
queryset = self.get_queryset()
|
||||
cycle_view = request.GET.get("cycle_view", "all")
|
||||
fields = [
|
||||
field
|
||||
for field in request.GET.get("fields", "").split(",")
|
||||
if field
|
||||
]
|
||||
|
||||
# Update the order by
|
||||
queryset = queryset.order_by("-is_favorite", "-created_at")
|
||||
|
||||
# Current Cycle
|
||||
@ -228,9 +215,35 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
end_date__gte=timezone.now(),
|
||||
)
|
||||
|
||||
data = CycleSerializer(queryset, many=True).data
|
||||
data = queryset.values(
|
||||
# necessary fields
|
||||
"id",
|
||||
"workspace_id",
|
||||
"project_id",
|
||||
# model fields
|
||||
"name",
|
||||
"description",
|
||||
"start_date",
|
||||
"end_date",
|
||||
"owned_by_id",
|
||||
"view_props",
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
"progress_snapshot",
|
||||
# meta fields
|
||||
"is_favorite",
|
||||
"total_issues",
|
||||
"cancelled_issues",
|
||||
"completed_issues",
|
||||
"started_issues",
|
||||
"unstarted_issues",
|
||||
"backlog_issues",
|
||||
"assignee_ids",
|
||||
"status",
|
||||
)
|
||||
|
||||
if len(data):
|
||||
if data:
|
||||
assignee_distribution = (
|
||||
Issue.objects.filter(
|
||||
issue_cycle__cycle_id=data[0]["id"],
|
||||
@ -315,19 +328,45 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
}
|
||||
|
||||
if data[0]["start_date"] and data[0]["end_date"]:
|
||||
data[0]["distribution"][
|
||||
"completion_chart"
|
||||
] = burndown_plot(
|
||||
queryset=queryset.first(),
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
cycle_id=data[0]["id"],
|
||||
data[0]["distribution"]["completion_chart"] = (
|
||||
burndown_plot(
|
||||
queryset=queryset.first(),
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
cycle_id=data[0]["id"],
|
||||
)
|
||||
)
|
||||
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
|
||||
cycles = CycleSerializer(queryset, many=True).data
|
||||
return Response(cycles, status=status.HTTP_200_OK)
|
||||
data = queryset.values(
|
||||
# necessary fields
|
||||
"id",
|
||||
"workspace_id",
|
||||
"project_id",
|
||||
# model fields
|
||||
"name",
|
||||
"description",
|
||||
"start_date",
|
||||
"end_date",
|
||||
"owned_by_id",
|
||||
"view_props",
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
"progress_snapshot",
|
||||
# meta fields
|
||||
"is_favorite",
|
||||
"total_issues",
|
||||
"cancelled_issues",
|
||||
"completed_issues",
|
||||
"started_issues",
|
||||
"unstarted_issues",
|
||||
"backlog_issues",
|
||||
"assignee_ids",
|
||||
"status",
|
||||
)
|
||||
return Response(data, status=status.HTTP_200_OK)
|
||||
|
||||
def create(self, request, slug, project_id):
|
||||
if (
|
||||
@ -337,7 +376,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
request.data.get("start_date", None) is not None
|
||||
and request.data.get("end_date", None) is not None
|
||||
):
|
||||
serializer = CycleSerializer(data=request.data)
|
||||
serializer = CycleWriteSerializer(data=request.data)
|
||||
if serializer.is_valid():
|
||||
serializer.save(
|
||||
project_id=project_id,
|
||||
@ -346,12 +385,36 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
cycle = (
|
||||
self.get_queryset()
|
||||
.filter(pk=serializer.data["id"])
|
||||
.values(
|
||||
# necessary fields
|
||||
"id",
|
||||
"workspace_id",
|
||||
"project_id",
|
||||
# model fields
|
||||
"name",
|
||||
"description",
|
||||
"start_date",
|
||||
"end_date",
|
||||
"owned_by_id",
|
||||
"view_props",
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
"progress_snapshot",
|
||||
# meta fields
|
||||
"is_favorite",
|
||||
"total_issues",
|
||||
"cancelled_issues",
|
||||
"completed_issues",
|
||||
"started_issues",
|
||||
"unstarted_issues",
|
||||
"backlog_issues",
|
||||
"assignee_ids",
|
||||
"status",
|
||||
)
|
||||
.first()
|
||||
)
|
||||
serializer = CycleSerializer(cycle)
|
||||
return Response(
|
||||
serializer.data, status=status.HTTP_201_CREATED
|
||||
)
|
||||
return Response(cycle, status=status.HTTP_201_CREATED)
|
||||
return Response(
|
||||
serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
@ -364,10 +427,11 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
)
|
||||
|
||||
def partial_update(self, request, slug, project_id, pk):
|
||||
cycle = Cycle.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=pk
|
||||
queryset = (
|
||||
self.get_queryset()
|
||||
.filter(workspace__slug=slug, project_id=project_id, pk=pk)
|
||||
)
|
||||
|
||||
cycle = queryset.first()
|
||||
request_data = request.data
|
||||
|
||||
if (
|
||||
@ -375,7 +439,7 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
and cycle.end_date < timezone.now().date()
|
||||
):
|
||||
if "sort_order" in request_data:
|
||||
# Can only change sort order
|
||||
# Can only change sort order for a completed cycle``
|
||||
request_data = {
|
||||
"sort_order": request_data.get(
|
||||
"sort_order", cycle.sort_order
|
||||
@ -394,12 +458,71 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
)
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
cycle = queryset.values(
|
||||
# necessary fields
|
||||
"id",
|
||||
"workspace_id",
|
||||
"project_id",
|
||||
# model fields
|
||||
"name",
|
||||
"description",
|
||||
"start_date",
|
||||
"end_date",
|
||||
"owned_by_id",
|
||||
"view_props",
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
"progress_snapshot",
|
||||
# meta fields
|
||||
"is_favorite",
|
||||
"total_issues",
|
||||
"cancelled_issues",
|
||||
"completed_issues",
|
||||
"started_issues",
|
||||
"unstarted_issues",
|
||||
"backlog_issues",
|
||||
"assignee_ids",
|
||||
"status",
|
||||
).first()
|
||||
return Response(cycle, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def retrieve(self, request, slug, project_id, pk):
|
||||
queryset = self.get_queryset().get(pk=pk)
|
||||
|
||||
queryset = self.get_queryset().filter(pk=pk)
|
||||
data = (
|
||||
self.get_queryset()
|
||||
.filter(pk=pk)
|
||||
.values(
|
||||
# necessary fields
|
||||
"id",
|
||||
"workspace_id",
|
||||
"project_id",
|
||||
# model fields
|
||||
"name",
|
||||
"description",
|
||||
"start_date",
|
||||
"end_date",
|
||||
"owned_by_id",
|
||||
"view_props",
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
"progress_snapshot",
|
||||
# meta fields
|
||||
"is_favorite",
|
||||
"total_issues",
|
||||
"cancelled_issues",
|
||||
"completed_issues",
|
||||
"started_issues",
|
||||
"unstarted_issues",
|
||||
"backlog_issues",
|
||||
"assignee_ids",
|
||||
"status",
|
||||
)
|
||||
.first()
|
||||
)
|
||||
queryset = queryset.first()
|
||||
# Assignee Distribution
|
||||
assignee_distribution = (
|
||||
Issue.objects.filter(
|
||||
@ -488,7 +611,6 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
|
||||
.order_by("label_name")
|
||||
)
|
||||
|
||||
data = CycleSerializer(queryset).data
|
||||
data["distribution"] = {
|
||||
"assignees": assignee_distribution,
|
||||
"labels": label_distribution,
|
||||
@ -570,7 +692,10 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
|
||||
)
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
.filter(
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
)
|
||||
.filter(cycle_id=self.kwargs.get("cycle_id"))
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
@ -589,20 +714,18 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
|
||||
]
|
||||
order_by = request.GET.get("order_by", "created_at")
|
||||
filters = issue_filters(request.query_params, "GET")
|
||||
issues = (
|
||||
queryset = (
|
||||
Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
|
||||
.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(
|
||||
parent=OuterRef("id")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.filter(project_id=project_id)
|
||||
.filter(workspace__slug=slug)
|
||||
.filter(**filters)
|
||||
.select_related("workspace", "project", "state", "parent")
|
||||
.prefetch_related("assignees", "labels", "issue_module__module")
|
||||
.prefetch_related(
|
||||
"assignees",
|
||||
"labels",
|
||||
"issue_module__module",
|
||||
"issue_cycle__cycle",
|
||||
)
|
||||
.order_by(order_by)
|
||||
.filter(**filters)
|
||||
.annotate(cycle_id=F("issue_cycle__cycle_id"))
|
||||
@ -621,22 +744,79 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
is_subscribed=Exists(
|
||||
IssueSubscriber.objects.filter(
|
||||
subscriber=self.request.user, issue_id=OuterRef("id")
|
||||
)
|
||||
sub_issues_count=Issue.issue_objects.filter(
|
||||
parent=OuterRef("id")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
label_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"labels__id",
|
||||
distinct=True,
|
||||
filter=~Q(labels__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
assignee_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"assignees__id",
|
||||
distinct=True,
|
||||
filter=~Q(assignees__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
module_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"issue_module__module_id",
|
||||
distinct=True,
|
||||
filter=~Q(issue_module__module_id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
)
|
||||
.order_by(order_by)
|
||||
)
|
||||
serializer = IssueSerializer(
|
||||
issues, many=True, fields=fields if fields else None
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
if self.fields:
|
||||
issues = IssueSerializer(
|
||||
queryset, many=True, fields=fields if fields else None
|
||||
).data
|
||||
else:
|
||||
issues = queryset.values(
|
||||
"id",
|
||||
"name",
|
||||
"state_id",
|
||||
"sort_order",
|
||||
"completed_at",
|
||||
"estimate_point",
|
||||
"priority",
|
||||
"start_date",
|
||||
"target_date",
|
||||
"sequence_id",
|
||||
"project_id",
|
||||
"parent_id",
|
||||
"cycle_id",
|
||||
"module_ids",
|
||||
"label_ids",
|
||||
"assignee_ids",
|
||||
"sub_issues_count",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"attachment_count",
|
||||
"link_count",
|
||||
"is_draft",
|
||||
"archived_at",
|
||||
)
|
||||
return Response(issues, status=status.HTTP_200_OK)
|
||||
|
||||
def create(self, request, slug, project_id, cycle_id):
issues = request.data.get("issues", [])

if not len(issues):
if not issues:
return Response(
{"error": "Issues are required"},
status=status.HTTP_400_BAD_REQUEST,
|
||||
@ -658,52 +838,52 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
|
||||
)
|
||||
|
||||
# Get all CycleIssues already created
|
||||
cycle_issues = list(CycleIssue.objects.filter(issue_id__in=issues))
|
||||
update_cycle_issue_activity = []
|
||||
record_to_create = []
|
||||
records_to_update = []
|
||||
cycle_issues = list(
|
||||
CycleIssue.objects.filter(
|
||||
~Q(cycle_id=cycle_id), issue_id__in=issues
|
||||
)
|
||||
)
|
||||
existing_issues = [
|
||||
str(cycle_issue.issue_id) for cycle_issue in cycle_issues
|
||||
]
|
||||
new_issues = list(set(issues) - set(existing_issues))
|
||||
|
||||
for issue in issues:
|
||||
cycle_issue = [
|
||||
cycle_issue
|
||||
for cycle_issue in cycle_issues
|
||||
if str(cycle_issue.issue_id) in issues
|
||||
]
|
||||
# Update only when cycle changes
|
||||
if len(cycle_issue):
|
||||
if cycle_issue[0].cycle_id != cycle_id:
|
||||
update_cycle_issue_activity.append(
|
||||
{
|
||||
"old_cycle_id": str(cycle_issue[0].cycle_id),
|
||||
"new_cycle_id": str(cycle_id),
|
||||
"issue_id": str(cycle_issue[0].issue_id),
|
||||
}
|
||||
)
|
||||
cycle_issue[0].cycle_id = cycle_id
|
||||
records_to_update.append(cycle_issue[0])
|
||||
else:
|
||||
record_to_create.append(
|
||||
CycleIssue(
|
||||
project_id=project_id,
|
||||
workspace=cycle.workspace,
|
||||
created_by=request.user,
|
||||
updated_by=request.user,
|
||||
cycle=cycle,
|
||||
issue_id=issue,
|
||||
)
|
||||
# New issues to create
|
||||
created_records = CycleIssue.objects.bulk_create(
|
||||
[
|
||||
CycleIssue(
|
||||
project_id=project_id,
|
||||
workspace_id=cycle.workspace_id,
|
||||
created_by_id=request.user.id,
|
||||
updated_by_id=request.user.id,
|
||||
cycle_id=cycle_id,
|
||||
issue_id=issue,
|
||||
)
|
||||
|
||||
CycleIssue.objects.bulk_create(
|
||||
record_to_create,
|
||||
batch_size=10,
|
||||
ignore_conflicts=True,
|
||||
)
|
||||
CycleIssue.objects.bulk_update(
|
||||
records_to_update,
|
||||
["cycle"],
|
||||
for issue in new_issues
|
||||
],
|
||||
batch_size=10,
|
||||
)
|
||||
|
||||
# Updated Issues
|
||||
updated_records = []
|
||||
update_cycle_issue_activity = []
|
||||
# Iterate over each cycle_issue in cycle_issues
|
||||
for cycle_issue in cycle_issues:
|
||||
# Update the cycle_issue's cycle_id
|
||||
cycle_issue.cycle_id = cycle_id
|
||||
# Add the modified cycle_issue to the records_to_update list
|
||||
updated_records.append(cycle_issue)
|
||||
# Record the update activity
|
||||
update_cycle_issue_activity.append(
|
||||
{
|
||||
"old_cycle_id": str(cycle_issue.cycle_id),
|
||||
"new_cycle_id": str(cycle_id),
|
||||
"issue_id": str(cycle_issue.issue_id),
|
||||
}
|
||||
)
|
||||
|
||||
# Update the cycle issues
|
||||
CycleIssue.objects.bulk_update(updated_records, ["cycle_id"], batch_size=100)
|
||||
# Capture Issue Activity
|
||||
issue_activity.delay(
|
||||
type="cycle.activity.created",
|
||||
@ -715,7 +895,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
|
||||
{
|
||||
"updated_cycle_issues": update_cycle_issue_activity,
|
||||
"created_cycle_issues": serializers.serialize(
|
||||
"json", record_to_create
|
||||
"json", created_records
|
||||
),
|
||||
}
|
||||
),
|
||||
@ -723,16 +903,7 @@ class CycleIssueViewSet(WebhookMixin, BaseViewSet):
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
)
|
||||
|
||||
# Return all Cycle Issues
|
||||
issues = self.get_queryset().values_list("issue_id", flat=True)
|
||||
|
||||
return Response(
|
||||
IssueSerializer(
|
||||
Issue.objects.filter(pk__in=issues), many=True
|
||||
).data,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
|
||||
|
||||
def destroy(self, request, slug, project_id, cycle_id, issue_id):
|
||||
cycle_issue = CycleIssue.objects.get(
|
||||
@ -776,6 +947,7 @@ class CycleDateCheckEndpoint(BaseAPIView):
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
)
|
||||
|
||||
# Check if any cycle intersects in the given interval
|
||||
cycles = Cycle.objects.filter(
|
||||
Q(workspace__slug=slug)
|
||||
& Q(project_id=project_id)
|
||||
@ -785,7 +957,6 @@ class CycleDateCheckEndpoint(BaseAPIView):
|
||||
| Q(start_date__gte=start_date, end_date__lte=end_date)
|
||||
)
|
||||
).exclude(pk=cycle_id)
|
||||
|
||||
if cycles.exists():
|
||||
return Response(
|
||||
{
|
||||
@ -909,29 +1080,6 @@ class TransferCycleIssueEndpoint(BaseAPIView):
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
total_estimates=Sum("issue_cycle__issue__estimate_point")
|
||||
)
|
||||
.annotate(
|
||||
completed_estimates=Sum(
|
||||
"issue_cycle__issue__estimate_point",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="completed",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
started_estimates=Sum(
|
||||
"issue_cycle__issue__estimate_point",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="started",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
)
|
||||
|
||||
# Pass the new_cycle queryset to burndown_plot
|
||||
@ -942,6 +1090,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
|
||||
cycle_id=cycle_id,
|
||||
)
|
||||
|
||||
# Get the assignee distribution
|
||||
assignee_distribution = (
|
||||
Issue.objects.filter(
|
||||
issue_cycle__cycle_id=cycle_id,
|
||||
@ -980,7 +1129,22 @@ class TransferCycleIssueEndpoint(BaseAPIView):
|
||||
)
|
||||
.order_by("display_name")
|
||||
)
|
||||
# assignee distribution serialized
assignee_distribution_data = [
{
"display_name": item["display_name"],
"assignee_id": (
str(item["assignee_id"]) if item["assignee_id"] else None
),
"avatar": item["avatar"],
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
}
for item in assignee_distribution
]

# Get the label distribution
label_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=cycle_id,
|
||||
@ -1023,7 +1187,9 @@ class TransferCycleIssueEndpoint(BaseAPIView):
|
||||
assignee_distribution_data = [
|
||||
{
|
||||
"display_name": item["display_name"],
|
||||
"assignee_id": str(item["assignee_id"]) if item["assignee_id"] else None,
|
||||
"assignee_id": (
|
||||
str(item["assignee_id"]) if item["assignee_id"] else None
|
||||
),
|
||||
"avatar": item["avatar"],
|
||||
"total_issues": item["total_issues"],
|
||||
"completed_issues": item["completed_issues"],
|
||||
@ -1032,11 +1198,14 @@ class TransferCycleIssueEndpoint(BaseAPIView):
|
||||
for item in assignee_distribution
|
||||
]
|
||||
|
||||
# Label distribution serialization
label_distribution_data = [
{
"label_name": item["label_name"],
"color": item["color"],
"label_id": str(item["label_id"]) if item["label_id"] else None,
"label_id": (
str(item["label_id"]) if item["label_id"] else None
),
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
|
||||
@ -1055,10 +1224,7 @@ class TransferCycleIssueEndpoint(BaseAPIView):
|
||||
"started_issues": old_cycle.first().started_issues,
|
||||
"unstarted_issues": old_cycle.first().unstarted_issues,
|
||||
"backlog_issues": old_cycle.first().backlog_issues,
|
||||
"total_estimates": old_cycle.first().total_estimates,
|
||||
"completed_estimates": old_cycle.first().completed_estimates,
|
||||
"started_estimates": old_cycle.first().started_estimates,
|
||||
"distribution":{
|
||||
"distribution": {
|
||||
"labels": label_distribution_data,
|
||||
"assignees": assignee_distribution_data,
|
||||
"completion_chart": completion_chart,
|
||||
|
@ -15,6 +15,10 @@ from django.db.models import (
|
||||
Func,
Prefetch,
)
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Value, UUIDField
from django.db.models.functions import Coalesce
from django.utils import timezone

# Third Party imports
|
||||
@ -54,6 +58,7 @@ def dashboard_overview_stats(self, request, slug):
|
||||
|
||||
pending_issues_count = Issue.issue_objects.filter(
|
||||
~Q(state__group__in=["completed", "cancelled"]),
|
||||
target_date__lt=timezone.now().date(),
|
||||
project__project_projectmember__is_active=True,
|
||||
project__project_projectmember__member=request.user,
|
||||
workspace__slug=slug,
|
||||
@ -130,7 +135,32 @@ def dashboard_assigned_issues(self, request, slug):
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.order_by("created_at")
|
||||
.annotate(
|
||||
label_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"labels__id",
|
||||
distinct=True,
|
||||
filter=~Q(labels__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
assignee_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"assignees__id",
|
||||
distinct=True,
|
||||
filter=~Q(assignees__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
module_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"issue_module__module_id",
|
||||
distinct=True,
|
||||
filter=~Q(issue_module__module_id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
# Priority Ordering
|
||||
@ -259,6 +289,32 @@ def dashboard_created_issues(self, request, slug):
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
label_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"labels__id",
|
||||
distinct=True,
|
||||
filter=~Q(labels__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
assignee_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"assignees__id",
|
||||
distinct=True,
|
||||
filter=~Q(assignees__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
module_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"issue_module__module_id",
|
||||
distinct=True,
|
||||
filter=~Q(issue_module__module_id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
)
|
||||
.order_by("created_at")
|
||||
)
|
||||
|
||||
|
@ -3,8 +3,12 @@ import json
|
||||
|
||||
# Django import
from django.utils import timezone
from django.db.models import Q, Count, OuterRef, Func, F, Prefetch
from django.db.models import Q, Count, OuterRef, Func, F, Prefetch, Exists
from django.core.serializers.json import DjangoJSONEncoder
from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Value, UUIDField
from django.db.models.functions import Coalesce

# Third party imports
from rest_framework import status
|
||||
@ -21,13 +25,15 @@ from plane.db.models import (
|
||||
IssueLink,
IssueAttachment,
ProjectMember,
IssueReaction,
IssueSubscriber,
)
from plane.app.serializers import (
IssueCreateSerializer,
IssueSerializer,
InboxSerializer,
InboxIssueSerializer,
IssueCreateSerializer,
IssueStateInboxSerializer,
IssueDetailSerializer,
)
from plane.utils.issue_filters import issue_filters
from plane.bgtasks.issue_activites_task import issue_activity
|
||||
@ -92,7 +98,7 @@ class InboxIssueViewSet(BaseViewSet):
|
||||
Issue.objects.filter(
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
issue_inbox__inbox_id=self.kwargs.get("inbox_id")
|
||||
issue_inbox__inbox_id=self.kwargs.get("inbox_id"),
|
||||
)
|
||||
.select_related("workspace", "project", "state", "parent")
|
||||
.prefetch_related("assignees", "labels", "issue_module__module")
|
||||
@ -127,14 +133,75 @@ class InboxIssueViewSet(BaseViewSet):
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
label_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"labels__id",
|
||||
distinct=True,
|
||||
filter=~Q(labels__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
assignee_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"assignees__id",
|
||||
distinct=True,
|
||||
filter=~Q(assignees__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
module_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"issue_module__module_id",
|
||||
distinct=True,
|
||||
filter=~Q(issue_module__module_id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
)
|
||||
).distinct()
|
||||
|
||||
def list(self, request, slug, project_id, inbox_id):
filters = issue_filters(request.query_params, "GET")
issue_queryset = self.get_queryset().filter(**filters).order_by("issue_inbox__snoozed_till", "issue_inbox__status")
issues_data = IssueSerializer(issue_queryset, expand=self.expand, many=True).data
issue_queryset = (
self.get_queryset()
.filter(**filters)
.order_by("issue_inbox__snoozed_till", "issue_inbox__status")
)
if self.expand:
issues = IssueSerializer(
issue_queryset, expand=self.expand, many=True
).data
else:
issues = issue_queryset.values(
|
||||
"id",
|
||||
"name",
|
||||
"state_id",
|
||||
"sort_order",
|
||||
"completed_at",
|
||||
"estimate_point",
|
||||
"priority",
|
||||
"start_date",
|
||||
"target_date",
|
||||
"sequence_id",
|
||||
"project_id",
|
||||
"parent_id",
|
||||
"cycle_id",
|
||||
"module_ids",
|
||||
"label_ids",
|
||||
"assignee_ids",
|
||||
"sub_issues_count",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"attachment_count",
|
||||
"link_count",
|
||||
"is_draft",
|
||||
"archived_at",
|
||||
)
|
||||
return Response(
|
||||
issues_data,
|
||||
issues,
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
@ -199,8 +266,8 @@ class InboxIssueViewSet(BaseViewSet):
|
||||
source=request.data.get("source", "in-app"),
)

issue = (self.get_queryset().filter(pk=issue.id).first())
serializer = IssueSerializer(issue ,expand=self.expand)
issue = self.get_queryset().filter(pk=issue.id).first()
serializer = IssueSerializer(issue, expand=self.expand)
return Response(serializer.data, status=status.HTTP_200_OK)

def partial_update(self, request, slug, project_id, inbox_id, issue_id):
|
||||
@ -230,11 +297,7 @@ class InboxIssueViewSet(BaseViewSet):
|
||||
issue_data = request.data.pop("issue", False)
|
||||
|
||||
if bool(issue_data):
|
||||
issue = Issue.objects.get(
|
||||
pk=inbox_issue.issue_id,
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
)
|
||||
issue = self.get_queryset().filter(pk=inbox_issue.issue_id).first()
|
||||
# Only allow guests and viewers to edit name and description
|
||||
if project_member.role <= 10:
|
||||
# viewers and guests since only viewers and guests
|
||||
@ -320,20 +383,54 @@ class InboxIssueViewSet(BaseViewSet):
|
||||
if state is not None:
|
||||
issue.state = state
|
||||
issue.save()
|
||||
issue = (self.get_queryset().filter(pk=issue_id).first())
|
||||
serializer = IssueSerializer(issue, expand=self.expand)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
return Response(status=status.HTTP_204_NO_CONTENT)
|
||||
return Response(
|
||||
serializer.errors, status=status.HTTP_400_BAD_REQUEST
|
||||
)
|
||||
else:
|
||||
issue = (self.get_queryset().filter(pk=issue_id).first())
|
||||
serializer = IssueSerializer(issue ,expand=self.expand)
|
||||
issue = self.get_queryset().filter(pk=issue_id).first()
|
||||
serializer = IssueSerializer(issue, expand=self.expand)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
def retrieve(self, request, slug, project_id, inbox_id, issue_id):
|
||||
issue = self.get_queryset().filter(pk=issue_id).first()
|
||||
serializer = IssueSerializer(issue, expand=self.expand,)
|
||||
issue = (
|
||||
self.get_queryset()
|
||||
.filter(pk=issue_id)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_reactions",
|
||||
queryset=IssueReaction.objects.select_related(
|
||||
"issue", "actor"
|
||||
),
|
||||
)
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_attachment",
|
||||
queryset=IssueAttachment.objects.select_related("issue"),
|
||||
)
|
||||
)
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_link",
|
||||
queryset=IssueLink.objects.select_related("created_by"),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
is_subscribed=Exists(
|
||||
IssueSubscriber.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_id=project_id,
|
||||
issue_id=OuterRef("pk"),
|
||||
subscriber=request.user,
|
||||
)
|
||||
)
|
||||
)
|
||||
).first()
|
||||
if issue is None:
|
||||
return Response({"error": "Requested object was not found"}, status=status.HTTP_404_NOT_FOUND)
|
||||
|
||||
serializer = IssueDetailSerializer(issue)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
def destroy(self, request, slug, project_id, inbox_id, issue_id):
|
||||
|
@ -36,7 +36,10 @@ class SlackProjectSyncViewSet(BaseViewSet):
|
||||
workspace__slug=self.kwargs.get("slug"),
project_id=self.kwargs.get("project_id"),
)
.filter(project__project_projectmember__member=self.request.user)
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
)

def create(self, request, slug, project_id, workspace_integration_id):

File diff suppressed because it is too large
@ -4,11 +4,12 @@ import json
|
||||
# Django Imports
|
||||
from django.utils import timezone
|
||||
from django.db.models import Prefetch, F, OuterRef, Func, Exists, Count, Q
|
||||
from django.core import serializers
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.gzip import gzip_page
|
||||
from django.core.serializers.json import DjangoJSONEncoder
|
||||
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db.models import Value, UUIDField
|
||||
from django.db.models.functions import Coalesce
|
||||
|
||||
# Third party imports
|
||||
from rest_framework.response import Response
|
||||
@ -24,6 +25,7 @@ from plane.app.serializers import (
|
||||
ModuleFavoriteSerializer,
|
||||
IssueSerializer,
|
||||
ModuleUserPropertiesSerializer,
|
||||
ModuleDetailSerializer,
|
||||
)
|
||||
from plane.app.permissions import (
|
||||
ProjectEntityPermission,
|
||||
@ -38,11 +40,9 @@ from plane.db.models import (
|
||||
ModuleFavorite,
|
||||
IssueLink,
|
||||
IssueAttachment,
|
||||
IssueSubscriber,
|
||||
ModuleUserProperties,
|
||||
)
|
||||
from plane.bgtasks.issue_activites_task import issue_activity
|
||||
from plane.utils.grouper import group_results
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
from plane.utils.analytics_plot import burndown_plot
|
||||
|
||||
@ -62,7 +62,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
|
||||
)
|
||||
|
||||
def get_queryset(self):
subquery = ModuleFavorite.objects.filter(
favorite_subquery = ModuleFavorite.objects.filter(
user=self.request.user,
module_id=OuterRef("pk"),
project_id=self.kwargs.get("project_id"),
@ -73,7 +73,7 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
.get_queryset()
.filter(project_id=self.kwargs.get("project_id"))
.filter(workspace__slug=self.kwargs.get("slug"))
.annotate(is_favorite=Exists(subquery))
.annotate(is_favorite=Exists(favorite_subquery))
.select_related("project")
.select_related("workspace")
.select_related("lead")
|
||||
@ -145,6 +145,16 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
member_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"members__id",
|
||||
distinct=True,
|
||||
filter=~Q(members__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
)
|
||||
)
|
||||
.order_by("-is_favorite", "-created_at")
|
||||
)
|
||||
|
||||
@ -157,25 +167,84 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
|
||||
module = Module.objects.get(pk=serializer.data["id"])
|
||||
serializer = ModuleSerializer(module)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
module = (
|
||||
self.get_queryset()
|
||||
.filter(pk=serializer.data["id"])
|
||||
.values( # Required fields
|
||||
"id",
|
||||
"workspace_id",
|
||||
"project_id",
|
||||
# Model fields
|
||||
"name",
|
||||
"description",
|
||||
"description_text",
|
||||
"description_html",
|
||||
"start_date",
|
||||
"target_date",
|
||||
"status",
|
||||
"lead_id",
|
||||
"member_ids",
|
||||
"view_props",
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
# computed fields
|
||||
"is_favorite",
|
||||
"total_issues",
|
||||
"cancelled_issues",
|
||||
"completed_issues",
|
||||
"started_issues",
|
||||
"unstarted_issues",
|
||||
"backlog_issues",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
)
|
||||
).first()
|
||||
return Response(module, status=status.HTTP_201_CREATED)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def list(self, request, slug, project_id):
|
||||
queryset = self.get_queryset()
|
||||
fields = [
|
||||
field
|
||||
for field in request.GET.get("fields", "").split(",")
|
||||
if field
|
||||
]
|
||||
modules = ModuleSerializer(
|
||||
queryset, many=True, fields=fields if fields else None
|
||||
).data
|
||||
if self.fields:
|
||||
modules = ModuleSerializer(
|
||||
queryset,
|
||||
many=True,
|
||||
fields=self.fields,
|
||||
).data
|
||||
else:
|
||||
modules = queryset.values( # Required fields
|
||||
"id",
|
||||
"workspace_id",
|
||||
"project_id",
|
||||
# Model fields
|
||||
"name",
|
||||
"description",
|
||||
"description_text",
|
||||
"description_html",
|
||||
"start_date",
|
||||
"target_date",
|
||||
"status",
|
||||
"lead_id",
|
||||
"member_ids",
|
||||
"view_props",
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
# computed fields
|
||||
"is_favorite",
|
||||
"total_issues",
|
||||
"cancelled_issues",
|
||||
"completed_issues",
|
||||
"started_issues",
|
||||
"unstarted_issues",
|
||||
"backlog_issues",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
)
|
||||
return Response(modules, status=status.HTTP_200_OK)
|
||||
|
||||
def retrieve(self, request, slug, project_id, pk):
|
||||
queryset = self.get_queryset().get(pk=pk)
|
||||
queryset = self.get_queryset().filter(pk=pk)
|
||||
|
||||
assignee_distribution = (
|
||||
Issue.objects.filter(
|
||||
@ -269,16 +338,16 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
|
||||
.order_by("label_name")
|
||||
)
|
||||
|
||||
data = ModuleSerializer(queryset).data
|
||||
data = ModuleDetailSerializer(queryset.first()).data
|
||||
data["distribution"] = {
|
||||
"assignees": assignee_distribution,
|
||||
"labels": label_distribution,
|
||||
"completion_chart": {},
|
||||
}
|
||||
|
||||
if queryset.start_date and queryset.target_date:
|
||||
if queryset.first().start_date and queryset.first().target_date:
|
||||
data["distribution"]["completion_chart"] = burndown_plot(
|
||||
queryset=queryset,
|
||||
queryset=queryset.first(),
|
||||
slug=slug,
|
||||
project_id=project_id,
|
||||
module_id=pk,
|
||||
@ -289,6 +358,47 @@ class ModuleViewSet(WebhookMixin, BaseViewSet):
|
||||
status=status.HTTP_200_OK,
|
||||
)
|
||||
|
||||
def partial_update(self, request, slug, project_id, pk):
|
||||
queryset = self.get_queryset().filter(pk=pk)
|
||||
serializer = ModuleWriteSerializer(
|
||||
queryset.first(), data=request.data, partial=True
|
||||
)
|
||||
|
||||
if serializer.is_valid():
|
||||
serializer.save()
|
||||
module = queryset.values(
|
||||
# Required fields
|
||||
"id",
|
||||
"workspace_id",
|
||||
"project_id",
|
||||
# Model fields
|
||||
"name",
|
||||
"description",
|
||||
"description_text",
|
||||
"description_html",
|
||||
"start_date",
|
||||
"target_date",
|
||||
"status",
|
||||
"lead_id",
|
||||
"member_ids",
|
||||
"view_props",
|
||||
"sort_order",
|
||||
"external_source",
|
||||
"external_id",
|
||||
# computed fields
|
||||
"is_favorite",
|
||||
"total_issues",
|
||||
"cancelled_issues",
|
||||
"completed_issues",
|
||||
"started_issues",
|
||||
"unstarted_issues",
|
||||
"backlog_issues",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
).first()
|
||||
return Response(module, status=status.HTTP_200_OK)
|
||||
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
|
||||
|
||||
def destroy(self, request, slug, project_id, pk):
|
||||
module = Module.objects.get(
|
||||
workspace__slug=slug, project_id=project_id, pk=pk
|
||||
@ -331,17 +441,15 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
|
||||
ProjectEntityPermission,
|
||||
]
|
||||
|
||||
|
||||
def get_queryset(self):
|
||||
return (
|
||||
Issue.issue_objects.filter(
|
||||
project_id=self.kwargs.get("project_id"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
issue_module__module_id=self.kwargs.get("module_id")
|
||||
issue_module__module_id=self.kwargs.get("module_id"),
|
||||
)
|
||||
.select_related("workspace", "project", "state", "parent")
|
||||
.prefetch_related("labels", "assignees")
|
||||
.prefetch_related('issue_module__module')
|
||||
.prefetch_related("assignees", "labels", "issue_module__module")
|
||||
.annotate(cycle_id=F("issue_cycle__cycle_id"))
|
||||
.annotate(
|
||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||
@ -365,6 +473,32 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
label_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"labels__id",
|
||||
distinct=True,
|
||||
filter=~Q(labels__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
assignee_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"assignees__id",
|
||||
distinct=True,
|
||||
filter=~Q(assignees__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
module_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"issue_module__module_id",
|
||||
distinct=True,
|
||||
filter=~Q(issue_module__module_id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
)
|
||||
).distinct()
|
||||
|
||||
@method_decorator(gzip_page)
|
||||
@ -376,15 +510,44 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
|
||||
]
|
||||
filters = issue_filters(request.query_params, "GET")
|
||||
issue_queryset = self.get_queryset().filter(**filters)
|
||||
serializer = IssueSerializer(
|
||||
issue_queryset, many=True, fields=fields if fields else None
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
if self.fields or self.expand:
|
||||
issues = IssueSerializer(
|
||||
issue_queryset, many=True, fields=fields if fields else None
|
||||
).data
|
||||
else:
|
||||
issues = issue_queryset.values(
|
||||
"id",
|
||||
"name",
|
||||
"state_id",
|
||||
"sort_order",
|
||||
"completed_at",
|
||||
"estimate_point",
|
||||
"priority",
|
||||
"start_date",
|
||||
"target_date",
|
||||
"sequence_id",
|
||||
"project_id",
|
||||
"parent_id",
|
||||
"cycle_id",
|
||||
"module_ids",
|
||||
"label_ids",
|
||||
"assignee_ids",
|
||||
"sub_issues_count",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"attachment_count",
|
||||
"link_count",
|
||||
"is_draft",
|
||||
"archived_at",
|
||||
)
|
||||
return Response(issues, status=status.HTTP_200_OK)
|
||||
|
||||
# create multiple issues inside a module
|
||||
def create_module_issues(self, request, slug, project_id, module_id):
|
||||
issues = request.data.get("issues", [])
|
||||
if not len(issues):
|
||||
if not issues:
|
||||
return Response(
|
||||
{"error": "Issues are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@ -420,15 +583,12 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
|
||||
)
|
||||
for issue in issues
|
||||
]
|
||||
issues = (self.get_queryset().filter(pk__in=issues))
|
||||
serializer = IssueSerializer(issues , many=True)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
|
||||
|
||||
# create multiple module inside an issue
|
||||
def create_issue_modules(self, request, slug, project_id, issue_id):
|
||||
modules = request.data.get("modules", [])
|
||||
if not len(modules):
|
||||
if not modules:
|
||||
return Response(
|
||||
{"error": "Modules are required"},
|
||||
status=status.HTTP_400_BAD_REQUEST,
|
||||
@ -466,10 +626,7 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
|
||||
for module in modules
|
||||
]
|
||||
|
||||
issue = (self.get_queryset().filter(pk=issue_id).first())
|
||||
serializer = IssueSerializer(issue)
|
||||
return Response(serializer.data, status=status.HTTP_201_CREATED)
|
||||
|
||||
return Response({"message": "success"}, status=status.HTTP_201_CREATED)
|
||||
|
||||
def destroy(self, request, slug, project_id, module_id, issue_id):
|
||||
module_issue = ModuleIssue.objects.get(
|
||||
@ -484,7 +641,9 @@ class ModuleIssueViewSet(WebhookMixin, BaseViewSet):
|
||||
actor_id=str(request.user.id),
|
||||
issue_id=str(issue_id),
|
||||
project_id=str(project_id),
|
||||
current_instance=json.dumps({"module_name": module_issue.module.name}),
|
||||
current_instance=json.dumps(
|
||||
{"module_name": module_issue.module.name}
|
||||
),
|
||||
epoch=int(timezone.now().timestamp()),
|
||||
notification=True,
|
||||
origin=request.META.get("HTTP_ORIGIN"),
|
||||
@ -514,7 +673,10 @@ class ModuleLinkViewSet(BaseViewSet):
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(module_id=self.kwargs.get("module_id"))
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
.filter(
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
)
|
||||
.order_by("-created_at")
|
||||
.distinct()
|
||||
)
|
||||
|
@ -70,7 +70,10 @@ class PageViewSet(BaseViewSet):
|
||||
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(project__project_projectmember__member=self.request.user)
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(parent__isnull=True)
.filter(Q(owned_by=self.request.user) | Q(access=0))
.select_related("project")
|
||||
|
@ -77,6 +77,12 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
|
||||
]
|
||||
|
||||
def get_queryset(self):
|
||||
sort_order = ProjectMember.objects.filter(
|
||||
member=self.request.user,
|
||||
project_id=OuterRef("pk"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
is_active=True,
|
||||
).values("sort_order")
|
||||
return self.filter_queryset(
|
||||
super()
|
||||
.get_queryset()
|
||||
@ -147,6 +153,7 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
|
||||
)
|
||||
)
|
||||
)
|
||||
.annotate(sort_order=Subquery(sort_order))
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"project_projectmember",
|
||||
@ -166,16 +173,8 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
|
||||
for field in request.GET.get("fields", "").split(",")
|
||||
if field
|
||||
]
|
||||
|
||||
sort_order_query = ProjectMember.objects.filter(
|
||||
member=request.user,
|
||||
project_id=OuterRef("pk"),
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
is_active=True,
|
||||
).values("sort_order")
|
||||
projects = (
|
||||
self.get_queryset()
|
||||
.annotate(sort_order=Subquery(sort_order_query))
|
||||
.order_by("sort_order", "name")
|
||||
)
|
||||
if request.GET.get("per_page", False) and request.GET.get(
|
||||
@ -204,7 +203,7 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
|
||||
serializer.save()

# Add the user as Administrator to the project
project_member = ProjectMember.objects.create(
_ = ProjectMember.objects.create(
project_id=serializer.data["id"],
member=request.user,
role=20,
|
||||
|
@ -48,8 +48,8 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
return (
Project.objects.filter(
q,
Q(project_projectmember__member=self.request.user)
| Q(network=2),
project_projectmember__member=self.request.user,
project_projectmember__is_active=True,
workspace__slug=slug,
)
.distinct()
|
||||
@ -71,6 +71,7 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
issues = Issue.issue_objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
|
||||
@ -95,6 +96,7 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
cycles = Cycle.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
|
||||
@ -118,6 +120,7 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
modules = Module.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
|
||||
@ -141,6 +144,7 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
pages = Page.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
|
||||
@ -164,6 +168,7 @@ class GlobalSearchEndpoint(BaseAPIView):
|
||||
issue_views = IssueView.objects.filter(
|
||||
q,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
workspace__slug=slug,
|
||||
)
|
||||
|
||||
@ -236,6 +241,7 @@ class IssueSearchEndpoint(BaseAPIView):
|
||||
issues = Issue.issue_objects.filter(
|
||||
workspace__slug=slug,
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
)
|
||||
|
||||
if workspace_search == "false":
|
||||
@ -247,12 +253,7 @@ class IssueSearchEndpoint(BaseAPIView):
|
||||
if parent == "true" and issue_id:
issue = Issue.issue_objects.get(pk=issue_id)
issues = issues.filter(
~Q(pk=issue_id), ~Q(pk=issue.parent_id), parent__isnull=True
).exclude(
pk__in=Issue.issue_objects.filter(
parent__isnull=False
).values_list("parent_id", flat=True)
)
~Q(pk=issue_id), ~Q(pk=issue.parent_id), ~Q(parent_id=issue_id))
if issue_relation == "true" and issue_id:
issue = Issue.issue_objects.get(pk=issue_id)
issues = issues.filter(
|
||||
|
@ -31,7 +31,10 @@ class StateViewSet(BaseViewSet):
|
||||
.get_queryset()
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
.filter(project__project_projectmember__member=self.request.user)
.filter(
project__project_projectmember__member=self.request.user,
project__project_projectmember__is_active=True,
)
.filter(~Q(name="Triage"))
.select_related("project")
.select_related("workspace")
|
||||
|
@ -1,6 +1,6 @@
|
||||
# Django imports
|
||||
from django.db.models import (
|
||||
Prefetch,
|
||||
Q,
|
||||
OuterRef,
|
||||
Func,
|
||||
F,
|
||||
@ -13,16 +13,21 @@ from django.db.models import (
|
||||
)
|
||||
from django.utils.decorators import method_decorator
|
||||
from django.views.decorators.gzip import gzip_page
|
||||
from django.db.models import Prefetch, OuterRef, Exists
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db.models import Value, UUIDField
|
||||
from django.db.models.functions import Coalesce
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db.models import Value, UUIDField
|
||||
|
||||
# Third party imports
|
||||
from rest_framework.response import Response
|
||||
from rest_framework import status
|
||||
|
||||
# Module imports
|
||||
from . import BaseViewSet, BaseAPIView
|
||||
from . import BaseViewSet
|
||||
from plane.app.serializers import (
|
||||
GlobalViewSerializer,
|
||||
IssueViewSerializer,
|
||||
IssueSerializer,
|
||||
IssueViewFavoriteSerializer,
|
||||
@ -30,22 +35,16 @@ from plane.app.serializers import (
|
||||
from plane.app.permissions import (
|
||||
WorkspaceEntityPermission,
|
||||
ProjectEntityPermission,
|
||||
WorkspaceViewerPermission,
|
||||
ProjectLitePermission,
|
||||
)
|
||||
from plane.db.models import (
|
||||
Workspace,
|
||||
GlobalView,
|
||||
IssueView,
|
||||
Issue,
|
||||
IssueViewFavorite,
|
||||
IssueReaction,
|
||||
IssueLink,
|
||||
IssueAttachment,
|
||||
IssueSubscriber,
|
||||
)
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
from plane.utils.grouper import group_results
|
||||
|
||||
|
||||
class GlobalViewViewSet(BaseViewSet):
|
||||
@ -87,13 +86,60 @@ class GlobalViewIssuesViewSet(BaseViewSet):
|
||||
.values("count")
|
||||
)
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
)
|
||||
.select_related("workspace", "project", "state", "parent")
|
||||
.prefetch_related("assignees", "labels", "issue_module__module")
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"issue_reactions",
|
||||
queryset=IssueReaction.objects.select_related("actor"),
|
||||
.annotate(cycle_id=F("issue_cycle__cycle_id"))
|
||||
.annotate(
|
||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
attachment_count=IssueAttachment.objects.filter(
|
||||
issue=OuterRef("id")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(
|
||||
parent=OuterRef("id")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
label_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"labels__id",
|
||||
distinct=True,
|
||||
filter=~Q(labels__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
assignee_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"assignees__id",
|
||||
distinct=True,
|
||||
filter=~Q(assignees__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
module_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"issue_module__module_id",
|
||||
distinct=True,
|
||||
filter=~Q(issue_module__module_id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
)
|
||||
)
|
||||
|
||||
@ -121,30 +167,7 @@ class GlobalViewIssuesViewSet(BaseViewSet):
|
||||
issue_queryset = (
|
||||
self.get_queryset()
|
||||
.filter(**filters)
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
.annotate(cycle_id=F("issue_cycle__cycle_id"))
|
||||
.annotate(
|
||||
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
attachment_count=IssueAttachment.objects.filter(
|
||||
issue=OuterRef("id")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
sub_issues_count=Issue.issue_objects.filter(
|
||||
parent=OuterRef("id")
|
||||
)
|
||||
.order_by()
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
)
|
||||
|
||||
# Priority Ordering
|
||||
@ -207,10 +230,39 @@ class GlobalViewIssuesViewSet(BaseViewSet):
|
||||
else:
|
||||
issue_queryset = issue_queryset.order_by(order_by_param)
|
||||
|
||||
serializer = IssueSerializer(
|
||||
issue_queryset, many=True, fields=fields if fields else None
|
||||
)
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
if self.fields:
|
||||
issues = IssueSerializer(
|
||||
issue_queryset, many=True, fields=self.fields
|
||||
).data
|
||||
else:
|
||||
issues = issue_queryset.values(
|
||||
"id",
|
||||
"name",
|
||||
"state_id",
|
||||
"sort_order",
|
||||
"completed_at",
|
||||
"estimate_point",
|
||||
"priority",
|
||||
"start_date",
|
||||
"target_date",
|
||||
"sequence_id",
|
||||
"project_id",
|
||||
"parent_id",
|
||||
"cycle_id",
|
||||
"module_ids",
|
||||
"label_ids",
|
||||
"assignee_ids",
|
||||
"sub_issues_count",
|
||||
"created_at",
|
||||
"updated_at",
|
||||
"created_by",
|
||||
"updated_by",
|
||||
"attachment_count",
|
||||
"link_count",
|
||||
"is_draft",
|
||||
"archived_at",
|
||||
)
|
||||
return Response(issues, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class IssueViewViewSet(BaseViewSet):
|
||||
@ -235,7 +287,10 @@ class IssueViewViewSet(BaseViewSet):
|
||||
.get_queryset()
|
||||
.filter(workspace__slug=self.kwargs.get("slug"))
|
||||
.filter(project_id=self.kwargs.get("project_id"))
|
||||
.filter(project__project_projectmember__member=self.request.user)
|
||||
.filter(
|
||||
project__project_projectmember__member=self.request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
)
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.annotate(is_favorite=Exists(subquery))
|
||||
|
@ -22,9 +22,14 @@ from django.db.models import (
|
||||
When,
|
||||
Max,
|
||||
IntegerField,
|
||||
Sum,
|
||||
)
|
||||
from django.db.models.functions import ExtractWeek, Cast, ExtractDay
|
||||
from django.db.models.fields import DateField
|
||||
from django.contrib.postgres.aggregates import ArrayAgg
|
||||
from django.contrib.postgres.fields import ArrayField
|
||||
from django.db.models import Value, UUIDField
|
||||
from django.db.models.functions import Coalesce
|
||||
|
||||
# Third party modules
|
||||
from rest_framework import status
|
||||
@ -73,6 +78,9 @@ from plane.db.models import (
|
||||
WorkspaceUserProperties,
|
||||
Estimate,
|
||||
EstimatePoint,
|
||||
Module,
|
||||
ModuleLink,
|
||||
Cycle,
|
||||
)
|
||||
from plane.app.permissions import (
|
||||
WorkSpaceBasePermission,
|
||||
@ -85,6 +93,12 @@ from plane.app.permissions import (
|
||||
from plane.bgtasks.workspace_invitation_task import workspace_invitation
|
||||
from plane.utils.issue_filters import issue_filters
|
||||
from plane.bgtasks.event_tracking_task import workspace_invite_event
|
||||
from plane.app.serializers.module import (
|
||||
ModuleSerializer,
|
||||
)
|
||||
from plane.app.serializers.cycle import (
|
||||
CycleSerializer,
|
||||
)
|
||||
|
||||
|
||||
class WorkSpaceViewSet(BaseViewSet):
|
||||
@ -546,7 +560,6 @@ class WorkSpaceMemberViewSet(BaseViewSet):
|
||||
.get_queryset()
|
||||
.filter(
|
||||
workspace__slug=self.kwargs.get("slug"),
|
||||
member__is_bot=False,
|
||||
is_active=True,
|
||||
)
|
||||
.select_related("workspace", "workspace__owner")
|
||||
@ -754,7 +767,6 @@ class WorkspaceProjectMemberEndpoint(BaseAPIView):
|
||||
project_ids = (
|
||||
ProjectMember.objects.filter(
|
||||
member=request.user,
|
||||
member__is_bot=False,
|
||||
is_active=True,
|
||||
)
|
||||
.values_list("project_id", flat=True)
|
||||
@ -764,7 +776,6 @@ class WorkspaceProjectMemberEndpoint(BaseAPIView):
|
||||
# Get all the project members in which the user is involved
|
||||
project_members = ProjectMember.objects.filter(
|
||||
workspace__slug=slug,
|
||||
member__is_bot=False,
|
||||
project_id__in=project_ids,
|
||||
is_active=True,
|
||||
).select_related("project", "member", "workspace")
|
||||
@ -1075,6 +1086,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
|
||||
workspace__slug=slug,
|
||||
assignees__in=[user_id],
|
||||
project__project_projectmember__member=request.user,
|
||||
project__project_projectmember__is_active=True
|
||||
)
|
||||
.filter(**filters)
|
||||
.annotate(state_group=F("state__group"))
|
||||
@ -1090,6 +1102,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
|
||||
workspace__slug=slug,
|
||||
assignees__in=[user_id],
|
||||
project__project_projectmember__member=request.user,
|
||||
project__project_projectmember__is_active=True
|
||||
)
|
||||
.filter(**filters)
|
||||
.values("priority")
|
||||
@ -1112,6 +1125,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
|
||||
Issue.issue_objects.filter(
|
||||
workspace__slug=slug,
|
||||
project__project_projectmember__member=request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
created_by_id=user_id,
|
||||
)
|
||||
.filter(**filters)
|
||||
@ -1123,6 +1137,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
|
||||
workspace__slug=slug,
|
||||
assignees__in=[user_id],
|
||||
project__project_projectmember__member=request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
)
|
||||
.filter(**filters)
|
||||
.count()
|
||||
@ -1134,6 +1149,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
|
||||
workspace__slug=slug,
|
||||
assignees__in=[user_id],
|
||||
project__project_projectmember__member=request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
)
|
||||
.filter(**filters)
|
||||
.count()
|
||||
@ -1145,6 +1161,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
|
||||
assignees__in=[user_id],
|
||||
state__group="completed",
|
||||
project__project_projectmember__member=request.user,
|
||||
project__project_projectmember__is_active=True
|
||||
)
|
||||
.filter(**filters)
|
||||
.count()
|
||||
@ -1155,6 +1172,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView):
|
||||
workspace__slug=slug,
|
||||
subscriber_id=user_id,
|
||||
project__project_projectmember__member=request.user,
|
||||
project__project_projectmember__is_active=True
|
||||
)
|
||||
.filter(**filters)
|
||||
.count()
|
||||
@ -1204,6 +1222,7 @@ class WorkspaceUserActivityEndpoint(BaseAPIView):
|
||||
~Q(field__in=["comment", "vote", "reaction", "draft"]),
|
||||
workspace__slug=slug,
|
||||
project__project_projectmember__member=request.user,
|
||||
project__project_projectmember__is_active=True,
|
||||
actor=user_id,
|
||||
).select_related("actor", "workspace", "issue", "project")
|
||||
|
||||
@ -1234,6 +1253,7 @@ class WorkspaceUserProfileEndpoint(BaseAPIView):
|
||||
Project.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project_projectmember__member=request.user,
|
||||
project_projectmember__is_active=True,
|
||||
)
|
||||
.annotate(
|
||||
created_issues=Count(
|
||||
@ -1343,6 +1363,7 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
|
||||
| Q(issue_subscribers__subscriber_id=user_id),
|
||||
workspace__slug=slug,
|
||||
project__project_projectmember__member=request.user,
|
||||
project__project_projectmember__is_active=True
|
||||
)
|
||||
.filter(**filters)
|
||||
.select_related("workspace", "project", "state", "parent")
|
||||
@ -1370,6 +1391,32 @@ class WorkspaceUserProfileIssuesEndpoint(BaseAPIView):
|
||||
.annotate(count=Func(F("id"), function="Count"))
|
||||
.values("count")
|
||||
)
|
||||
.annotate(
|
||||
label_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"labels__id",
|
||||
distinct=True,
|
||||
filter=~Q(labels__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
assignee_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"assignees__id",
|
||||
distinct=True,
|
||||
filter=~Q(assignees__id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
module_ids=Coalesce(
|
||||
ArrayAgg(
|
||||
"issue_module__module_id",
|
||||
distinct=True,
|
||||
filter=~Q(issue_module__module_id__isnull=True),
|
||||
),
|
||||
Value([], output_field=ArrayField(UUIDField())),
|
||||
),
|
||||
)
|
||||
.order_by("created_at")
|
||||
).distinct()
|
||||
|
||||
@ -1448,6 +1495,7 @@ class WorkspaceLabelsEndpoint(BaseAPIView):
|
||||
labels = Label.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project__project_projectmember__member=request.user,
|
||||
project__project_projectmember__is_active=True
|
||||
)
|
||||
serializer = LabelSerializer(labels, many=True).data
|
||||
return Response(serializer, status=status.HTTP_200_OK)
|
||||
@ -1462,6 +1510,7 @@ class WorkspaceStatesEndpoint(BaseAPIView):
|
||||
states = State.objects.filter(
|
||||
workspace__slug=slug,
|
||||
project__project_projectmember__member=request.user,
|
||||
project__project_projectmember__is_active=True
|
||||
)
|
||||
serializer = StateSerializer(states, many=True).data
|
||||
return Response(serializer, status=status.HTTP_200_OK)
|
||||
@ -1490,6 +1539,192 @@ class WorkspaceEstimatesEndpoint(BaseAPIView):
|
||||
return Response(serializer.data, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class WorkspaceModulesEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkspaceViewerPermission,
|
||||
]
|
||||
|
||||
def get(self, request, slug):
|
||||
modules = (
|
||||
Module.objects.filter(workspace__slug=slug)
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.select_related("lead")
|
||||
.prefetch_related("members")
|
||||
.prefetch_related(
|
||||
Prefetch(
|
||||
"link_module",
|
||||
queryset=ModuleLink.objects.select_related(
|
||||
"module", "created_by"
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
total_issues=Count(
|
||||
"issue_module",
|
||||
filter=Q(
|
||||
issue_module__issue__archived_at__isnull=True,
|
||||
issue_module__issue__is_draft=False,
|
||||
),
|
||||
),
|
||||
)
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"issue_module__issue__state__group",
|
||||
filter=Q(
|
||||
issue_module__issue__state__group="completed",
|
||||
issue_module__issue__archived_at__isnull=True,
|
||||
issue_module__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
cancelled_issues=Count(
|
||||
"issue_module__issue__state__group",
|
||||
filter=Q(
|
||||
issue_module__issue__state__group="cancelled",
|
||||
issue_module__issue__archived_at__isnull=True,
|
||||
issue_module__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
started_issues=Count(
|
||||
"issue_module__issue__state__group",
|
||||
filter=Q(
|
||||
issue_module__issue__state__group="started",
|
||||
issue_module__issue__archived_at__isnull=True,
|
||||
issue_module__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
unstarted_issues=Count(
|
||||
"issue_module__issue__state__group",
|
||||
filter=Q(
|
||||
issue_module__issue__state__group="unstarted",
|
||||
issue_module__issue__archived_at__isnull=True,
|
||||
issue_module__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
backlog_issues=Count(
|
||||
"issue_module__issue__state__group",
|
||||
filter=Q(
|
||||
issue_module__issue__state__group="backlog",
|
||||
issue_module__issue__archived_at__isnull=True,
|
||||
issue_module__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||
)
|
||||
|
||||
serializer = ModuleSerializer(modules, many=True).data
|
||||
return Response(serializer, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class WorkspaceCyclesEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkspaceViewerPermission,
|
||||
]
|
||||
|
||||
def get(self, request, slug):
|
||||
cycles = (
|
||||
Cycle.objects.filter(workspace__slug=slug)
|
||||
.select_related("project")
|
||||
.select_related("workspace")
|
||||
.select_related("owned_by")
|
||||
.annotate(
|
||||
total_issues=Count(
|
||||
"issue_cycle",
|
||||
filter=Q(
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
completed_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="completed",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
cancelled_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="cancelled",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
started_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="started",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
unstarted_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="unstarted",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
backlog_issues=Count(
|
||||
"issue_cycle__issue__state__group",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="backlog",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
total_estimates=Sum("issue_cycle__issue__estimate_point")
|
||||
)
|
||||
.annotate(
|
||||
completed_estimates=Sum(
|
||||
"issue_cycle__issue__estimate_point",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="completed",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.annotate(
|
||||
started_estimates=Sum(
|
||||
"issue_cycle__issue__estimate_point",
|
||||
filter=Q(
|
||||
issue_cycle__issue__state__group="started",
|
||||
issue_cycle__issue__archived_at__isnull=True,
|
||||
issue_cycle__issue__is_draft=False,
|
||||
),
|
||||
)
|
||||
)
|
||||
.order_by(self.kwargs.get("order_by", "-created_at"))
|
||||
.distinct()
|
||||
)
|
||||
serializer = CycleSerializer(cycles, many=True).data
|
||||
return Response(serializer, status=status.HTTP_200_OK)
|
||||
|
||||
|
||||
class WorkspaceUserPropertiesEndpoint(BaseAPIView):
|
||||
permission_classes = [
|
||||
WorkspaceViewerPermission,
|
||||
|
@ -1,21 +1,33 @@
|
||||
from datetime import datetime
|
||||
from bs4 import BeautifulSoup
|
||||
|
||||
|
||||
# Third party imports
|
||||
from celery import shared_task
|
||||
from sentry_sdk import capture_exception
|
||||
|
||||
# Django imports
|
||||
from django.utils import timezone
|
||||
from django.core.mail import EmailMultiAlternatives, get_connection
|
||||
from django.template.loader import render_to_string
|
||||
from django.utils.html import strip_tags
|
||||
from django.conf import settings
|
||||
|
||||
# Module imports
|
||||
from plane.db.models import EmailNotificationLog, User, Issue
|
||||
from plane.license.utils.instance_value import get_email_configuration
|
||||
from plane.settings.redis import redis_instance
|
||||
|
||||
# acquire and delete redis lock
|
||||
def acquire_lock(lock_id, expire_time=300):
|
||||
redis_client = redis_instance()
|
||||
"""Attempt to acquire a lock with a specified expiration time."""
|
||||
return redis_client.set(lock_id, 'true', nx=True, ex=expire_time)
|
||||
|
||||
def release_lock(lock_id):
|
||||
"""Release a lock."""
|
||||
redis_client = redis_instance()
|
||||
redis_client.delete(lock_id)
|
||||
|
||||
@shared_task
|
||||
def stack_email_notification():
|
||||
# get all email notifications
|
||||
@ -142,135 +154,155 @@ def process_html_content(content):
|
||||
processed_content_list.append(processed_content)
|
||||
return processed_content_list
|
||||
|
||||
|
||||
@shared_task
|
||||
def send_email_notification(
|
||||
issue_id, notification_data, receiver_id, email_notification_ids
|
||||
):
|
||||
# Convert UUIDs to a sorted, concatenated string
|
||||
sorted_ids = sorted(email_notification_ids)
|
||||
ids_str = "_".join(str(id) for id in sorted_ids)
|
||||
lock_id = f"send_email_notif_{issue_id}_{receiver_id}_{ids_str}"
|
||||
|
||||
# acquire the lock for sending emails
|
||||
try:
|
||||
ri = redis_instance()
|
||||
base_api = (ri.get(str(issue_id)).decode())
|
||||
data = create_payload(notification_data=notification_data)
|
||||
if acquire_lock(lock_id=lock_id):
|
||||
# get the redis instance
|
||||
ri = redis_instance()
|
||||
base_api = (ri.get(str(issue_id)).decode())
|
||||
data = create_payload(notification_data=notification_data)
|
||||
|
||||
# Get email configurations
|
||||
(
|
||||
EMAIL_HOST,
|
||||
EMAIL_HOST_USER,
|
||||
EMAIL_HOST_PASSWORD,
|
||||
EMAIL_PORT,
|
||||
EMAIL_USE_TLS,
|
||||
EMAIL_FROM,
|
||||
) = get_email_configuration()
|
||||
# Get email configurations
|
||||
(
|
||||
EMAIL_HOST,
|
||||
EMAIL_HOST_USER,
|
||||
EMAIL_HOST_PASSWORD,
|
||||
EMAIL_PORT,
|
||||
EMAIL_USE_TLS,
|
||||
EMAIL_FROM,
|
||||
) = get_email_configuration()
|
||||
|
||||
receiver = User.objects.get(pk=receiver_id)
|
||||
issue = Issue.objects.get(pk=issue_id)
|
||||
template_data = []
|
||||
total_changes = 0
|
||||
comments = []
|
||||
actors_involved = []
|
||||
for actor_id, changes in data.items():
|
||||
actor = User.objects.get(pk=actor_id)
|
||||
total_changes = total_changes + len(changes)
|
||||
comment = changes.pop("comment", False)
|
||||
mention = changes.pop("mention", False)
|
||||
actors_involved.append(actor_id)
|
||||
if comment:
|
||||
comments.append(
|
||||
{
|
||||
"actor_comments": comment,
|
||||
"actor_detail": {
|
||||
"avatar_url": actor.avatar,
|
||||
"first_name": actor.first_name,
|
||||
"last_name": actor.last_name,
|
||||
},
|
||||
}
|
||||
receiver = User.objects.get(pk=receiver_id)
|
||||
issue = Issue.objects.get(pk=issue_id)
|
||||
template_data = []
|
||||
total_changes = 0
|
||||
comments = []
|
||||
actors_involved = []
|
||||
for actor_id, changes in data.items():
|
||||
actor = User.objects.get(pk=actor_id)
|
||||
total_changes = total_changes + len(changes)
|
||||
comment = changes.pop("comment", False)
|
||||
mention = changes.pop("mention", False)
|
||||
actors_involved.append(actor_id)
|
||||
if comment:
|
||||
comments.append(
|
||||
{
|
||||
"actor_comments": comment,
|
||||
"actor_detail": {
|
||||
"avatar_url": actor.avatar,
|
||||
"first_name": actor.first_name,
|
||||
"last_name": actor.last_name,
|
||||
},
|
||||
}
|
||||
)
|
||||
if mention:
|
||||
mention["new_value"] = process_html_content(mention.get("new_value"))
|
||||
mention["old_value"] = process_html_content(mention.get("old_value"))
|
||||
comments.append(
|
||||
{
|
||||
"actor_comments": mention,
|
||||
"actor_detail": {
|
||||
"avatar_url": actor.avatar,
|
||||
"first_name": actor.first_name,
|
||||
"last_name": actor.last_name,
|
||||
},
|
||||
}
|
||||
)
|
||||
activity_time = changes.pop("activity_time")
|
||||
# Parse the input string into a datetime object
|
||||
formatted_time = datetime.strptime(activity_time, "%Y-%m-%d %H:%M:%S").strftime("%H:%M %p")
|
||||
|
||||
if changes:
|
||||
template_data.append(
|
||||
{
|
||||
"actor_detail": {
|
||||
"avatar_url": actor.avatar,
|
||||
"first_name": actor.first_name,
|
||||
"last_name": actor.last_name,
|
||||
},
|
||||
"changes": changes,
|
||||
"issue_details": {
|
||||
"name": issue.name,
|
||||
"identifier": f"{issue.project.identifier}-{issue.sequence_id}",
|
||||
},
|
||||
"activity_time": str(formatted_time),
|
||||
}
|
||||
)
|
||||
if mention:
|
||||
mention["new_value"] = process_html_content(mention.get("new_value"))
|
||||
mention["old_value"] = process_html_content(mention.get("old_value"))
|
||||
comments.append(
|
||||
{
|
||||
"actor_comments": mention,
|
||||
"actor_detail": {
|
||||
"avatar_url": actor.avatar,
|
||||
"first_name": actor.first_name,
|
||||
"last_name": actor.last_name,
|
||||
},
|
||||
}
|
||||
)
|
||||
activity_time = changes.pop("activity_time")
|
||||
# Parse the input string into a datetime object
|
||||
formatted_time = datetime.strptime(activity_time, "%Y-%m-%d %H:%M:%S").strftime("%H:%M %p")
|
||||
|
||||
if changes:
|
||||
template_data.append(
|
||||
{
|
||||
"actor_detail": {
|
||||
"avatar_url": actor.avatar,
|
||||
"first_name": actor.first_name,
|
||||
"last_name": actor.last_name,
|
||||
},
|
||||
"changes": changes,
|
||||
"issue_details": {
|
||||
"name": issue.name,
|
||||
"identifier": f"{issue.project.identifier}-{issue.sequence_id}",
|
||||
},
|
||||
"activity_time": str(formatted_time),
|
||||
}
|
||||
)
|
||||
summary = "Updates were made to the issue by"
|
||||
|
||||
summary = "Updates were made to the issue by"
|
||||
|
||||
# Send the mail
|
||||
subject = f"{issue.project.identifier}-{issue.sequence_id} {issue.name}"
|
||||
context = {
|
||||
"data": template_data,
|
||||
"summary": summary,
|
||||
"actors_involved": len(set(actors_involved)),
|
||||
"issue": {
|
||||
"issue_identifier": f"{str(issue.project.identifier)}-{str(issue.sequence_id)}",
|
||||
"name": issue.name,
|
||||
# Send the mail
|
||||
subject = f"{issue.project.identifier}-{issue.sequence_id} {issue.name}"
|
||||
context = {
|
||||
"data": template_data,
|
||||
"summary": summary,
|
||||
"actors_involved": len(set(actors_involved)),
|
||||
"issue": {
|
||||
"issue_identifier": f"{str(issue.project.identifier)}-{str(issue.sequence_id)}",
|
||||
"name": issue.name,
|
||||
"issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}",
|
||||
},
|
||||
"receiver": {
|
||||
"email": receiver.email,
|
||||
},
|
||||
"issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}",
|
||||
},
|
||||
"receiver": {
|
||||
"email": receiver.email,
|
||||
},
|
||||
"issue_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/{str(issue.id)}",
|
||||
"project_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/",
|
||||
"workspace":str(issue.project.workspace.slug),
|
||||
"project": str(issue.project.name),
|
||||
"user_preference": f"{base_api}/profile/preferences/email",
|
||||
"comments": comments,
|
||||
}
|
||||
html_content = render_to_string(
|
||||
"emails/notifications/issue-updates.html", context
|
||||
)
|
||||
text_content = strip_tags(html_content)
|
||||
|
||||
try:
|
||||
connection = get_connection(
|
||||
host=EMAIL_HOST,
|
||||
port=int(EMAIL_PORT),
|
||||
username=EMAIL_HOST_USER,
|
||||
password=EMAIL_HOST_PASSWORD,
|
||||
use_tls=EMAIL_USE_TLS == "1",
|
||||
"project_url": f"{base_api}/{str(issue.project.workspace.slug)}/projects/{str(issue.project.id)}/issues/",
|
||||
"workspace":str(issue.project.workspace.slug),
|
||||
"project": str(issue.project.name),
|
||||
"user_preference": f"{base_api}/profile/preferences/email",
|
||||
"comments": comments,
|
||||
}
|
||||
html_content = render_to_string(
|
||||
"emails/notifications/issue-updates.html", context
|
||||
)
|
||||
text_content = strip_tags(html_content)
|
||||
|
||||
msg = EmailMultiAlternatives(
|
||||
subject=subject,
|
||||
body=text_content,
|
||||
from_email=EMAIL_FROM,
|
||||
to=[receiver.email],
|
||||
connection=connection,
|
||||
)
|
||||
msg.attach_alternative(html_content, "text/html")
|
||||
msg.send()
|
||||
try:
|
||||
connection = get_connection(
|
||||
host=EMAIL_HOST,
|
||||
port=int(EMAIL_PORT),
|
||||
username=EMAIL_HOST_USER,
|
||||
password=EMAIL_HOST_PASSWORD,
|
||||
use_tls=EMAIL_USE_TLS == "1",
|
||||
)
|
||||
|
||||
EmailNotificationLog.objects.filter(
|
||||
pk__in=email_notification_ids
|
||||
).update(sent_at=timezone.now())
|
||||
msg = EmailMultiAlternatives(
|
||||
subject=subject,
|
||||
body=text_content,
|
||||
from_email=EMAIL_FROM,
|
||||
to=[receiver.email],
|
||||
connection=connection,
|
||||
)
|
||||
msg.attach_alternative(html_content, "text/html")
|
||||
msg.send()
|
||||
|
||||
EmailNotificationLog.objects.filter(
|
||||
pk__in=email_notification_ids
|
||||
).update(sent_at=timezone.now())
|
||||
|
||||
# release the lock
|
||||
release_lock(lock_id=lock_id)
|
||||
return
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
# release the lock
|
||||
release_lock(lock_id=lock_id)
|
||||
return
|
||||
else:
|
||||
print("Duplicate task recived. Skipping...")
|
||||
return
|
||||
except Exception as e:
|
||||
except (Issue.DoesNotExist, User.DoesNotExist) as e:
|
||||
if settings.DEBUG:
|
||||
print(e)
|
||||
return
|
||||
except Issue.DoesNotExist:
|
||||
release_lock(lock_id=lock_id)
|
||||
return
|
||||
|
@ -292,6 +292,7 @@ def issue_export_task(
|
||||
workspace__id=workspace_id,
|
||||
project_id__in=project_ids,
|
||||
project__project_projectmember__member=exporter_instance.initiated_by_id,
|
||||
project__project_projectmember__is_active=True
|
||||
)
|
||||
.select_related(
|
||||
"project", "workspace", "state", "parent", "created_by"
|
||||
|
@ -60,15 +60,6 @@ def service_importer(service, importer_id):
|
||||
batch_size=100,
|
||||
)
|
||||
|
||||
_ = [
|
||||
send_welcome_slack.delay(
|
||||
str(user.id),
|
||||
True,
|
||||
f"{user.email} was imported to Plane from {service}",
|
||||
)
|
||||
for user in new_users
|
||||
]
|
||||
|
||||
workspace_users = User.objects.filter(
|
||||
email__in=[
|
||||
user.get("email").strip().lower()
|
||||
|
@ -483,17 +483,23 @@ def track_archive_at(
|
||||
)
|
||||
)
|
||||
else:
|
||||
if requested_data.get("automation"):
|
||||
comment = "Plane has archived the issue"
|
||||
new_value = "archive"
|
||||
else:
|
||||
comment = "Actor has archived the issue"
|
||||
new_value = "manual_archive"
|
||||
issue_activities.append(
|
||||
IssueActivity(
|
||||
issue_id=issue_id,
|
||||
project_id=project_id,
|
||||
workspace_id=workspace_id,
|
||||
comment="Plane has archived the issue",
|
||||
comment=comment,
|
||||
verb="updated",
|
||||
actor_id=actor_id,
|
||||
field="archived_at",
|
||||
old_value=None,
|
||||
new_value="archive",
|
||||
new_value=new_value,
|
||||
epoch=epoch,
|
||||
)
|
||||
)
|
||||
|
@ -79,7 +79,7 @@ def archive_old_issues():
|
||||
issue_activity.delay(
|
||||
type="issue.activity.updated",
|
||||
requested_data=json.dumps(
|
||||
{"archived_at": str(archive_at)}
|
||||
{"archived_at": str(archive_at), "automation": True}
|
||||
),
|
||||
actor_id=str(project.created_by_id),
|
||||
issue_id=issue.id,
|
||||
|
@ -12,15 +12,9 @@ from django.contrib.auth.models import (
|
||||
PermissionsMixin,
|
||||
)
|
||||
from django.db.models.signals import post_save
|
||||
from django.conf import settings
|
||||
from django.dispatch import receiver
|
||||
from django.utils import timezone
|
||||
|
||||
# Third party imports
|
||||
from sentry_sdk import capture_exception
|
||||
from slack_sdk import WebClient
|
||||
from slack_sdk.errors import SlackApiError
|
||||
|
||||
|
||||
def get_default_onboarding():
|
||||
return {
|
||||
@ -144,25 +138,6 @@ class User(AbstractBaseUser, PermissionsMixin):
|
||||
super(User, self).save(*args, **kwargs)
|
||||
|
||||
|
||||
@receiver(post_save, sender=User)
|
||||
def send_welcome_slack(sender, instance, created, **kwargs):
|
||||
try:
|
||||
if created and not instance.is_bot:
|
||||
# Send message on slack as well
|
||||
if settings.SLACK_BOT_TOKEN:
|
||||
client = WebClient(token=settings.SLACK_BOT_TOKEN)
|
||||
try:
|
||||
_ = client.chat_postMessage(
|
||||
channel="#trackers",
|
||||
text=f"New user {instance.email} has signed up and begun the onboarding journey.",
|
||||
)
|
||||
except SlackApiError as e:
|
||||
print(f"Got an error: {e.response['error']}")
|
||||
return
|
||||
except Exception as e:
|
||||
capture_exception(e)
|
||||
return
|
||||
|
||||
|
||||
@receiver(post_save, sender=User)
|
||||
def create_user_notification(sender, instance, created, **kwargs):
|
||||
|
@ -1,4 +1,5 @@
|
||||
"""Global Settings"""
|
||||
|
||||
# Python imports
|
||||
import os
|
||||
import ssl
|
||||
@ -307,7 +308,9 @@ if bool(os.environ.get("SENTRY_DSN", False)) and os.environ.get(
|
||||
traces_sample_rate=1,
|
||||
send_default_pii=True,
|
||||
environment=os.environ.get("SENTRY_ENVIRONMENT", "development"),
|
||||
profiles_sample_rate=1.0,
|
||||
profiles_sample_rate=float(
|
||||
os.environ.get("SENTRY_PROFILE_SAMPLE_RATE", 0.5)
|
||||
),
|
||||
)
|
||||
|
||||
|
||||
|
@ -9,11 +9,11 @@ from plane.db.models import Issue
|
||||
|
||||
|
||||
def search_issues(query, queryset):
|
||||
fields = ["name", "sequence_id"]
|
||||
fields = ["name", "sequence_id", "project__identifier"]
|
||||
q = Q()
|
||||
for field in fields:
|
||||
if field == "sequence_id" and len(query) <= 20:
|
||||
sequences = re.findall(r"[A-Za-z0-9]{1,12}-\d+", query)
|
||||
sequences = re.findall(r"\b\d+\b", query)
|
||||
for sequence_id in sequences:
|
||||
q |= Q(**{"sequence_id": sequence_id})
|
||||
else:
|
||||
|
@ -30,7 +30,7 @@ openpyxl==3.1.2
|
||||
beautifulsoup4==4.12.2
|
||||
dj-database-url==2.1.0
|
||||
posthog==3.0.2
|
||||
cryptography==42.0.0
|
||||
cryptography==42.0.4
|
||||
lxml==4.9.3
|
||||
boto3==1.28.40
|
||||
|
||||
|
@ -1 +1 @@
|
||||
python-3.11.7
|
||||
python-3.11.8
|
78
deploy/1-click/README.md
Normal file
78
deploy/1-click/README.md
Normal file
@ -0,0 +1,78 @@
|
||||
# 1-Click Self-Hosting
|
||||
|
||||
In this guide, we will walk you through setting up a 1-click self-hosted environment. Self-hosting gives you full control over your applications and data, and is a great way to ensure privacy and customization.
|
||||
|
||||
Let's get started!
|
||||
|
||||
## Installing Plane
|
||||
|
||||
Installing Plane is quick and takes only a few steps.
|
||||
|
||||
### Prerequisites
|
||||
|
||||
- Operating System (latest): Debian / Ubuntu / CentOS
|
||||
- Supported CPU Architecture: AMD64 / ARM64 / x86_64 / aarch64 (a quick pre-flight check is sketched after this list)
|
||||
|
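If you want to confirm these prerequisites before running the installer, a minimal pre-flight check might look like the sketch below. The exact `os-release` fields vary by distribution, so treat this as an illustration rather than part of the official installer.

```
# optional pre-flight check, assuming a distro that ships /etc/os-release
grep -E '^(NAME|VERSION)=' /etc/os-release   # expect Debian / Ubuntu / CentOS
uname -m                                     # expect x86_64 / amd64 / aarch64 / arm64
```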
||||
### Downloading Latest Stable Release
|
||||
|
||||
```
|
||||
curl -fsSL https://raw.githubusercontent.com/makeplane/plane/master/deploy/1-click/install.sh | sh -
|
||||
|
||||
```
|
||||
|
||||
<details>
|
||||
<summary>Downloading Preview Release</summary>
|
||||
|
||||
```
|
||||
export BRANCH=preview
|
||||
|
||||
curl -fsSL https://raw.githubusercontent.com/makeplane/plane/preview/deploy/1-click/install.sh | sh -
|
||||
|
||||
```
|
||||
|
||||
NOTE: `Preview` builds do not support ARM64/AARCH64 CPU architecture
|
||||
</details>
|
||||
|
||||
---
|
||||
|
||||
|
||||
Expect the following output after a successful install:
|
||||
|
||||
![Install Output](images/install.png)
|
||||
|
||||
Access the application in a browser at http://server-ip-address
|
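If you prefer to verify from the command line first, a simple check like the sketch below should return HTTP response headers once the proxy is up. The address is a placeholder; substitute your server's IP.

```
# quick reachability check (placeholder address, not a real host)
curl -I http://server-ip-address
```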
||||
|
||||
---
|
||||
|
||||
### Get Control of your Plane Server Setup
|
||||
|
||||
The Plane app is managed via the `plane-app` command. Run `plane-app --help` to see all available options for managing Plane.
|
||||
|
||||
![Plane Help](images/help.png)
|
||||
|
||||
<ins>Basic Operations</ins>:
|
||||
1. Start Server using `plane-app start`
|
||||
1. Stop Server using `plane-app stop`
|
||||
1. Restart Server using `plane-app restart`
|
||||
|
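A typical session using these basic commands might look like the sketch below; the inline comments describe the documented behaviour, and the real console output will differ.

```
# day-to-day lifecycle commands
plane-app --help     # list everything plane-app can do
plane-app start      # start the Plane server
plane-app restart    # restart the Plane server, e.g. after changing the configuration
plane-app stop       # stop the Plane server
```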
||||
<ins>Advanced Operations</ins>:
|
||||
1. Configure Plane using `plane-app --configure`. This will give you options to modify
|
||||
- NGINX Port (default 80)
|
||||
- Domain Name (default is the local server public IP address)
|
||||
- File Upload Size (default 5MB)
|
||||
- External Postgres DB Url (optional - default empty)
|
||||
- External Redis URL (optional - default empty)
|
||||
- AWS S3 Bucket (optional - to be configured only in case the user wants to use an S3 Bucket)
|
||||
|
||||
1. Upgrade Plane using `plane-app --upgrade`. This fetches the latest stable version of the Plane files (docker-compose.yaml, .env, and Docker images). A typical upgrade sequence is sketched after this list.
|
||||
|
||||
1. Update the installer itself using `plane-app --update-installer`, which refreshes the `plane-app` utility.
|
||||
|
||||
1. Uninstall Plane using `plane-app --uninstall`. This removes the Plane application and all Docker containers from the server, but does not remove the data stored in Postgres, Redis, and Minio.
|
||||
|
||||
1. Plane App can be reinstalled using `plane-app --install`.
|
||||
|
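Putting the advanced operations together, a routine maintenance run could look like the sketch below. Whether you refresh the installer first is a judgment call; the flags themselves are the ones documented above.

```
# keep the installer current, then upgrade Plane itself
plane-app --update-installer
plane-app --upgrade

# if the upgraded stack misbehaves, a restart is usually the first thing to try
plane-app restart
```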
||||
<ins>Application Data is stored in the mentioned folders</ins>:
|
||||
1. DB Data: /opt/plane/data/postgres
|
||||
1. Redis Data: /opt/plane/data/redis
|
||||
1. Minio Data: /opt/plane/data/minio
|
BIN
deploy/1-click/images/help.png
Normal file
BIN
deploy/1-click/images/help.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 109 KiB |
BIN
deploy/1-click/images/install.png
Normal file
BIN
deploy/1-click/images/install.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 173 KiB |
@ -1,17 +1,20 @@
|
||||
#!/bin/bash
|
||||
|
||||
export GIT_REPO=makeplane/plane
|
||||
|
||||
# Download the plane-app utility with curl if available, otherwise fall back to wget
|
||||
if command -v curl &> /dev/null; then
|
||||
sudo curl -sSL \
|
||||
-o /usr/local/bin/plane-app \
|
||||
https://raw.githubusercontent.com/makeplane/plane/${BRANCH:-master}/deploy/1-click/plane-app?token=$(date +%s)
|
||||
https://raw.githubusercontent.com/$GIT_REPO/${BRANCH:-master}/deploy/1-click/plane-app?token=$(date +%s)
|
||||
else
|
||||
sudo wget -q \
|
||||
-O /usr/local/bin/plane-app \
|
||||
https://raw.githubusercontent.com/makeplane/plane/${BRANCH:-master}/deploy/1-click/plane-app?token=$(date +%s)
|
||||
https://raw.githubusercontent.com/$GIT_REPO/${BRANCH:-master}/deploy/1-click/plane-app?token=$(date +%s)
|
||||
fi
|
||||
|
||||
sudo chmod +x /usr/local/bin/plane-app
|
||||
sudo sed -i 's/export DEPLOY_BRANCH=${BRANCH:-master}/export DEPLOY_BRANCH='${BRANCH:-master}'/' /usr/local/bin/plane-app
|
||||
sudo sed -i 's@export DEPLOY_BRANCH=${BRANCH:-master}@export DEPLOY_BRANCH='${BRANCH:-master}'@' /usr/local/bin/plane-app
|
||||
sudo sed -i 's@CODE_REPO=${GIT_REPO:-makeplane/plane}@CODE_REPO='$GIT_REPO'@' /usr/local/bin/plane-app
|
||||
|
||||
plane-app --help
|
||||
plane-app -i #--help
|
||||
|
@ -90,9 +90,9 @@ function prepare_environment() {
|
||||
|
||||
show_message "- Updating OS with required tools ✋" >&2
|
||||
sudo "$PACKAGE_MANAGER" update -y
|
||||
sudo "$PACKAGE_MANAGER" upgrade -y
|
||||
# sudo "$PACKAGE_MANAGER" upgrade -y
|
||||
|
||||
local required_tools=("curl" "awk" "wget" "nano" "dialog" "git" "uidmap")
|
||||
local required_tools=("curl" "awk" "wget" "nano" "dialog" "git" "uidmap" "jq")
|
||||
|
||||
for tool in "${required_tools[@]}"; do
|
||||
if ! command -v $tool &> /dev/null; then
|
||||
@ -150,11 +150,11 @@ function download_plane() {
|
||||
show_message "Downloading Plane Setup Files ✋" >&2
|
||||
sudo curl -H 'Cache-Control: no-cache, no-store' \
|
||||
-s -o $PLANE_INSTALL_DIR/docker-compose.yaml \
|
||||
https://raw.githubusercontent.com/makeplane/plane/$DEPLOY_BRANCH/deploy/selfhost/docker-compose.yml?token=$(date +%s)
|
||||
https://raw.githubusercontent.com/$CODE_REPO/$DEPLOY_BRANCH/deploy/selfhost/docker-compose.yml?token=$(date +%s)
|
||||
|
||||
sudo curl -H 'Cache-Control: no-cache, no-store' \
|
||||
-s -o $PLANE_INSTALL_DIR/variables-upgrade.env \
|
||||
https://raw.githubusercontent.com/makeplane/plane/$DEPLOY_BRANCH/deploy/selfhost/variables.env?token=$(date +%s)
|
||||
https://raw.githubusercontent.com/$CODE_REPO/$DEPLOY_BRANCH/deploy/selfhost/variables.env?token=$(date +%s)
|
||||
|
||||
# if .env does not exist, rename variables-upgrade.env to .env
|
||||
if [ ! -f "$PLANE_INSTALL_DIR/.env" ]; then
|
||||
@ -202,7 +202,7 @@ function printUsageInstructions() {
|
||||
}
|
||||
function build_local_image() {
|
||||
show_message "- Downloading Plane Source Code ✋" >&2
|
||||
REPO=https://github.com/makeplane/plane.git
|
||||
REPO=https://github.com/$CODE_REPO.git
|
||||
CURR_DIR=$PWD
|
||||
PLANE_TEMP_CODE_DIR=$PLANE_INSTALL_DIR/temp
|
||||
sudo rm -rf $PLANE_TEMP_CODE_DIR > /dev/null
|
||||
@ -290,40 +290,40 @@ function configure_plane() {
|
||||
fi
|
||||
|
||||
|
||||
smtp_host=$(read_env "EMAIL_HOST")
|
||||
smtp_user=$(read_env "EMAIL_HOST_USER")
|
||||
smtp_password=$(read_env "EMAIL_HOST_PASSWORD")
|
||||
smtp_port=$(read_env "EMAIL_PORT")
|
||||
smtp_from=$(read_env "EMAIL_FROM")
|
||||
smtp_tls=$(read_env "EMAIL_USE_TLS")
|
||||
smtp_ssl=$(read_env "EMAIL_USE_SSL")
|
||||
# smtp_host=$(read_env "EMAIL_HOST")
|
||||
# smtp_user=$(read_env "EMAIL_HOST_USER")
|
||||
# smtp_password=$(read_env "EMAIL_HOST_PASSWORD")
|
||||
# smtp_port=$(read_env "EMAIL_PORT")
|
||||
# smtp_from=$(read_env "EMAIL_FROM")
|
||||
# smtp_tls=$(read_env "EMAIL_USE_TLS")
|
||||
# smtp_ssl=$(read_env "EMAIL_USE_SSL")
|
||||
|
||||
SMTP_SETTINGS=$(dialog \
|
||||
--ok-label "Next" \
|
||||
--cancel-label "Skip" \
|
||||
--backtitle "Plane Configuration" \
|
||||
--title "SMTP Settings" \
|
||||
--form "" \
|
||||
0 0 0 \
|
||||
"Host:" 1 1 "$smtp_host" 1 10 80 0 \
|
||||
"User:" 2 1 "$smtp_user" 2 10 80 0 \
|
||||
"Password:" 3 1 "$smtp_password" 3 10 80 0 \
|
||||
"Port:" 4 1 "${smtp_port:-587}" 4 10 5 0 \
|
||||
"From:" 5 1 "${smtp_from:-Mailer <mailer@example.com>}" 5 10 80 0 \
|
||||
"TLS:" 6 1 "${smtp_tls:-1}" 6 10 1 1 \
|
||||
"SSL:" 7 1 "${smtp_ssl:-0}" 7 10 1 1 \
|
||||
2>&1 1>&3)
|
||||
# SMTP_SETTINGS=$(dialog \
|
||||
# --ok-label "Next" \
|
||||
# --cancel-label "Skip" \
|
||||
# --backtitle "Plane Configuration" \
|
||||
# --title "SMTP Settings" \
|
||||
# --form "" \
|
||||
# 0 0 0 \
|
||||
# "Host:" 1 1 "$smtp_host" 1 10 80 0 \
|
||||
# "User:" 2 1 "$smtp_user" 2 10 80 0 \
|
||||
# "Password:" 3 1 "$smtp_password" 3 10 80 0 \
|
||||
# "Port:" 4 1 "${smtp_port:-587}" 4 10 5 0 \
|
||||
# "From:" 5 1 "${smtp_from:-Mailer <mailer@example.com>}" 5 10 80 0 \
|
||||
# "TLS:" 6 1 "${smtp_tls:-1}" 6 10 1 1 \
|
||||
# "SSL:" 7 1 "${smtp_ssl:-0}" 7 10 1 1 \
|
||||
# 2>&1 1>&3)
|
||||
|
||||
save_smtp_settings=0
|
||||
if [ $? -eq 0 ]; then
|
||||
save_smtp_settings=1
|
||||
smtp_host=$(echo "$SMTP_SETTINGS" | sed -n 1p)
|
||||
smtp_user=$(echo "$SMTP_SETTINGS" | sed -n 2p)
|
||||
smtp_password=$(echo "$SMTP_SETTINGS" | sed -n 3p)
|
||||
smtp_port=$(echo "$SMTP_SETTINGS" | sed -n 4p)
|
||||
smtp_from=$(echo "$SMTP_SETTINGS" | sed -n 5p)
|
||||
smtp_tls=$(echo "$SMTP_SETTINGS" | sed -n 6p)
|
||||
fi
|
||||
# save_smtp_settings=0
|
||||
# if [ $? -eq 0 ]; then
|
||||
# save_smtp_settings=1
|
||||
# smtp_host=$(echo "$SMTP_SETTINGS" | sed -n 1p)
|
||||
# smtp_user=$(echo "$SMTP_SETTINGS" | sed -n 2p)
|
||||
# smtp_password=$(echo "$SMTP_SETTINGS" | sed -n 3p)
|
||||
# smtp_port=$(echo "$SMTP_SETTINGS" | sed -n 4p)
|
||||
# smtp_from=$(echo "$SMTP_SETTINGS" | sed -n 5p)
|
||||
# smtp_tls=$(echo "$SMTP_SETTINGS" | sed -n 6p)
|
||||
# fi
|
||||
external_pgdb_url=$(dialog \
|
||||
--backtitle "Plane Configuration" \
|
||||
--title "Using External Postgres Database ?" \
|
||||
@ -383,15 +383,6 @@ function configure_plane() {
|
||||
domain_name: $domain_name
|
||||
upload_limit: $upload_limit
|
||||
|
||||
save_smtp_settings: $save_smtp_settings
|
||||
smtp_host: $smtp_host
|
||||
smtp_user: $smtp_user
|
||||
smtp_password: $smtp_password
|
||||
smtp_port: $smtp_port
|
||||
smtp_from: $smtp_from
|
||||
smtp_tls: $smtp_tls
|
||||
smtp_ssl: $smtp_ssl
|
||||
|
||||
save_aws_settings: $save_aws_settings
|
||||
aws_region: $aws_region
|
||||
aws_access_key: $aws_access_key
|
||||
@ -413,15 +404,15 @@ function configure_plane() {
|
||||
fi
|
||||
|
||||
# check enable smpt settings value
|
||||
if [ $save_smtp_settings == 1 ]; then
|
||||
update_env "EMAIL_HOST" "$smtp_host"
|
||||
update_env "EMAIL_HOST_USER" "$smtp_user"
|
||||
update_env "EMAIL_HOST_PASSWORD" "$smtp_password"
|
||||
update_env "EMAIL_PORT" "$smtp_port"
|
||||
update_env "EMAIL_FROM" "$smtp_from"
|
||||
update_env "EMAIL_USE_TLS" "$smtp_tls"
|
||||
update_env "EMAIL_USE_SSL" "$smtp_ssl"
|
||||
fi
|
||||
# if [ $save_smtp_settings == 1 ]; then
|
||||
# update_env "EMAIL_HOST" "$smtp_host"
|
||||
# update_env "EMAIL_HOST_USER" "$smtp_user"
|
||||
# update_env "EMAIL_HOST_PASSWORD" "$smtp_password"
|
||||
# update_env "EMAIL_PORT" "$smtp_port"
|
||||
# update_env "EMAIL_FROM" "$smtp_from"
|
||||
# update_env "EMAIL_USE_TLS" "$smtp_tls"
|
||||
# update_env "EMAIL_USE_SSL" "$smtp_ssl"
|
||||
# fi
|
||||
|
||||
# check enable aws settings value
|
||||
if [[ $save_aws_settings == 1 && $aws_access_key != "" && $aws_secret_key != "" ]] ; then
|
||||
@ -493,13 +484,24 @@ function install() {
|
||||
check_for_docker_images
|
||||
|
||||
last_installed_on=$(read_config "INSTALLATION_DATE")
|
||||
if [ "$last_installed_on" == "" ]; then
|
||||
configure_plane
|
||||
fi
|
||||
printUsageInstructions
|
||||
|
||||
update_config "INSTALLATION_DATE" "$(date)"
|
||||
# if [ "$last_installed_on" == "" ]; then
|
||||
# configure_plane
|
||||
# fi
|
||||
|
||||
update_env "NGINX_PORT" "80"
|
||||
update_env "DOMAIN_NAME" "$MY_IP"
|
||||
update_env "WEB_URL" "http://$MY_IP"
|
||||
update_env "CORS_ALLOWED_ORIGINS" "http://$MY_IP"
|
||||
|
||||
update_config "INSTALLATION_DATE" "$(date '+%Y-%m-%d')"
|
||||
|
||||
if command -v crontab &> /dev/null; then
|
||||
sudo touch /etc/cron.daily/makeplane
|
||||
sudo chmod +x /etc/cron.daily/makeplane
|
||||
sudo echo "0 2 * * * root /usr/local/bin/plane-app --upgrade" > /etc/cron.daily/makeplane
|
||||
sudo crontab /etc/cron.daily/makeplane
|
||||
fi
|
||||
|
||||
show_message "Plane Installed Successfully ✅"
|
||||
show_message ""
|
||||
else
|
||||
@ -539,12 +541,15 @@ function upgrade() {
|
||||
prepare_environment
|
||||
|
||||
if [ $? -eq 0 ]; then
|
||||
stop_server
|
||||
download_plane
|
||||
if [ $? -eq 0 ]; then
|
||||
check_for_docker_images
|
||||
upgrade_configuration
|
||||
update_config "UPGRADE_DATE" "$(date)"
|
||||
|
||||
|
||||
start_server
|
||||
|
||||
show_message ""
|
||||
show_message "Plane Upgraded Successfully ✅"
|
||||
show_message ""
|
||||
@ -601,6 +606,11 @@ function uninstall() {
|
||||
sudo rm $PLANE_INSTALL_DIR/variables-upgrade.env &> /dev/null
|
||||
sudo rm $PLANE_INSTALL_DIR/config.env &> /dev/null
|
||||
sudo rm $PLANE_INSTALL_DIR/docker-compose.yaml &> /dev/null
|
||||
|
||||
if command -v crontab &> /dev/null; then
|
||||
sudo crontab -r &> /dev/null
|
||||
sudo rm /etc/cron.daily/makeplane &> /dev/null
|
||||
fi
|
||||
|
||||
# rm -rf $PLANE_INSTALL_DIR &> /dev/null
|
||||
show_message "- Configuration Cleaned ✅"
|
||||
@ -642,7 +652,39 @@ function start_server() {
|
||||
while ! sudo docker compose -f "$docker_compose_file" --env-file="$env_file" ps --services --filter "status=running" --quiet | grep -q "."; do
|
||||
sleep 1
|
||||
done
|
||||
# wait for migrator container to exit with status 0 before starting the application
|
||||
migrator_container_id=$(sudo docker container ls -aq -f "name=plane-migrator")
|
||||
|
||||
# if migrator container is running, wait for it to exit
|
||||
if [ -n "$migrator_container_id" ]; then
|
||||
while sudo docker inspect --format='{{.State.Status}}' $migrator_container_id | grep -q "running"; do
|
||||
show_message "Waiting for Plane Server ($APP_RELEASE) to start...✋ (Migrator in progress)" "replace_last_line" >&2
|
||||
sleep 1
|
||||
done
|
||||
fi
|
||||
|
||||
# if migrator exit status is not 0, show error message and exit
|
||||
if [ -n "$migrator_container_id" ]; then
|
||||
migrator_exit_code=$(sudo docker inspect --format='{{.State.ExitCode}}' $migrator_container_id)
|
||||
if [ $migrator_exit_code -ne 0 ]; then
|
||||
# show_message "Migrator failed with exit code $migrator_exit_code ❌" "replace_last_line" >&2
|
||||
show_message "Plane Server failed to start ❌" "replace_last_line" >&2
|
||||
stop_server
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
api_container_id=$(sudo docker container ls -q -f "name=plane-api")
|
||||
while ! sudo docker logs $api_container_id 2>&1 | grep -i "Application startup complete";
|
||||
do
|
||||
show_message "Waiting for Plane Server ($APP_RELEASE) to start...✋ (API starting)" "replace_last_line" >&2
|
||||
sleep 1
|
||||
done
|
||||
show_message "Plane Server Started ($APP_RELEASE) ✅" "replace_last_line" >&2
|
||||
show_message "---------------------------------------------------------------" >&2
|
||||
show_message "Access the Plane application at http://$MY_IP" >&2
|
||||
show_message "---------------------------------------------------------------" >&2
|
||||
|
||||
else
|
||||
show_message "Plane Server not installed. Please install Plane first ❌" "replace_last_line" >&2
|
||||
fi
|
||||
@ -694,7 +736,7 @@ function update_installer() {
|
||||
show_message "Updating Plane Installer ✋" >&2
|
||||
sudo curl -H 'Cache-Control: no-cache, no-store' \
|
||||
-s -o /usr/local/bin/plane-app \
|
||||
https://raw.githubusercontent.com/makeplane/plane/$DEPLOY_BRANCH/deploy/1-click/plane-app?token=$(date +%s)
|
||||
https://raw.githubusercontent.com/$CODE_REPO/$DEPLOY_BRANCH/deploy/1-click/plane-app?token=$(date +%s)
|
||||
|
||||
sudo chmod +x /usr/local/bin/plane-app > /dev/null&> /dev/null
|
||||
show_message "Plane Installer Updated ✅" "replace_last_line" >&2
|
||||
@ -711,12 +753,14 @@ fi
|
||||
|
||||
PLANE_INSTALL_DIR=/opt/plane
|
||||
DATA_DIR=$PLANE_INSTALL_DIR/data
|
||||
LOG_DIR=$PLANE_INSTALL_DIR/log
|
||||
LOG_DIR=$PLANE_INSTALL_DIR/logs
|
||||
CODE_REPO=${GIT_REPO:-makeplane/plane}
|
||||
OS_SUPPORTED=false
|
||||
CPU_ARCH=$(uname -m)
|
||||
PROGRESS_MSG=""
|
||||
USE_GLOBAL_IMAGES=0
|
||||
PACKAGE_MANAGER=""
|
||||
MY_IP=$(curl -s ifconfig.me)
|
||||
|
||||
if [[ $CPU_ARCH == "amd64" || $CPU_ARCH == "x86_64" || ( $DEPLOY_BRANCH == "master" && ( $CPU_ARCH == "arm64" || $CPU_ARCH == "aarch64" ) ) ]]; then
|
||||
USE_GLOBAL_IMAGES=1
|
||||
@ -740,6 +784,9 @@ elif [ "$1" == "restart" ]; then
|
||||
restart_server
|
||||
elif [ "$1" == "--install" ] || [ "$1" == "-i" ]; then
|
||||
install
|
||||
start_server
|
||||
show_message "" >&2
|
||||
show_message "To view help, use plane-app --help " >&2
|
||||
elif [ "$1" == "--configure" ] || [ "$1" == "-c" ]; then
|
||||
configure_plane
|
||||
printUsageInstructions
|
||||
|
@ -56,8 +56,6 @@ x-app-env : &app-env
|
||||
- BUCKET_NAME=${BUCKET_NAME:-uploads}
|
||||
- FILE_SIZE_LIMIT=${FILE_SIZE_LIMIT:-5242880}
|
||||
|
||||
|
||||
|
||||
services:
|
||||
web:
|
||||
<<: *app-env
|
||||
@ -138,7 +136,6 @@ services:
|
||||
command: postgres -c 'max_connections=1000'
|
||||
volumes:
|
||||
- pgdata:/var/lib/postgresql/data
|
||||
|
||||
plane-redis:
|
||||
<<: *app-env
|
||||
image: redis:6.2.7-alpine
|
||||
|
@ -13,6 +13,23 @@ YELLOW='\033[1;33m'
|
||||
GREEN='\033[0;32m'
|
||||
NC='\033[0m' # No Color
|
||||
|
||||
function print_header() {
|
||||
clear
|
||||
|
||||
cat <<"EOF"
|
||||
---------------------------------------
|
||||
____ _
|
||||
| _ \| | __ _ _ __ ___
|
||||
| |_) | |/ _` | '_ \ / _ \
|
||||
| __/| | (_| | | | | __/
|
||||
|_| |_|\__,_|_| |_|\___|
|
||||
|
||||
---------------------------------------
|
||||
Project management tool from the future
|
||||
---------------------------------------
|
||||
EOF
|
||||
}
|
||||
|
||||
function buildLocalImage() {
|
||||
if [ "$1" == "--force-build" ]; then
|
||||
DO_BUILD="1"
|
||||
@ -110,7 +127,7 @@ function download() {
|
||||
exit 0
|
||||
fi
|
||||
else
|
||||
docker compose -f $PLANE_INSTALL_DIR/docker-compose.yaml pull
|
||||
docker compose -f $DOCKER_FILE_PATH --env-file=$DOCKER_ENV_PATH pull
|
||||
fi
|
||||
|
||||
echo ""
|
||||
@ -121,19 +138,48 @@ function download() {
|
||||
|
||||
}
|
||||
function startServices() {
|
||||
cd $PLANE_INSTALL_DIR
|
||||
docker compose up -d --quiet-pull
|
||||
cd $SCRIPT_DIR
|
||||
docker compose -f $DOCKER_FILE_PATH --env-file=$DOCKER_ENV_PATH up -d --quiet-pull
|
||||
|
||||
local migrator_container_id=$(docker container ls -aq -f "name=plane-app-migrator")
|
||||
if [ -n "$migrator_container_id" ]; then
|
||||
local idx=0
|
||||
while docker inspect --format='{{.State.Status}}' $migrator_container_id | grep -q "running"; do
|
||||
local message=">>> Waiting for Data Migration to finish"
|
||||
local dots=$(printf '%*s' $idx | tr ' ' '.')
|
||||
echo -ne "\r$message$dots"
|
||||
((idx++))
|
||||
sleep 1
|
||||
done
|
||||
fi
|
||||
printf "\r\033[K"
|
||||
|
||||
# if migrator exit status is not 0, show error message and exit
|
||||
if [ -n "$migrator_container_id" ]; then
|
||||
local migrator_exit_code=$(docker inspect --format='{{.State.ExitCode}}' $migrator_container_id)
|
||||
if [ $migrator_exit_code -ne 0 ]; then
|
||||
echo "Plane Server failed to start ❌"
|
||||
stopServices
|
||||
exit 1
|
||||
fi
|
||||
fi
|
||||
|
||||
local api_container_id=$(docker container ls -q -f "name=plane-app-api")
|
||||
local idx2=0
|
||||
while ! docker logs $api_container_id 2>&1 | grep -m 1 -i "Application startup complete" | grep -q ".";
|
||||
do
|
||||
local message=">>> Waiting for API Service to Start"
|
||||
local dots=$(printf '%*s' $idx2 | tr ' ' '.')
|
||||
echo -ne "\r$message$dots"
|
||||
((idx2++))
|
||||
sleep 1
|
||||
done
|
||||
printf "\r\033[K"
|
||||
}
|
||||
function stopServices() {
|
||||
cd $PLANE_INSTALL_DIR
|
||||
docker compose down
|
||||
cd $SCRIPT_DIR
|
||||
docker compose -f $DOCKER_FILE_PATH --env-file=$DOCKER_ENV_PATH down
|
||||
}
|
||||
function restartServices() {
|
||||
cd $PLANE_INSTALL_DIR
|
||||
docker compose restart
|
||||
cd $SCRIPT_DIR
|
||||
docker compose -f $DOCKER_FILE_PATH --env-file=$DOCKER_ENV_PATH restart
|
||||
}
|
||||
function upgrade() {
|
||||
echo "***** STOPPING SERVICES ****"
|
||||
@ -144,47 +190,137 @@ function upgrade() {
|
||||
download
|
||||
|
||||
echo "***** PLEASE VALIDATE AND START SERVICES ****"
|
||||
}
|
||||
function viewSpecificLogs(){
|
||||
local SERVICE_NAME=$1
|
||||
|
||||
if docker-compose -f $DOCKER_FILE_PATH ps | grep -q "$SERVICE_NAME"; then
|
||||
echo "Service '$SERVICE_NAME' is running."
|
||||
else
|
||||
echo "Service '$SERVICE_NAME' is not running."
|
||||
fi
|
||||
|
||||
docker compose -f $DOCKER_FILE_PATH logs -f $SERVICE_NAME
|
||||
}
|
||||
function viewLogs(){
|
||||
|
||||
ARG_SERVICE_NAME=$2
|
||||
|
||||
if [ -z "$ARG_SERVICE_NAME" ];
|
||||
then
|
||||
echo
|
||||
echo "Select a Service you want to view the logs for:"
|
||||
echo " 1) Web"
|
||||
echo " 2) Space"
|
||||
echo " 3) API"
|
||||
echo " 4) Worker"
|
||||
echo " 5) Beat-Worker"
|
||||
echo " 6) Migrator"
|
||||
echo " 7) Proxy"
|
||||
echo " 8) Redis"
|
||||
echo " 9) Postgres"
|
||||
echo " 10) Minio"
|
||||
echo " 0) Back to Main Menu"
|
||||
echo
|
||||
read -p "Service: " DOCKER_SERVICE_NAME
|
||||
|
||||
until (( DOCKER_SERVICE_NAME >= 0 && DOCKER_SERVICE_NAME <= 10 )); do
|
||||
echo "Invalid selection. Please enter a number between 1 and 11."
|
||||
read -p "Service: " DOCKER_SERVICE_NAME
|
||||
done
|
||||
|
||||
if [ -z "$DOCKER_SERVICE_NAME" ];
|
||||
then
|
||||
echo "INVALID SERVICE NAME SUPPLIED"
|
||||
else
|
||||
case $DOCKER_SERVICE_NAME in
|
||||
1) viewSpecificLogs "web";;
|
||||
2) viewSpecificLogs "space";;
|
||||
3) viewSpecificLogs "api";;
|
||||
4) viewSpecificLogs "worker";;
|
||||
5) viewSpecificLogs "beat-worker";;
|
||||
6) viewSpecificLogs "migrator";;
|
||||
7) viewSpecificLogs "proxy";;
|
||||
8) viewSpecificLogs "plane-redis";;
|
||||
9) viewSpecificLogs "plane-db";;
|
||||
10) viewSpecificLogs "plane-minio";;
|
||||
0) askForAction;;
|
||||
*) echo "INVALID SERVICE NAME SUPPLIED";;
|
||||
esac
|
||||
fi
|
||||
elif [ -n "$ARG_SERVICE_NAME" ];
|
||||
then
|
||||
ARG_SERVICE_NAME=$(echo "$ARG_SERVICE_NAME" | tr '[:upper:]' '[:lower:]')
|
||||
case $ARG_SERVICE_NAME in
|
||||
web) viewSpecificLogs "web";;
|
||||
space) viewSpecificLogs "space";;
|
||||
api) viewSpecificLogs "api";;
|
||||
worker) viewSpecificLogs "worker";;
|
||||
beat-worker) viewSpecificLogs "beat-worker";;
|
||||
migrator) viewSpecificLogs "migrator";;
|
||||
proxy) viewSpecificLogs "proxy";;
|
||||
redis) viewSpecificLogs "plane-redis";;
|
||||
postgres) viewSpecificLogs "plane-db";;
|
||||
minio) viewSpecificLogs "plane-minio";;
|
||||
*) echo "INVALID SERVICE NAME SUPPLIED";;
|
||||
esac
|
||||
else
|
||||
echo "INVALID SERVICE NAME SUPPLIED"
|
||||
fi
|
||||
}
|
||||
function askForAction() {
|
||||
echo
|
||||
echo "Select a Action you want to perform:"
|
||||
echo " 1) Install (${CPU_ARCH})"
|
||||
echo " 2) Start"
|
||||
echo " 3) Stop"
|
||||
echo " 4) Restart"
|
||||
echo " 5) Upgrade"
|
||||
echo " 6) Exit"
|
||||
echo
|
||||
read -p "Action [2]: " ACTION
|
||||
until [[ -z "$ACTION" || "$ACTION" =~ ^[1-6]$ ]]; do
|
||||
echo "$ACTION: invalid selection."
|
||||
local DEFAULT_ACTION=$1
|
||||
|
||||
if [ -z "$DEFAULT_ACTION" ];
|
||||
then
|
||||
echo
|
||||
echo "Select a Action you want to perform:"
|
||||
echo " 1) Install (${CPU_ARCH})"
|
||||
echo " 2) Start"
|
||||
echo " 3) Stop"
|
||||
echo " 4) Restart"
|
||||
echo " 5) Upgrade"
|
||||
echo " 6) View Logs"
|
||||
echo " 7) Exit"
|
||||
echo
|
||||
read -p "Action [2]: " ACTION
|
||||
done
|
||||
echo
|
||||
until [[ -z "$ACTION" || "$ACTION" =~ ^[1-7]$ ]]; do
|
||||
echo "$ACTION: invalid selection."
|
||||
read -p "Action [2]: " ACTION
|
||||
done
|
||||
|
||||
if [ -z "$ACTION" ];
|
||||
then
|
||||
ACTION=2
|
||||
fi
|
||||
echo
|
||||
fi
|
||||
|
||||
if [ "$ACTION" == "1" ]
|
||||
if [ "$ACTION" == "1" ] || [ "$DEFAULT_ACTION" == "install" ]
|
||||
then
|
||||
install
|
||||
askForAction
|
||||
elif [ "$ACTION" == "2" ] || [ "$ACTION" == "" ]
|
||||
elif [ "$ACTION" == "2" ] || [ "$DEFAULT_ACTION" == "start" ]
|
||||
then
|
||||
startServices
|
||||
askForAction
|
||||
elif [ "$ACTION" == "3" ]
|
||||
elif [ "$ACTION" == "3" ] || [ "$DEFAULT_ACTION" == "stop" ]
|
||||
then
|
||||
stopServices
|
||||
askForAction
|
||||
elif [ "$ACTION" == "4" ]
|
||||
elif [ "$ACTION" == "4" ] || [ "$DEFAULT_ACTION" == "restart" ]
|
||||
then
|
||||
restartServices
|
||||
askForAction
|
||||
elif [ "$ACTION" == "5" ]
|
||||
elif [ "$ACTION" == "5" ] || [ "$DEFAULT_ACTION" == "upgrade" ]
|
||||
then
|
||||
upgrade
|
||||
askForAction
|
||||
elif [ "$ACTION" == "6" ]
|
||||
elif [ "$ACTION" == "6" ] || [ "$DEFAULT_ACTION" == "logs" ]
|
||||
then
|
||||
viewLogs $@
|
||||
askForAction
|
||||
elif [ "$ACTION" == "7" ]
|
||||
then
|
||||
exit 0
|
||||
else
|
||||
@ -217,4 +353,8 @@ then
|
||||
fi
|
||||
mkdir -p $PLANE_INSTALL_DIR/archive
|
||||
|
||||
askForAction
|
||||
DOCKER_FILE_PATH=$PLANE_INSTALL_DIR/docker-compose.yaml
|
||||
DOCKER_ENV_PATH=$PLANE_INSTALL_DIR/.env
|
||||
|
||||
print_header
|
||||
askForAction $@
|
||||
|
@ -137,7 +137,7 @@ services:
|
||||
dockerfile: Dockerfile.dev
|
||||
args:
|
||||
DOCKER_BUILDKIT: 1
|
||||
restart: no
|
||||
restart: "no"
|
||||
networks:
|
||||
- dev_env
|
||||
volumes:
|
||||
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"repository": "https://github.com/makeplane/plane.git",
|
||||
"version": "0.15.1",
|
||||
"version": "0.16.0",
|
||||
"license": "AGPL-3.0",
|
||||
"private": true,
|
||||
"workspaces": [
|
||||
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@plane/editor-core",
|
||||
"version": "0.15.1",
|
||||
"version": "0.16.0",
|
||||
"description": "Core Editor that powers Plane",
|
||||
"private": true,
|
||||
"main": "./dist/index.mjs",
|
||||
|
@ -97,8 +97,8 @@ export const insertTableCommand = (editor: Editor, range?: Range) => {
|
||||
}
|
||||
}
|
||||
}
|
||||
if (range) editor.chain().focus().deleteRange(range).insertTable({ rows: 3, cols: 3, withHeaderRow: true }).run();
|
||||
else editor.chain().focus().insertTable({ rows: 3, cols: 3, withHeaderRow: true }).run();
|
||||
if (range) editor.chain().focus().deleteRange(range).insertTable({ rows: 3, cols: 3 }).run();
|
||||
else editor.chain().focus().insertTable({ rows: 3, cols: 3 }).run();
|
||||
};
|
||||
|
||||
export const unsetLinkEditor = (editor: Editor) => {
|
||||
|
@ -170,68 +170,6 @@ ul[data-type="taskList"] li[data-checked="true"] > div > p {
|
||||
}
|
||||
}
|
||||
|
||||
#editor-container {
|
||||
table {
|
||||
border-collapse: collapse;
|
||||
table-layout: fixed;
|
||||
margin: 0.5em 0 0.5em 0;
|
||||
|
||||
border: 1px solid rgb(var(--color-border-200));
|
||||
width: 100%;
|
||||
|
||||
td,
|
||||
th {
|
||||
min-width: 1em;
|
||||
border: 1px solid rgb(var(--color-border-200));
|
||||
padding: 10px 15px;
|
||||
vertical-align: top;
|
||||
box-sizing: border-box;
|
||||
position: relative;
|
||||
transition: background-color 0.3s ease;
|
||||
|
||||
> * {
|
||||
margin-bottom: 0;
|
||||
}
|
||||
}
|
||||
|
||||
th {
|
||||
font-weight: bold;
|
||||
text-align: left;
|
||||
background-color: rgb(var(--color-primary-100));
|
||||
}
|
||||
|
||||
td:hover {
|
||||
background-color: rgba(var(--color-primary-300), 0.1);
|
||||
}
|
||||
|
||||
.selectedCell:after {
|
||||
z-index: 2;
|
||||
position: absolute;
|
||||
content: "";
|
||||
left: 0;
|
||||
right: 0;
|
||||
top: 0;
|
||||
bottom: 0;
|
||||
background-color: rgba(var(--color-primary-300), 0.1);
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.column-resize-handle {
|
||||
position: absolute;
|
||||
right: -2px;
|
||||
top: 0;
|
||||
bottom: -2px;
|
||||
width: 2px;
|
||||
background-color: rgb(var(--color-primary-400));
|
||||
pointer-events: none;
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
.tableWrapper {
|
||||
overflow-x: auto;
|
||||
}
|
||||
|
||||
.resize-cursor {
|
||||
cursor: ew-resize;
|
||||
cursor: col-resize;
|
||||
|
@ -9,15 +9,15 @@
|
||||
border-collapse: collapse;
|
||||
table-layout: fixed;
|
||||
margin: 0;
|
||||
margin-bottom: 3rem;
|
||||
border: 1px solid rgba(var(--color-border-200));
|
||||
margin-bottom: 1rem;
|
||||
border: 2px solid rgba(var(--color-border-300));
|
||||
width: 100%;
|
||||
}
|
||||
|
||||
.tableWrapper table td,
|
||||
.tableWrapper table th {
|
||||
min-width: 1em;
|
||||
border: 1px solid rgba(var(--color-border-200));
|
||||
border: 1px solid rgba(var(--color-border-300));
|
||||
padding: 10px 15px;
|
||||
vertical-align: top;
|
||||
box-sizing: border-box;
|
||||
@ -43,7 +43,8 @@
|
||||
.tableWrapper table th {
|
||||
font-weight: bold;
|
||||
text-align: left;
|
||||
background-color: rgba(var(--color-primary-100));
|
||||
background-color: #d9e4ff;
|
||||
color: #171717;
|
||||
}
|
||||
|
||||
.tableWrapper table th * {
|
||||
@ -62,6 +63,35 @@
|
||||
pointer-events: none;
|
||||
}
|
||||
|
||||
.colorPicker {
|
||||
display: grid;
|
||||
padding: 8px 8px;
|
||||
grid-template-columns: repeat(6, 1fr);
|
||||
gap: 5px;
|
||||
}
|
||||
|
||||
.colorPickerLabel {
|
||||
font-size: 0.85rem;
|
||||
color: #6b7280;
|
||||
padding: 8px 8px;
|
||||
padding-bottom: 0px;
|
||||
}
|
||||
|
||||
.colorPickerItem {
|
||||
margin: 2px 0px;
|
||||
width: 24px;
|
||||
height: 24px;
|
||||
border-radius: 4px;
|
||||
border: none;
|
||||
cursor: pointer;
|
||||
}
|
||||
|
||||
.divider {
|
||||
background-color: #e5e7eb;
|
||||
height: 1px;
|
||||
margin: 3px 0;
|
||||
}
|
||||
|
||||
.tableWrapper table .column-resize-handle {
|
||||
position: absolute;
|
||||
right: -2px;
|
||||
@ -112,7 +142,7 @@
|
||||
}
|
||||
|
||||
.tableWrapper .tableControls .rowsControlDiv {
|
||||
background-color: rgba(var(--color-primary-100));
|
||||
background-color: #d9e4ff;
|
||||
border: 1px solid rgba(var(--color-border-200));
|
||||
border-radius: 2px;
|
||||
background-size: 1.25rem;
|
||||
@ -127,7 +157,7 @@
|
||||
}
|
||||
|
||||
.tableWrapper .tableControls .columnsControlDiv {
|
||||
background-color: rgba(var(--color-primary-100));
|
||||
background-color: #d9e4ff;
|
||||
border: 1px solid rgba(var(--color-border-200));
|
||||
border-radius: 2px;
|
||||
background-size: 1.25rem;
|
||||
@ -144,10 +174,12 @@
|
||||
.tableWrapper .tableControls .tableColorPickerToolbox {
|
||||
border: 1px solid rgba(var(--color-border-300));
|
||||
background-color: rgba(var(--color-background-100));
|
||||
border-radius: 5px;
|
||||
box-shadow: 0px 2px 4px rgba(0, 0, 0, 0.1);
|
||||
padding: 0.25rem;
|
||||
display: flex;
|
||||
flex-direction: column;
|
||||
width: 200px;
|
||||
width: max-content;
|
||||
gap: 0.25rem;
|
||||
}
|
||||
|
||||
@ -158,7 +190,7 @@
|
||||
align-items: center;
|
||||
gap: 0.5rem;
|
||||
border: none;
|
||||
padding: 0.1rem;
|
||||
padding: 0.3rem 0.5rem 0.1rem 0.1rem;
|
||||
border-radius: 4px;
|
||||
cursor: pointer;
|
||||
transition: all 0.2s;
|
||||
@ -173,9 +205,7 @@
|
||||
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .iconContainer,
|
||||
.tableWrapper .tableControls .tableToolbox .toolboxItem .colorContainer,
|
||||
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .colorContainer {
|
||||
border: 1px solid rgba(var(--color-border-300));
|
||||
border-radius: 3px;
|
||||
padding: 4px;
|
||||
padding: 4px 0px;
|
||||
display: flex;
|
||||
align-items: center;
|
||||
justify-content: center;
|
||||
@ -187,8 +217,8 @@
|
||||
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .iconContainer svg,
|
||||
.tableWrapper .tableControls .tableToolbox .toolboxItem .colorContainer svg,
|
||||
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .colorContainer svg {
|
||||
width: 2rem;
|
||||
height: 2rem;
|
||||
width: 1rem;
|
||||
height: 1rem;
|
||||
}
|
||||
|
||||
.tableToolbox {
|
||||
|
@ -25,7 +25,8 @@ import { DeleteImage } from "src/types/delete-image";
|
||||
import { IMentionHighlight, IMentionSuggestion } from "src/types/mention-suggestion";
|
||||
import { RestoreImage } from "src/types/restore-image";
|
||||
import { CustomLinkExtension } from "src/ui/extensions/custom-link";
|
||||
import { CustomCodeInlineExtension } from "./code-inline";
|
||||
import { CustomCodeInlineExtension } from "src/ui/extensions/code-inline";
|
||||
import { CustomTypographyExtension } from "src/ui/extensions/typography";
|
||||
|
||||
export const CoreEditorExtensions = (
|
||||
mentionConfig: {
|
||||
@ -79,6 +80,7 @@ export const CoreEditorExtensions = (
|
||||
"text-custom-primary-300 underline underline-offset-[3px] hover:text-custom-primary-500 transition-colors cursor-pointer",
|
||||
},
|
||||
}),
|
||||
CustomTypographyExtension,
|
||||
ImageExtension(deleteFile, restoreFile, cancelUploadImage).configure({
|
||||
HTMLAttributes: {
|
||||
class: "rounded-lg border border-custom-border-300",
|
||||
|
@ -13,7 +13,7 @@ export const TableCell = Node.create<TableCellOptions>({
|
||||
};
|
||||
},
|
||||
|
||||
content: "paragraph+",
|
||||
content: "block+",
|
||||
|
||||
addAttributes() {
|
||||
return {
|
||||
@ -33,7 +33,10 @@ export const TableCell = Node.create<TableCellOptions>({
|
||||
},
|
||||
},
|
||||
background: {
|
||||
default: "none",
|
||||
default: null,
|
||||
},
|
||||
textColor: {
|
||||
default: null,
|
||||
},
|
||||
};
|
||||
},
|
||||
@ -50,7 +53,7 @@ export const TableCell = Node.create<TableCellOptions>({
|
||||
return [
|
||||
"td",
|
||||
mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, {
|
||||
style: `background-color: ${node.attrs.background}`,
|
||||
style: `background-color: ${node.attrs.background}; color: ${node.attrs.textColor}`,
|
||||
}),
|
||||
0,
|
||||
];
|
||||
|
@ -33,7 +33,7 @@ export const TableHeader = Node.create<TableHeaderOptions>({
|
||||
},
|
||||
},
|
||||
background: {
|
||||
default: "rgb(var(--color-primary-100))",
|
||||
default: "none",
|
||||
},
|
||||
};
|
||||
},
|
||||
|
@ -13,6 +13,17 @@ export const TableRow = Node.create<TableRowOptions>({
|
||||
};
|
||||
},
|
||||
|
||||
addAttributes() {
|
||||
return {
|
||||
background: {
|
||||
default: null,
|
||||
},
|
||||
textColor: {
|
||||
default: null,
|
||||
},
|
||||
};
|
||||
},
|
||||
|
||||
content: "(tableCell | tableHeader)*",
|
||||
|
||||
tableRole: "row",
|
||||
@ -22,6 +33,12 @@ export const TableRow = Node.create<TableRowOptions>({
|
||||
},
|
||||
|
||||
renderHTML({ HTMLAttributes }) {
|
||||
return ["tr", mergeAttributes(this.options.HTMLAttributes, HTMLAttributes), 0];
|
||||
const style = HTMLAttributes.background
|
||||
? `background-color: ${HTMLAttributes.background}; color: ${HTMLAttributes.textColor}`
|
||||
: "";
|
||||
|
||||
const attributes = mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, { style });
|
||||
|
||||
return ["tr", attributes, 0];
|
||||
},
|
||||
});
|
||||
|
@ -1,7 +1,7 @@
|
||||
export const icons = {
|
||||
colorPicker: `<svg xmlns="http://www.w3.org/2000/svg" length="24" viewBox="0 0 24 24" style="transform: ;msFilter:;"><path fill="rgb(var(--color-text-300))" d="M20 14c-.092.064-2 2.083-2 3.5 0 1.494.949 2.448 2 2.5.906.044 2-.891 2-2.5 0-1.5-1.908-3.436-2-3.5zM9.586 20c.378.378.88.586 1.414.586s1.036-.208 1.414-.586l7-7-.707-.707L11 4.586 8.707 2.293 7.293 3.707 9.586 6 4 11.586c-.378.378-.586.88-.586 1.414s.208 1.036.586 1.414L9.586 20zM11 7.414 16.586 13H5.414L11 7.414z"></path></svg>`,
|
||||
deleteColumn: `<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" length="24"><path fill="#e53e3e" d="M0 0H24V24H0z"/><path d="M12 3c.552 0 1 .448 1 1v8c.835-.628 1.874-1 3-1 2.761 0 5 2.239 5 5s-2.239 5-5 5c-1.032 0-1.99-.313-2.787-.848L13 20c0 .552-.448 1-1 1H6c-.552 0-1-.448-1-1V4c0-.552.448-1 1-1h6zm-1 2H7v14h4V5zm8 10h-6v2h6v-2z"/></svg>`,
|
||||
deleteRow: `<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" length="24"><path fill="#e53e3e" d="M0 0H24V24H0z"/><path d="M20 5c.552 0 1 .448 1 1v6c0 .552-.448 1-1 1 .628.835 1 1.874 1 3 0 2.761-2.239 5-5 5s-5-2.239-5-5c0-1.126.372-2.165 1-3H4c-.552 0-1-.448-1-1V6c0-.552.448-1 1-1h16zm-7 10v2h6v-2h-6zm6-8H5v4h14V7z"/></svg>`,
|
||||
deleteColumn: `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-trash-2"><path d="M3 6h18"/><path d="M19 6v14c0 1-1 2-2 2H7c-1 0-2-1-2-2V6"/><path d="M8 6V4c0-1 1-2 2-2h4c1 0 2 1 2 2v2"/><line x1="10" x2="10" y1="11" y2="17"/><line x1="14" x2="14" y1="11" y2="17"/></svg>`,
|
||||
deleteRow: `<svg xmlns="http://www.w3.org/2000/svg" width="16" height="16" viewBox="0 0 24 24" fill="none" stroke="currentColor" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-trash-2"><path d="M3 6h18"/><path d="M19 6v14c0 1-1 2-2 2H7c-1 0-2-1-2-2V6"/><path d="M8 6V4c0-1 1-2 2-2h4c1 0 2 1 2 2v2"/><line x1="10" x2="10" y1="11" y2="17"/><line x1="14" x2="14" y1="11" y2="17"/></svg>`,
|
||||
insertLeftTableIcon: `<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
length={24}
|
||||
@ -35,6 +35,8 @@ export const icons = {
|
||||
/>
|
||||
</svg>
|
||||
`,
|
||||
toggleColumnHeader: `<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="rgb(var(--color-text-300))" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-toggle-right"><rect width="20" height="12" x="2" y="6" rx="6" ry="6"/><circle cx="16" cy="12" r="2"/></svg>`,
|
||||
toggleRowHeader: `<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="rgb(var(--color-text-300))" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-toggle-right"><rect width="20" height="12" x="2" y="6" rx="6" ry="6"/><circle cx="16" cy="12" r="2"/></svg>`,
|
||||
insertBottomTableIcon: `<svg
|
||||
xmlns="http://www.w3.org/2000/svg"
|
||||
length={24}
|
||||
|
@ -81,53 +81,75 @@ const defaultTippyOptions: Partial<Props> = {
|
||||
placement: "right",
|
||||
};
|
||||
|
||||
function setCellsBackgroundColor(editor: Editor, backgroundColor: string) {
|
||||
function setCellsBackgroundColor(editor: Editor, color: { backgroundColor: string; textColor: string }) {
|
||||
return editor
|
||||
.chain()
|
||||
.focus()
|
||||
.updateAttributes("tableCell", {
|
||||
background: backgroundColor,
|
||||
})
|
||||
.updateAttributes("tableHeader", {
|
||||
background: backgroundColor,
|
||||
background: color.backgroundColor,
|
||||
textColor: color.textColor,
|
||||
})
|
||||
.run();
|
||||
}
|
||||
|
||||
function setTableRowBackgroundColor(editor: Editor, color: { backgroundColor: string; textColor: string }) {
|
||||
const { state, dispatch } = editor.view;
|
||||
const { selection } = state;
|
||||
if (!(selection instanceof CellSelection)) {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Get the position of the hovered cell in the selection to determine the row.
|
||||
const hoveredCell = selection.$headCell || selection.$anchorCell;
|
||||
|
||||
// Find the depth of the table row node
|
||||
let rowDepth = hoveredCell.depth;
|
||||
while (rowDepth > 0 && hoveredCell.node(rowDepth).type.name !== "tableRow") {
|
||||
rowDepth--;
|
||||
}
|
||||
|
||||
// If we couldn't find a tableRow node, we can't set the background color
|
||||
if (hoveredCell.node(rowDepth).type.name !== "tableRow") {
|
||||
return false;
|
||||
}
|
||||
|
||||
// Get the position where the table row starts
|
||||
const rowStartPos = hoveredCell.start(rowDepth);
|
||||
|
||||
// Create a transaction that sets the background color on the tableRow node.
|
||||
const tr = state.tr.setNodeMarkup(rowStartPos - 1, null, {
|
||||
...hoveredCell.node(rowDepth).attrs,
|
||||
background: color.backgroundColor,
|
||||
textColor: color.textColor,
|
||||
});
|
||||
|
||||
dispatch(tr);
|
||||
return true;
|
||||
}
|
||||
|
||||
const columnsToolboxItems: ToolboxItem[] = [
  {
    label: "Add Column Before",
    label: "Toggle column header",
    icon: icons.toggleColumnHeader,
    action: ({ editor }: { editor: Editor }) => editor.chain().focus().toggleHeaderColumn().run(),
  },
  {
    label: "Add column before",
    icon: icons.insertLeftTableIcon,
    action: ({ editor }: { editor: Editor }) => editor.chain().focus().addColumnBefore().run(),
  },
  {
    label: "Add Column After",
    label: "Add column after",
    icon: icons.insertRightTableIcon,
    action: ({ editor }: { editor: Editor }) => editor.chain().focus().addColumnAfter().run(),
  },
  {
    label: "Pick Column Color",
    icon: icons.colorPicker,
    action: ({
      editor,
      triggerButton,
      controlsContainer,
    }: {
      editor: Editor;
      triggerButton: HTMLElement;
      controlsContainer: Element;
    }) => {
      createColorPickerToolbox({
        triggerButton,
        tippyOptions: {
          appendTo: controlsContainer,
        },
        onSelectColor: (color) => setCellsBackgroundColor(editor, color),
      });
    },
    label: "Pick color",
    icon: "", // No icon needed for color picker
    action: (args: any) => {}, // Placeholder action; actual color picking is handled in `createToolbox`
  },
  {
    label: "Delete Column",
    label: "Delete column",
    icon: icons.deleteColumn,
    action: ({ editor }: { editor: Editor }) => editor.chain().focus().deleteColumn().run(),
  },

@ -135,35 +157,24 @@ const columnsToolboxItems: ToolboxItem[] = [

const rowsToolboxItems: ToolboxItem[] = [
  {
    label: "Add Row Above",
    label: "Toggle row header",
    icon: icons.toggleRowHeader,
    action: ({ editor }: { editor: Editor }) => editor.chain().focus().toggleHeaderRow().run(),
  },
  {
    label: "Add row above",
    icon: icons.insertTopTableIcon,
    action: ({ editor }: { editor: Editor }) => editor.chain().focus().addRowBefore().run(),
  },
  {
    label: "Add Row Below",
    label: "Add row below",
    icon: icons.insertBottomTableIcon,
    action: ({ editor }: { editor: Editor }) => editor.chain().focus().addRowAfter().run(),
  },
  {
    label: "Pick Row Color",
    icon: icons.colorPicker,
    action: ({
      editor,
      triggerButton,
      controlsContainer,
    }: {
      editor: Editor;
      triggerButton: HTMLButtonElement;
      controlsContainer: Element | "parent" | ((ref: Element) => Element) | undefined;
    }) => {
      createColorPickerToolbox({
        triggerButton,
        tippyOptions: {
          appendTo: controlsContainer,
        },
        onSelectColor: (color) => setCellsBackgroundColor(editor, color),
      });
    },
    label: "Pick color",
    icon: "",
    action: (args: any) => {}, // Placeholder action; actual color picking is handled in `createToolbox`
  },
  {
    label: "Delete Row",
@ -176,37 +187,57 @@ function createToolbox({
|
||||
triggerButton,
|
||||
items,
|
||||
tippyOptions,
|
||||
onSelectColor,
|
||||
onClickItem,
|
||||
colors,
|
||||
}: {
|
||||
triggerButton: Element | null;
|
||||
items: ToolboxItem[];
|
||||
tippyOptions: any;
|
||||
onClickItem: (item: ToolboxItem) => void;
|
||||
onSelectColor: (color: { backgroundColor: string; textColor: string }) => void;
|
||||
colors: { [key: string]: { backgroundColor: string; textColor: string; icon?: string } };
|
||||
}): Instance<Props> {
|
||||
// @ts-expect-error
|
||||
const toolbox = tippy(triggerButton, {
|
||||
content: h(
|
||||
"div",
|
||||
{ className: "tableToolbox" },
|
||||
items.map((item) =>
|
||||
h(
|
||||
"div",
|
||||
{
|
||||
className: "toolboxItem",
|
||||
itemType: "button",
|
||||
onClick() {
|
||||
onClickItem(item);
|
||||
items.map((item, index) => {
|
||||
if (item.label === "Pick color") {
|
||||
return h("div", { className: "flex flex-col" }, [
|
||||
h("div", { className: "divider" }),
|
||||
h("div", { className: "colorPickerLabel" }, item.label),
|
||||
h(
|
||||
"div",
|
||||
{ className: "colorPicker grid" },
|
||||
Object.entries(colors).map(([colorName, colorValue]) =>
|
||||
h("div", {
|
||||
className: "colorPickerItem",
|
||||
style: `background-color: ${colorValue.backgroundColor};
|
||||
color: ${colorValue.textColor || "inherit"};`,
|
||||
innerHTML: colorValue?.icon || "",
|
||||
onClick: () => onSelectColor(colorValue),
|
||||
})
|
||||
)
|
||||
),
|
||||
h("div", { className: "divider" }),
|
||||
]);
|
||||
} else {
|
||||
return h(
|
||||
"div",
|
||||
{
|
||||
className: "toolboxItem",
|
||||
itemType: "div",
|
||||
onClick: () => onClickItem(item),
|
||||
},
|
||||
},
|
||||
[
|
||||
h("div", {
|
||||
className: "iconContainer",
|
||||
innerHTML: item.icon,
|
||||
}),
|
||||
h("div", { className: "label" }, item.label),
|
||||
]
|
||||
)
|
||||
)
|
||||
[
|
||||
h("div", { className: "iconContainer", innerHTML: item.icon }),
|
||||
h("div", { className: "label" }, item.label),
|
||||
]
|
||||
);
|
||||
}
|
||||
})
|
||||
),
|
||||
...tippyOptions,
|
||||
});
|
||||
@ -214,71 +245,6 @@ function createToolbox({
|
||||
return Array.isArray(toolbox) ? toolbox[0] : toolbox;
|
||||
}
|
||||
|
||||
function createColorPickerToolbox({
|
||||
triggerButton,
|
||||
tippyOptions,
|
||||
onSelectColor = () => {},
|
||||
}: {
|
||||
triggerButton: HTMLElement;
|
||||
tippyOptions: Partial<Props>;
|
||||
onSelectColor?: (color: string) => void;
|
||||
}) {
|
||||
const items = {
|
||||
Default: "rgb(var(--color-primary-100))",
|
||||
Orange: "#FFE5D1",
|
||||
Grey: "#F1F1F1",
|
||||
Yellow: "#FEF3C7",
|
||||
Green: "#DCFCE7",
|
||||
Red: "#FFDDDD",
|
||||
Blue: "#D9E4FF",
|
||||
Pink: "#FFE8FA",
|
||||
Purple: "#E8DAFB",
|
||||
};
|
||||
|
||||
const colorPicker = tippy(triggerButton, {
|
||||
...defaultTippyOptions,
|
||||
content: h(
|
||||
"div",
|
||||
{ className: "tableColorPickerToolbox" },
|
||||
Object.entries(items).map(([key, value]) =>
|
||||
h(
|
||||
"div",
|
||||
{
|
||||
className: "toolboxItem",
|
||||
itemType: "button",
|
||||
onClick: () => {
|
||||
onSelectColor(value);
|
||||
colorPicker.hide();
|
||||
},
|
||||
},
|
||||
[
|
||||
h("div", {
|
||||
className: "colorContainer",
|
||||
style: {
|
||||
backgroundColor: value,
|
||||
},
|
||||
}),
|
||||
h(
|
||||
"div",
|
||||
{
|
||||
className: "label",
|
||||
},
|
||||
key
|
||||
),
|
||||
]
|
||||
)
|
||||
)
|
||||
),
|
||||
onHidden: (instance) => {
|
||||
instance.destroy();
|
||||
},
|
||||
showOnCreate: true,
|
||||
...tippyOptions,
|
||||
});
|
||||
|
||||
return colorPicker;
|
||||
}
|
||||
|
||||
export class TableView implements NodeView {
|
||||
node: ProseMirrorNode;
|
||||
cellMinWidth: number;
|
||||
@ -347,10 +313,27 @@ export class TableView implements NodeView {
|
||||
this.rowsControl,
|
||||
this.columnsControl
|
||||
);
|
||||
const columnColors = {
|
||||
Blue: { backgroundColor: "#D9E4FF", textColor: "#171717" },
|
||||
Orange: { backgroundColor: "#FFEDD5", textColor: "#171717" },
|
||||
Grey: { backgroundColor: "#F1F1F1", textColor: "#171717" },
|
||||
Yellow: { backgroundColor: "#FEF3C7", textColor: "#171717" },
|
||||
Green: { backgroundColor: "#DCFCE7", textColor: "#171717" },
|
||||
Red: { backgroundColor: "#FFDDDD", textColor: "#171717" },
|
||||
Pink: { backgroundColor: "#FFE8FA", textColor: "#171717" },
|
||||
Purple: { backgroundColor: "#E8DAFB", textColor: "#171717" },
|
||||
None: {
|
||||
backgroundColor: "none",
|
||||
textColor: "none",
|
||||
icon: `<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" fill="none" stroke="gray" stroke-width="2" stroke-linecap="round" stroke-linejoin="round" class="lucide lucide-ban"><circle cx="12" cy="12" r="10"/><path d="m4.9 4.9 14.2 14.2"/></svg>`,
|
||||
},
|
||||
};
|
||||
|
||||
this.columnsToolbox = createToolbox({
|
||||
triggerButton: this.columnsControl.querySelector(".columnsControlDiv"),
|
||||
items: columnsToolboxItems,
|
||||
colors: columnColors,
|
||||
onSelectColor: (color) => setCellsBackgroundColor(this.editor, color),
|
||||
tippyOptions: {
|
||||
...defaultTippyOptions,
|
||||
appendTo: this.controls,
|
||||
@ -368,10 +351,12 @@ export class TableView implements NodeView {
|
||||
this.rowsToolbox = createToolbox({
|
||||
triggerButton: this.rowsControl.firstElementChild,
|
||||
items: rowsToolboxItems,
|
||||
colors: columnColors,
|
||||
tippyOptions: {
|
||||
...defaultTippyOptions,
|
||||
appendTo: this.controls,
|
||||
},
|
||||
onSelectColor: (color) => setTableRowBackgroundColor(editor, color),
|
||||
onClickItem: (item) => {
|
||||
item.action({
|
||||
editor: this.editor,
|
||||
@ -383,8 +368,6 @@ export class TableView implements NodeView {
|
||||
});
|
||||
}
|
||||
|
||||
// Table
|
||||
|
||||
this.colgroup = h(
|
||||
"colgroup",
|
||||
null,
|
||||
@ -437,16 +420,19 @@ export class TableView implements NodeView {
|
||||
}
|
||||
|
||||
updateControls() {
|
||||
const { hoveredTable: table, hoveredCell: cell } = Object.values(this.decorations).reduce((acc, curr) => {
|
||||
if (curr.spec.hoveredCell !== undefined) {
|
||||
acc["hoveredCell"] = curr.spec.hoveredCell;
|
||||
}
|
||||
const { hoveredTable: table, hoveredCell: cell } = Object.values(this.decorations).reduce(
|
||||
(acc, curr) => {
|
||||
if (curr.spec.hoveredCell !== undefined) {
|
||||
acc["hoveredCell"] = curr.spec.hoveredCell;
|
||||
}
|
||||
|
||||
if (curr.spec.hoveredTable !== undefined) {
|
||||
acc["hoveredTable"] = curr.spec.hoveredTable;
|
||||
}
|
||||
return acc;
|
||||
}, {} as Record<string, HTMLElement>) as any;
|
||||
if (curr.spec.hoveredTable !== undefined) {
|
||||
acc["hoveredTable"] = curr.spec.hoveredTable;
|
||||
}
|
||||
return acc;
|
||||
},
|
||||
{} as Record<string, HTMLElement>
|
||||
) as any;
|
||||
|
||||
if (table === undefined || cell === undefined) {
|
||||
return this.root.classList.add("controls--disabled");
|
||||
@ -457,12 +443,12 @@ export class TableView implements NodeView {
|
||||
|
||||
const cellDom = this.editor.view.nodeDOM(cell.pos) as HTMLElement;
|
||||
|
||||
if (!this.table) {
|
||||
if (!this.table || !cellDom) {
|
||||
return;
|
||||
}
|
||||
|
||||
const tableRect = this.table.getBoundingClientRect();
|
||||
const cellRect = cellDom.getBoundingClientRect();
|
||||
const tableRect = this.table?.getBoundingClientRect();
|
||||
const cellRect = cellDom?.getBoundingClientRect();
|
||||
|
||||
if (this.columnsControl) {
|
||||
this.columnsControl.style.left = `${cellRect.left - tableRect.left - this.table.parentElement!.scrollLeft}px`;
|
||||
|
@ -107,10 +107,9 @@ export const Table = Node.create({
|
||||
addCommands() {
|
||||
return {
|
||||
insertTable:
|
||||
({ rows = 3, cols = 3, withHeaderRow = true } = {}) =>
|
||||
({ rows = 3, cols = 3, withHeaderRow = false } = {}) =>
|
||||
({ tr, dispatch, editor }) => {
|
||||
const node = createTable(editor.schema, rows, cols, withHeaderRow);
|
||||
|
||||
if (dispatch) {
|
||||
const offset = tr.selection.anchor + 1;
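With this change the command inserts plain tables by default; a short usage sketch (standard Tiptap command chaining, the surrounding editor setup is assumed) of what callers now get:

// Inserts a 3x3 table with no header row — the new default.
editor.chain().focus().insertTable({ rows: 3, cols: 3 }).run();

// The previous behaviour is still available, but has to be requested explicitly.
editor.chain().focus().insertTable({ rows: 3, cols: 3, withHeaderRow: true }).run();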
|
||||
|
||||
|
109
packages/editor/core/src/ui/extensions/typography/index.ts
Normal file
109
packages/editor/core/src/ui/extensions/typography/index.ts
Normal file
@ -0,0 +1,109 @@
|
||||
import { Extension } from "@tiptap/core";
|
||||
import {
|
||||
TypographyOptions,
|
||||
emDash,
|
||||
ellipsis,
|
||||
leftArrow,
|
||||
rightArrow,
|
||||
copyright,
|
||||
trademark,
|
||||
servicemark,
|
||||
registeredTrademark,
|
||||
oneHalf,
|
||||
plusMinus,
|
||||
notEqual,
|
||||
laquo,
|
||||
raquo,
|
||||
multiplication,
|
||||
superscriptTwo,
|
||||
superscriptThree,
|
||||
oneQuarter,
|
||||
threeQuarters,
|
||||
impliesArrowRight,
|
||||
} from "src/ui/extensions/typography/inputRules";
|
||||
|
||||
export const CustomTypographyExtension = Extension.create<TypographyOptions>({
|
||||
name: "typography",
|
||||
|
||||
addInputRules() {
|
||||
const rules = [];
|
||||
|
||||
if (this.options.emDash !== false) {
|
||||
rules.push(emDash(this.options.emDash));
|
||||
}
|
||||
|
||||
if (this.options.impliesArrowRight !== false) {
|
||||
rules.push(impliesArrowRight(this.options.impliesArrowRight));
|
||||
}
|
||||
|
||||
if (this.options.ellipsis !== false) {
|
||||
rules.push(ellipsis(this.options.ellipsis));
|
||||
}
|
||||
|
||||
if (this.options.leftArrow !== false) {
|
||||
rules.push(leftArrow(this.options.leftArrow));
|
||||
}
|
||||
|
||||
if (this.options.rightArrow !== false) {
|
||||
rules.push(rightArrow(this.options.rightArrow));
|
||||
}
|
||||
|
||||
if (this.options.copyright !== false) {
|
||||
rules.push(copyright(this.options.copyright));
|
||||
}
|
||||
|
||||
if (this.options.trademark !== false) {
|
||||
rules.push(trademark(this.options.trademark));
|
||||
}
|
||||
|
||||
if (this.options.servicemark !== false) {
|
||||
rules.push(servicemark(this.options.servicemark));
|
||||
}
|
||||
|
||||
if (this.options.registeredTrademark !== false) {
|
||||
rules.push(registeredTrademark(this.options.registeredTrademark));
|
||||
}
|
||||
|
||||
if (this.options.oneHalf !== false) {
|
||||
rules.push(oneHalf(this.options.oneHalf));
|
||||
}
|
||||
|
||||
if (this.options.plusMinus !== false) {
|
||||
rules.push(plusMinus(this.options.plusMinus));
|
||||
}
|
||||
|
||||
if (this.options.notEqual !== false) {
|
||||
rules.push(notEqual(this.options.notEqual));
|
||||
}
|
||||
|
||||
if (this.options.laquo !== false) {
|
||||
rules.push(laquo(this.options.laquo));
|
||||
}
|
||||
|
||||
if (this.options.raquo !== false) {
|
||||
rules.push(raquo(this.options.raquo));
|
||||
}
|
||||
|
||||
if (this.options.multiplication !== false) {
|
||||
rules.push(multiplication(this.options.multiplication));
|
||||
}
|
||||
|
||||
if (this.options.superscriptTwo !== false) {
|
||||
rules.push(superscriptTwo(this.options.superscriptTwo));
|
||||
}
|
||||
|
||||
if (this.options.superscriptThree !== false) {
|
||||
rules.push(superscriptThree(this.options.superscriptThree));
|
||||
}
|
||||
|
||||
if (this.options.oneQuarter !== false) {
|
||||
rules.push(oneQuarter(this.options.oneQuarter));
|
||||
}
|
||||
|
||||
if (this.options.threeQuarters !== false) {
|
||||
rules.push(threeQuarters(this.options.threeQuarters));
|
||||
}
|
||||
|
||||
return rules;
|
||||
},
|
||||
});
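Registering the extension works like any other Tiptap extension; a minimal sketch, assuming a StarterKit-based editor (option names come from the TypographyOptions interface in inputRules.ts below):

import { Editor } from "@tiptap/core";
import StarterKit from "@tiptap/starter-kit";

// Sketch only: disable two rules and override the replacement text of a third.
const editor = new Editor({
  extensions: [
    StarterKit,
    CustomTypographyExtension.configure({
      oneHalf: false, // leave "1/2" as typed
      multiplication: false, // leave "2x3" as typed
      copyright: "(C)", // replace "(c)" with "(C)" instead of "©"
    }),
  ],
});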
|
137
packages/editor/core/src/ui/extensions/typography/inputRules.ts
Normal file
137
packages/editor/core/src/ui/extensions/typography/inputRules.ts
Normal file
@ -0,0 +1,137 @@
|
||||
import { textInputRule } from "@tiptap/core";
|
||||
|
||||
export interface TypographyOptions {
|
||||
emDash: false | string;
|
||||
ellipsis: false | string;
|
||||
leftArrow: false | string;
|
||||
rightArrow: false | string;
|
||||
copyright: false | string;
|
||||
trademark: false | string;
|
||||
servicemark: false | string;
|
||||
registeredTrademark: false | string;
|
||||
oneHalf: false | string;
|
||||
plusMinus: false | string;
|
||||
notEqual: false | string;
|
||||
laquo: false | string;
|
||||
raquo: false | string;
|
||||
multiplication: false | string;
|
||||
superscriptTwo: false | string;
|
||||
superscriptThree: false | string;
|
||||
oneQuarter: false | string;
|
||||
threeQuarters: false | string;
|
||||
impliesArrowRight: false | string;
|
||||
}
|
||||
|
||||
export const emDash = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /--$/,
|
||||
replace: override ?? "—",
|
||||
});
|
||||
|
||||
export const impliesArrowRight = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /=>$/,
|
||||
replace: override ?? "⇒",
|
||||
});
|
||||
|
||||
export const leftArrow = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /<-$/,
|
||||
replace: override ?? "←",
|
||||
});
|
||||
|
||||
export const rightArrow = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /->$/,
|
||||
replace: override ?? "→",
|
||||
});
|
||||
|
||||
export const ellipsis = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /\.\.\.$/,
|
||||
replace: override ?? "…",
|
||||
});
|
||||
|
||||
export const copyright = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /\(c\)$/,
|
||||
replace: override ?? "©",
|
||||
});
|
||||
|
||||
export const trademark = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /\(tm\)$/,
|
||||
replace: override ?? "™",
|
||||
});
|
||||
|
||||
export const servicemark = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /\(sm\)$/,
|
||||
replace: override ?? "℠",
|
||||
});
|
||||
|
||||
export const registeredTrademark = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /\(r\)$/,
|
||||
replace: override ?? "®",
|
||||
});
|
||||
|
||||
export const oneHalf = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /(?:^|\s)(1\/2)\s$/,
|
||||
replace: override ?? "½",
|
||||
});
|
||||
|
||||
export const plusMinus = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /\+\/-$/,
|
||||
replace: override ?? "±",
|
||||
});
|
||||
|
||||
export const notEqual = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /!=$/,
|
||||
replace: override ?? "≠",
|
||||
});
|
||||
|
||||
export const laquo = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /<<$/,
|
||||
replace: override ?? "«",
|
||||
});
|
||||
|
||||
export const raquo = (override?: string) =>
|
||||
textInputRule({
|
||||
find: />>$/,
|
||||
replace: override ?? "»",
|
||||
});
|
||||
|
||||
export const multiplication = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /\d+\s?([*x])\s?\d+$/,
|
||||
replace: override ?? "×",
|
||||
});
|
||||
|
||||
export const superscriptTwo = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /\^2$/,
|
||||
replace: override ?? "²",
|
||||
});
|
||||
|
||||
export const superscriptThree = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /\^3$/,
|
||||
replace: override ?? "³",
|
||||
});
|
||||
|
||||
export const oneQuarter = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /(?:^|\s)(1\/4)\s$/,
|
||||
replace: override ?? "¼",
|
||||
});
|
||||
|
||||
export const threeQuarters = (override?: string) =>
|
||||
textInputRule({
|
||||
find: /(?:^|\s)(3\/4)\s$/,
|
||||
replace: override ?? "¾",
|
||||
});
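Each rule fires as soon as the final character of its pattern is typed, replacing either the whole match or, when the pattern has a capture group, only the captured fragment (this follows Tiptap's textInputRule behaviour). A few illustrative before/after pairs:

// What the user types            -> what remains in the document
// "--"                           -> "—"    (emDash)
// "..."                          -> "…"    (ellipsis)
// "(c)"                          -> "©"    (copyright)
// "1/2 " (trailing space needed) -> "½ "   (oneHalf)
// "2x3"                          -> "2×3"  (multiplication: only the captured "x" is replaced)
// "=>"                           -> "⇒"    (impliesArrowRight)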
|
@ -42,15 +42,6 @@ export function CoreEditorProps(
|
||||
return false;
|
||||
},
|
||||
handleDrop: (view, event, _slice, moved) => {
|
||||
if (typeof window !== "undefined") {
|
||||
const selection: any = window?.getSelection();
|
||||
if (selection.rangeCount !== 0) {
|
||||
const range = selection.getRangeAt(0);
|
||||
if (findTableAncestor(range.startContainer)) {
|
||||
return;
|
||||
}
|
||||
}
|
||||
}
|
||||
if (!moved && event.dataTransfer && event.dataTransfer.files && event.dataTransfer.files[0]) {
|
||||
event.preventDefault();
|
||||
const file = event.dataTransfer.files[0];
|
||||
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@plane/document-editor",
|
||||
"version": "0.15.1",
|
||||
"version": "0.16.0",
|
||||
"description": "Package that powers Plane's Pages Editor",
|
||||
"main": "./dist/index.mjs",
|
||||
"module": "./dist/index.mjs",
|
||||
|
@ -145,7 +145,7 @@ const IssueSuggestionList = ({
|
||||
<div
|
||||
id="issue-list-container"
|
||||
ref={commandListContainer}
|
||||
className=" fixed z-[10] max-h-80 w-60 overflow-y-auto overflow-x-hidden rounded-md border border-custom-border-100 bg-custom-background-100 px-1 shadow-custom-shadow-xs transition-all"
|
||||
className=" fixed z-[10] max-h-80 w-96 overflow-y-auto overflow-x-hidden rounded-md border border-custom-border-100 bg-custom-background-100 px-1 shadow-custom-shadow-xs transition-all"
|
||||
>
|
||||
{sections.map((section) => {
|
||||
const sectionItems = displayedItems[section];
|
||||
@ -175,8 +175,8 @@ const IssueSuggestionList = ({
|
||||
>
|
||||
<h5 className="whitespace-nowrap text-xs text-custom-text-300">{item.identifier}</h5>
|
||||
<PriorityIcon priority={item.priority} />
|
||||
<div>
|
||||
<p className="flex-grow truncate text-xs">{item.title}</p>
|
||||
<div className="w-full truncate">
|
||||
<p className="flex-grow w-full truncate text-xs">{item.title}</p>
|
||||
</div>
|
||||
</button>
|
||||
))}
|
||||
|
@ -48,34 +48,12 @@ export const FixedMenu = (props: EditorBubbleMenuProps) => {
|
||||
function getComplexItems(): BubbleMenuItem[] {
|
||||
const items: BubbleMenuItem[] = [TableItem(editor)];
|
||||
|
||||
if (shouldShowImageItem()) {
|
||||
items.push(ImageItem(editor, uploadFile, setIsSubmitting));
|
||||
}
|
||||
|
||||
items.push(ImageItem(editor, uploadFile, setIsSubmitting));
|
||||
return items;
|
||||
}
|
||||
|
||||
const complexItems: BubbleMenuItem[] = getComplexItems();
|
||||
|
||||
function shouldShowImageItem(): boolean {
|
||||
if (typeof window !== "undefined") {
|
||||
const selectionRange: any = window?.getSelection();
|
||||
const { selection } = props.editor.state;
|
||||
|
||||
if (selectionRange.rangeCount !== 0) {
|
||||
const range = selectionRange.getRangeAt(0);
|
||||
if (findTableAncestor(range.startContainer)) {
|
||||
return false;
|
||||
}
|
||||
if (isCellSelection(selection)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
return (
|
||||
<div className="flex flex-wrap items-center divide-x divide-custom-border-200">
|
||||
<div className="flex items-center gap-0.5 pr-2">
|
||||
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@plane/editor-extensions",
|
||||
"version": "0.15.1",
|
||||
"version": "0.16.0",
|
||||
"description": "Package that powers Plane's Editor with extensions",
|
||||
"private": true,
|
||||
"main": "./dist/index.mjs",
|
||||
|
@ -35,7 +35,7 @@ export interface DragHandleOptions {
|
||||
}
|
||||
|
||||
function absoluteRect(node: Element) {
|
||||
const data = node.getBoundingClientRect();
|
||||
const data = node?.getBoundingClientRect();
|
||||
|
||||
return {
|
||||
top: data.top,
|
||||
@ -65,7 +65,7 @@ function nodeDOMAtCoords(coords: { x: number; y: number }) {
|
||||
}
|
||||
|
||||
function nodePosAtDOM(node: Element, view: EditorView) {
|
||||
const boundingRect = node.getBoundingClientRect();
|
||||
const boundingRect = node?.getBoundingClientRect();
|
||||
|
||||
if (node.nodeName === "IMG") {
|
||||
return view.posAtCoords({
|
||||
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@plane/lite-text-editor",
|
||||
"version": "0.15.1",
|
||||
"version": "0.16.0",
|
||||
"description": "Package that powers Plane's Comment Editor",
|
||||
"private": true,
|
||||
"main": "./dist/index.mjs",
|
||||
|
@ -60,34 +60,13 @@ export const FixedMenu = (props: EditorBubbleMenuProps) => {
|
||||
function getComplexItems(): BubbleMenuItem[] {
|
||||
const items: BubbleMenuItem[] = [TableItem(props.editor)];
|
||||
|
||||
if (shouldShowImageItem()) {
|
||||
items.push(ImageItem(props.editor, props.uploadFile, props.setIsSubmitting));
|
||||
}
|
||||
items.push(ImageItem(props.editor, props.uploadFile, props.setIsSubmitting));
|
||||
|
||||
return items;
|
||||
}
|
||||
|
||||
const complexItems: BubbleMenuItem[] = getComplexItems();
|
||||
|
||||
function shouldShowImageItem(): boolean {
|
||||
if (typeof window !== "undefined") {
|
||||
const selectionRange: any = window?.getSelection();
|
||||
const { selection } = props.editor.state;
|
||||
|
||||
if (selectionRange.rangeCount !== 0) {
|
||||
const range = selectionRange.getRangeAt(0);
|
||||
if (findTableAncestor(range.startContainer)) {
|
||||
return false;
|
||||
}
|
||||
if (isCellSelection(selection)) {
|
||||
return false;
|
||||
}
|
||||
}
|
||||
return true;
|
||||
}
|
||||
return false;
|
||||
}
|
||||
|
||||
const handleAccessChange = (accessKey: string) => {
|
||||
props.commentAccessSpecifier?.onAccessChange(accessKey);
|
||||
};
|
||||
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@plane/rich-text-editor",
|
||||
"version": "0.15.1",
|
||||
"version": "0.16.0",
|
||||
"description": "Rich Text Editor that powers Plane",
|
||||
"private": true,
|
||||
"main": "./dist/index.mjs",
|
||||
|
@ -16,6 +16,7 @@ import { EditorBubbleMenu } from "src/ui/menus/bubble-menu";
|
||||
|
||||
export type IRichTextEditor = {
|
||||
value: string;
|
||||
initialValue?: string;
|
||||
dragDropEnabled?: boolean;
|
||||
uploadFile: UploadImage;
|
||||
restoreFile: RestoreImage;
|
||||
@ -55,6 +56,7 @@ const RichTextEditor = ({
|
||||
setShouldShowAlert,
|
||||
editorContentCustomClassNames,
|
||||
value,
|
||||
initialValue,
|
||||
uploadFile,
|
||||
deleteFile,
|
||||
noBorder,
|
||||
@ -98,6 +100,10 @@ const RichTextEditor = ({
|
||||
customClassName,
|
||||
});
|
||||
|
||||
React.useEffect(() => {
|
||||
if (editor && initialValue && editor.getHTML() != initialValue) editor.commands.setContent(initialValue);
|
||||
}, [editor, initialValue]);
|
||||
|
||||
if (!editor) return null;
|
||||
|
||||
return (
|
||||
|
@ -1,7 +1,7 @@
|
||||
{
|
||||
"name": "eslint-config-custom",
|
||||
"private": true,
|
||||
"version": "0.15.1",
|
||||
"version": "0.16.0",
|
||||
"main": "index.js",
|
||||
"license": "MIT",
|
||||
"dependencies": {
|
||||
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "tailwind-config-custom",
|
||||
"version": "0.15.1",
|
||||
"version": "0.16.0",
|
||||
"description": "common tailwind configuration across monorepo",
|
||||
"main": "index.js",
|
||||
"private": true,
|
||||
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "tsconfig",
|
||||
"version": "0.15.1",
|
||||
"version": "0.16.0",
|
||||
"private": true,
|
||||
"files": [
|
||||
"base.json",
|
||||
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "@plane/types",
|
||||
"version": "0.15.1",
|
||||
"version": "0.16.0",
|
||||
"private": true,
|
||||
"main": "./src/index.d.ts"
|
||||
}
|
||||
|
10
packages/types/src/cycles.d.ts
vendored
10
packages/types/src/cycles.d.ts
vendored
@ -30,10 +30,9 @@ export interface ICycle {
|
||||
is_favorite: boolean;
|
||||
issue: string;
|
||||
name: string;
|
||||
owned_by: string;
|
||||
owned_by_id: string;
|
||||
progress_snapshot: TProgressSnapshot;
|
||||
project: string;
|
||||
project_detail: IProjectLite;
|
||||
project_id: string;
|
||||
status: TCycleGroups;
|
||||
sort_order: number;
|
||||
start_date: string | null;
|
||||
@ -42,12 +41,11 @@ export interface ICycle {
|
||||
unstarted_issues: number;
|
||||
updated_at: Date;
|
||||
updated_by: string;
|
||||
assignees: IUserLite[];
|
||||
assignee_ids: string[];
|
||||
view_props: {
|
||||
filters: IIssueFilterOptions;
|
||||
};
|
||||
workspace: string;
|
||||
workspace_detail: IWorkspaceLite;
|
||||
workspace_id: string;
|
||||
}
|
||||
|
||||
export type TProgressSnapshot = {
|
||||
|
3
packages/types/src/issues.d.ts
vendored
3
packages/types/src/issues.d.ts
vendored
@ -58,7 +58,6 @@ export interface IIssueLink {
|
||||
export interface ILinkDetails {
|
||||
created_at: Date;
|
||||
created_by: string;
|
||||
created_by_detail: IUserLite;
|
||||
id: string;
|
||||
metadata: any;
|
||||
title: string;
|
||||
@ -204,6 +203,8 @@ export interface ViewFlags {
|
||||
|
||||
export type GroupByColumnTypes =
|
||||
| "project"
|
||||
| "cycle"
|
||||
| "module"
|
||||
| "state"
|
||||
| "state_detail.group"
|
||||
| "priority"
|
||||
|
10
packages/types/src/issues/issue.d.ts
vendored
10
packages/types/src/issues/issue.d.ts
vendored
@ -1,4 +1,7 @@
|
||||
import { TIssuePriorities } from "../issues";
|
||||
import { TIssueAttachment } from "./issue_attachment";
|
||||
import { TIssueLink } from "./issue_link";
|
||||
import { TIssueReaction } from "./issue_reaction";
|
||||
|
||||
// new issue structure types
|
||||
export type TIssue = {
|
||||
@ -34,7 +37,12 @@ export type TIssue = {
|
||||
updated_by: string;
|
||||
|
||||
is_draft: boolean;
|
||||
is_subscribed: boolean;
|
||||
is_subscribed?: boolean;
|
||||
|
||||
parent?: partial<TIssue>;
|
||||
issue_reactions?: TIssueReaction[];
|
||||
issue_attachment?: TIssueAttachment[];
|
||||
issue_link?: TIssueLink[];
|
||||
|
||||
// tempId is used for optimistic updates. It is not a part of the API response.
|
||||
tempId?: string;
|
||||
|
10
packages/types/src/issues/issue_attachment.d.ts
vendored
10
packages/types/src/issues/issue_attachment.d.ts
vendored
@ -1,17 +1,15 @@
|
||||
export type TIssueAttachment = {
|
||||
id: string;
|
||||
created_at: string;
|
||||
updated_at: string;
|
||||
attributes: {
|
||||
name: string;
|
||||
size: number;
|
||||
};
|
||||
asset: string;
|
||||
created_by: string;
|
||||
issue_id: string;
|
||||
|
||||
//need
|
||||
updated_at: string;
|
||||
updated_by: string;
|
||||
project: string;
|
||||
workspace: string;
|
||||
issue: string;
|
||||
};
|
||||
|
||||
export type TIssueAttachmentMap = {
|
||||
|
8
packages/types/src/issues/issue_link.d.ts
vendored
8
packages/types/src/issues/issue_link.d.ts
vendored
@ -4,11 +4,13 @@ export type TIssueLinkEditableFields = {
|
||||
};
|
||||
|
||||
export type TIssueLink = TIssueLinkEditableFields & {
|
||||
created_at: Date;
|
||||
created_by: string;
|
||||
created_by_detail: IUserLite;
|
||||
created_by_id: string;
|
||||
id: string;
|
||||
metadata: any;
|
||||
issue_id: string;
|
||||
|
||||
//need
|
||||
created_at: Date;
|
||||
};
|
||||
|
||||
export type TIssueLinkMap = {
|
||||
|
11
packages/types/src/issues/issue_reaction.d.ts
vendored
11
packages/types/src/issues/issue_reaction.d.ts
vendored
@ -1,15 +1,8 @@
|
||||
export type TIssueReaction = {
|
||||
actor: string;
|
||||
actor_detail: IUserLite;
|
||||
created_at: Date;
|
||||
created_by: string;
|
||||
actor_id: string;
|
||||
id: string;
|
||||
issue: string;
|
||||
project: string;
|
||||
issue_id: string;
|
||||
reaction: string;
|
||||
updated_at: Date;
|
||||
updated_by: string;
|
||||
workspace: string;
|
||||
};
|
||||
|
||||
export type TIssueReactionMap = {
|
||||
|
13
packages/types/src/modules.d.ts
vendored
13
packages/types/src/modules.d.ts
vendored
@ -27,16 +27,12 @@ export interface IModule {
|
||||
labels: TLabelsDistribution[];
|
||||
};
|
||||
id: string;
|
||||
lead: string | null;
|
||||
lead_detail: IUserLite | null;
|
||||
lead_id: string | null;
|
||||
link_module: ILinkDetails[];
|
||||
links_list: ModuleLink[];
|
||||
members: string[];
|
||||
members_detail: IUserLite[];
|
||||
member_ids: string[];
|
||||
is_favorite: boolean;
|
||||
name: string;
|
||||
project: string;
|
||||
project_detail: IProjectLite;
|
||||
project_id: string;
|
||||
sort_order: number;
|
||||
start_date: string | null;
|
||||
started_issues: number;
|
||||
@ -49,8 +45,7 @@ export interface IModule {
|
||||
view_props: {
|
||||
filters: IIssueFilterOptions;
|
||||
};
|
||||
workspace: string;
|
||||
workspace_detail: IWorkspaceLite;
|
||||
workspace_id: string;
|
||||
}
|
||||
|
||||
export interface ModuleIssueResponse {
|
||||
|
24
packages/types/src/notifications.d.ts
vendored
24
packages/types/src/notifications.d.ts
vendored
@ -12,27 +12,27 @@ export interface PaginatedUserNotification {
|
||||
}
|
||||
|
||||
export interface IUserNotification {
|
||||
id: string;
|
||||
created_at: Date;
|
||||
updated_at: Date;
|
||||
archived_at: string | null;
|
||||
created_at: string;
|
||||
created_by: null;
|
||||
data: Data;
|
||||
entity_identifier: string;
|
||||
entity_name: string;
|
||||
title: string;
|
||||
id: string;
|
||||
message: null;
|
||||
message_html: string;
|
||||
message_stripped: null;
|
||||
sender: string;
|
||||
read_at: Date | null;
|
||||
archived_at: Date | null;
|
||||
snoozed_till: Date | null;
|
||||
created_by: null;
|
||||
updated_by: null;
|
||||
workspace: string;
|
||||
project: string;
|
||||
read_at: Date | null;
|
||||
receiver: string;
|
||||
sender: string;
|
||||
snoozed_till: Date | null;
|
||||
title: string;
|
||||
triggered_by: string;
|
||||
triggered_by_details: IUserLite;
|
||||
receiver: string;
|
||||
updated_at: Date;
|
||||
updated_by: null;
|
||||
workspace: string;
|
||||
}
|
||||
|
||||
export interface Data {
|
||||
|
10
packages/types/src/projects.d.ts
vendored
10
packages/types/src/projects.d.ts
vendored
@ -1,5 +1,11 @@
|
||||
import { EUserProjectRoles } from "constants/project";
|
||||
import type { IUser, IUserLite, IWorkspace, IWorkspaceLite, TStateGroups } from ".";
|
||||
import type {
|
||||
IUser,
|
||||
IUserLite,
|
||||
IWorkspace,
|
||||
IWorkspaceLite,
|
||||
TStateGroups,
|
||||
} from ".";
|
||||
|
||||
export interface IProject {
|
||||
archive_in: number;
|
||||
@ -117,7 +123,7 @@ export type TProjectIssuesSearchParams = {
|
||||
parent?: boolean;
|
||||
issue_relation?: boolean;
|
||||
cycle?: boolean;
|
||||
module?: string[];
|
||||
module?: string;
|
||||
sub_issue?: boolean;
|
||||
issue_id?: string;
|
||||
workspace_search: boolean;
|
||||
|
12
packages/types/src/view-props.d.ts
vendored
12
packages/types/src/view-props.d.ts
vendored
@ -14,6 +14,8 @@ export type TIssueGroupByOptions =
|
||||
| "project"
|
||||
| "assignees"
|
||||
| "mentions"
|
||||
| "cycle"
|
||||
| "module"
|
||||
| null;
|
||||
|
||||
export type TIssueOrderByOptions =
|
||||
@ -30,6 +32,10 @@ export type TIssueOrderByOptions =
|
||||
| "-assignees__first_name"
|
||||
| "labels__name"
|
||||
| "-labels__name"
|
||||
| "modules__name"
|
||||
| "-modules__name"
|
||||
| "cycle__name"
|
||||
| "-cycle__name"
|
||||
| "target_date"
|
||||
| "-target_date"
|
||||
| "estimate_point"
|
||||
@ -56,6 +62,8 @@ export type TIssueParams =
|
||||
| "created_by"
|
||||
| "subscriber"
|
||||
| "labels"
|
||||
| "cycle"
|
||||
| "module"
|
||||
| "start_date"
|
||||
| "target_date"
|
||||
| "project"
|
||||
@ -75,6 +83,8 @@ export interface IIssueFilterOptions {
|
||||
labels?: string[] | null;
|
||||
priority?: string[] | null;
|
||||
project?: string[] | null;
|
||||
cycle?: string[] | null;
|
||||
module?: string[] | null;
|
||||
start_date?: string[] | null;
|
||||
state?: string[] | null;
|
||||
state_group?: string[] | null;
|
||||
@ -109,6 +119,8 @@ export interface IIssueDisplayProperties {
|
||||
estimate?: boolean;
|
||||
created_on?: boolean;
|
||||
updated_on?: boolean;
|
||||
modules?: boolean;
|
||||
cycle?: boolean;
|
||||
}
|
||||
|
||||
export type TIssueKanbanFilters = {
|
||||
|
@ -2,7 +2,7 @@
|
||||
"name": "@plane/ui",
|
||||
"description": "UI components shared across multiple apps internally",
|
||||
"private": true,
|
||||
"version": "0.15.1",
|
||||
"version": "0.16.0",
|
||||
"main": "./dist/index.js",
|
||||
"module": "./dist/index.mjs",
|
||||
"types": "./dist/index.d.ts",
|
||||
|
@ -5,10 +5,11 @@ export type TControlLink = React.AnchorHTMLAttributes<HTMLAnchorElement> & {
|
||||
onClick: () => void;
|
||||
children: React.ReactNode;
|
||||
target?: string;
|
||||
disabled?: boolean;
|
||||
};
|
||||
|
||||
export const ControlLink: React.FC<TControlLink> = (props) => {
|
||||
const { href, onClick, children, target = "_self", ...rest } = props;
|
||||
const { href, onClick, children, target = "_self", disabled = false, ...rest } = props;
|
||||
const LEFT_CLICK_EVENT_CODE = 0;
|
||||
|
||||
const _onClick = (event: React.MouseEvent<HTMLAnchorElement, MouseEvent>) => {
|
||||
@ -19,6 +20,8 @@ export const ControlLink: React.FC<TControlLink> = (props) => {
|
||||
}
|
||||
};
|
||||
|
||||
if (disabled) return <>{children}</>;
|
||||
|
||||
return (
|
||||
<a href={href} target={target} onClick={_onClick} {...rest}>
|
||||
{children}
|
||||
|
@ -27,6 +27,7 @@ const CustomMenu = (props: ICustomMenuDropdownProps) => {
|
||||
noBorder = false,
|
||||
noChevron = false,
|
||||
optionsClassName = "",
|
||||
menuItemsClassName = "",
|
||||
verticalEllipsis = false,
|
||||
portalElement,
|
||||
menuButtonOnClick,
|
||||
@ -70,7 +71,7 @@ const CustomMenu = (props: ICustomMenuDropdownProps) => {
|
||||
useOutsideClickDetector(dropdownRef, closeDropdown);
|
||||
|
||||
let menuItems = (
|
||||
<Menu.Items className="fixed z-10" static>
|
||||
<Menu.Items className={cn("fixed z-10", menuItemsClassName)} static>
|
||||
<div
|
||||
className={cn(
|
||||
"my-1 overflow-y-scroll rounded-md border-[0.5px] border-custom-border-300 bg-custom-background-100 px-2 py-2.5 text-xs shadow-custom-shadow-rg focus:outline-none min-w-[12rem] whitespace-nowrap",
|
||||
@ -177,17 +178,18 @@ const CustomMenu = (props: ICustomMenuDropdownProps) => {
|
||||
};
|
||||
|
||||
const MenuItem: React.FC<ICustomMenuItemProps> = (props) => {
|
||||
const { children, onClick, className = "" } = props;
|
||||
const { children, disabled = false, onClick, className } = props;
|
||||
|
||||
return (
|
||||
<Menu.Item as="div">
|
||||
<Menu.Item as="div" disabled={disabled}>
|
||||
{({ active, close }) => (
|
||||
<button
|
||||
type="button"
|
||||
className={cn(
|
||||
"w-full select-none truncate rounded px-1 py-1.5 text-left text-custom-text-200",
|
||||
{
|
||||
"bg-custom-background-80": active,
|
||||
"bg-custom-background-80": active && !disabled,
|
||||
"text-custom-text-400": disabled,
|
||||
},
|
||||
className
|
||||
)}
|
||||
@ -195,6 +197,7 @@ const MenuItem: React.FC<ICustomMenuItemProps> = (props) => {
|
||||
close();
|
||||
onClick && onClick(e);
|
||||
}}
|
||||
disabled={disabled}
|
||||
>
|
||||
{children}
|
||||
</button>
|
||||
|
@ -122,7 +122,7 @@ const Option = (props: ICustomSelectItemProps) => {
|
||||
value={value}
|
||||
className={({ active }) =>
|
||||
cn(
|
||||
"cursor-pointer select-none truncate rounded px-1 py-1.5 text-custom-text-200",
|
||||
"cursor-pointer select-none truncate rounded px-1 py-1.5 text-custom-text-200 flex items-center justify-between gap-2",
|
||||
{
|
||||
"bg-custom-background-80": active,
|
||||
},
|
||||
@ -131,10 +131,10 @@ const Option = (props: ICustomSelectItemProps) => {
|
||||
}
|
||||
>
|
||||
{({ selected }) => (
|
||||
<div className="flex items-center justify-between gap-2">
|
||||
<div className="flex items-center gap-2">{children}</div>
|
||||
<>
|
||||
{children}
|
||||
{selected && <Check className="h-3.5 w-3.5 flex-shrink-0" />}
|
||||
</div>
|
||||
</>
|
||||
)}
|
||||
</Listbox.Option>
|
||||
);
|
||||
|
@ -24,6 +24,7 @@ export interface ICustomMenuDropdownProps extends IDropdownProps {
|
||||
noBorder?: boolean;
|
||||
verticalEllipsis?: boolean;
|
||||
menuButtonOnClick?: (...args: any) => void;
|
||||
menuItemsClassName?: string;
|
||||
onMenuClose?: () => void;
|
||||
closeOnSelect?: boolean;
|
||||
portalElement?: Element | null;
|
||||
@ -64,6 +65,7 @@ export type ICustomSearchSelectProps = IDropdownProps &
|
||||
|
||||
export interface ICustomMenuItemProps {
|
||||
children: React.ReactNode;
|
||||
disabled?: boolean;
|
||||
onClick?: (args?: any) => void;
|
||||
className?: string;
|
||||
}
|
||||
|
@ -1,6 +1,6 @@
|
||||
{
|
||||
"name": "space",
|
||||
"version": "0.15.1",
|
||||
"version": "0.16.0",
|
||||
"private": true,
|
||||
"scripts": {
|
||||
"dev": "turbo run develop",
|
||||
|
@ -20,7 +20,8 @@ export const CustomAnalyticsSidebarHeader = observer(() => {
|
||||
const cycleDetails = cycleId ? getCycleById(cycleId.toString()) : undefined;
|
||||
const moduleDetails = moduleId ? getModuleById(moduleId.toString()) : undefined;
|
||||
const projectDetails = projectId ? getProjectById(projectId.toString()) : undefined;
|
||||
const cycleOwnerDetails = cycleDetails ? getUserDetails(cycleDetails.owned_by) : undefined;
|
||||
const cycleOwnerDetails = cycleDetails ? getUserDetails(cycleDetails.owned_by_id) : undefined;
|
||||
const moduleLeadDetails = moduleDetails && moduleDetails.lead_id ? getUserDetails(moduleDetails.lead_id) : undefined;
|
||||
|
||||
return (
|
||||
<>
|
||||
@ -57,7 +58,7 @@ export const CustomAnalyticsSidebarHeader = observer(() => {
|
||||
<div className="mt-4 space-y-4">
|
||||
<div className="flex items-center gap-2 text-xs">
|
||||
<h6 className="text-custom-text-200">Lead</h6>
|
||||
<span>{moduleDetails.lead_detail?.display_name}</span>
|
||||
{moduleLeadDetails && <span>{moduleLeadDetails?.display_name}</span>}
|
||||
</div>
|
||||
<div className="flex items-center gap-2 text-xs">
|
||||
<h6 className="text-custom-text-200">Start Date</h6>
|
||||
|
@ -5,7 +5,7 @@ import { mutate } from "swr";
|
||||
// services
|
||||
import { AnalyticsService } from "services/analytics.service";
|
||||
// hooks
|
||||
import { useCycle, useModule, useProject, useUser } from "hooks/store";
|
||||
import { useCycle, useModule, useProject, useUser, useWorkspace } from "hooks/store";
|
||||
import useToast from "hooks/use-toast";
|
||||
// components
|
||||
import { CustomAnalyticsSidebarHeader, CustomAnalyticsSidebarProjectsList } from "components/analytics";
|
||||
@ -39,6 +39,8 @@ export const CustomAnalyticsSidebar: React.FC<Props> = observer((props) => {
|
||||
// store hooks
|
||||
const { currentUser } = useUser();
|
||||
const { workspaceProjectIds, getProjectById } = useProject();
|
||||
const { getWorkspaceById } = useWorkspace();
|
||||
|
||||
const { fetchCycleDetails, getCycleById } = useCycle();
|
||||
const { fetchModuleDetails, getModuleById } = useModule();
|
||||
|
||||
@ -70,11 +72,14 @@ export const CustomAnalyticsSidebar: React.FC<Props> = observer((props) => {
|
||||
if (cycleDetails || moduleDetails) {
|
||||
const details = cycleDetails || moduleDetails;
|
||||
|
||||
eventPayload.workspaceId = details?.workspace_detail?.id;
|
||||
eventPayload.workspaceName = details?.workspace_detail?.name;
|
||||
eventPayload.projectId = details?.project_detail.id;
|
||||
eventPayload.projectIdentifier = details?.project_detail.identifier;
|
||||
eventPayload.projectName = details?.project_detail.name;
|
||||
const currentProjectDetails = getProjectById(details?.project_id || "");
|
||||
const currentWorkspaceDetails = getWorkspaceById(details?.workspace_id || "");
|
||||
|
||||
eventPayload.workspaceId = details?.workspace_id;
|
||||
eventPayload.workspaceName = currentWorkspaceDetails?.name;
|
||||
eventPayload.projectId = details?.project_id;
|
||||
eventPayload.projectIdentifier = currentProjectDetails?.identifier;
|
||||
eventPayload.projectName = currentProjectDetails?.name;
|
||||
}
|
||||
|
||||
if (cycleDetails) {
|
||||
@ -138,14 +143,18 @@ export const CustomAnalyticsSidebar: React.FC<Props> = observer((props) => {
|
||||
|
||||
const selectedProjects = params.project && params.project.length > 0 ? params.project : workspaceProjectIds;
|
||||
|
||||
|
||||
return (
|
||||
<div className={cn("relative h-full flex w-full gap-2 justify-between items-start px-5 py-4 bg-custom-sidebar-background-100", !isProjectLevel ? "flex-col" : "")}
|
||||
<div
|
||||
className={cn(
|
||||
"relative h-full flex w-full gap-2 justify-between items-start px-5 py-4 bg-custom-sidebar-background-100",
|
||||
!isProjectLevel ? "flex-col" : ""
|
||||
)}
|
||||
>
|
||||
<div className="flex flex-wrap items-center gap-2">
|
||||
<div className="flex items-center gap-1 rounded-md bg-custom-background-80 px-3 py-1 text-xs text-custom-text-200">
|
||||
<LayersIcon height={14} width={14} />
|
||||
{analytics ? analytics.total : "..."} <div className={cn(isProjectLevel ? "hidden md:block" : "")}>Issues</div>
|
||||
{analytics ? analytics.total : "..."}
|
||||
<div className={cn(isProjectLevel ? "hidden md:block" : "")}>Issues</div>
|
||||
</div>
|
||||
{isProjectLevel && (
|
||||
<div className="flex items-center gap-1 rounded-md bg-custom-background-80 px-3 py-1 text-xs text-custom-text-200">
|
||||
@ -154,8 +163,8 @@ export const CustomAnalyticsSidebar: React.FC<Props> = observer((props) => {
|
||||
(cycleId
|
||||
? cycleDetails?.created_at
|
||||
: moduleId
|
||||
? moduleDetails?.created_at
|
||||
: projectDetails?.created_at) ?? ""
|
||||
? moduleDetails?.created_at
|
||||
: projectDetails?.created_at) ?? ""
|
||||
)}
|
||||
</div>
|
||||
)}
|
||||
|
@ -47,7 +47,7 @@ export const ScopeAndDemand: React.FC<Props> = (props) => {
|
||||
<>
|
||||
{!defaultAnalyticsError ? (
|
||||
defaultAnalytics ? (
|
||||
<div className="h-full overflow-y-auto p-5 text-sm">
|
||||
<div className="h-full overflow-y-auto p-5 text-sm vertical-scrollbar scrollbar-lg">
|
||||
<div className={`grid grid-cols-1 gap-5 ${fullScreen ? "md:grid-cols-2" : ""}`}>
|
||||
<AnalyticsDemand defaultAnalytics={defaultAnalytics} />
|
||||
<AnalyticsScope defaultAnalytics={defaultAnalytics} />
|
||||
|
@ -1,11 +1,10 @@
|
||||
import { useState } from "react";
|
||||
import { add } from "date-fns";
|
||||
import { Controller, useForm } from "react-hook-form";
|
||||
import { DateDropdown } from "components/dropdowns";
|
||||
import { Calendar } from "lucide-react";
|
||||
// hooks
|
||||
import useToast from "hooks/use-toast";
|
||||
// components
|
||||
import { CustomDatePicker } from "components/ui";
|
||||
// ui
|
||||
import { Button, CustomSelect, Input, TextArea, ToggleSwitch } from "@plane/ui";
|
||||
// helpers
|
||||
@ -167,7 +166,7 @@ export const CreateApiTokenForm: React.FC<Props> = (props) => {
|
||||
<CustomSelect
|
||||
customButton={
|
||||
<div
|
||||
className={`flex items-center gap-2 rounded border-[0.5px] border-custom-border-200 px-2 py-1 ${
|
||||
className={`flex items-center gap-2 rounded border-[0.5px] border-custom-border-300 px-2 py-0.5 ${
|
||||
neverExpires ? "text-custom-text-400" : ""
|
||||
}`}
|
||||
>
|
||||
@ -194,20 +193,13 @@ export const CreateApiTokenForm: React.FC<Props> = (props) => {
|
||||
}}
|
||||
/>
|
||||
{watch("expired_at") === "custom" && (
|
||||
<CustomDatePicker
|
||||
<DateDropdown
|
||||
value={customDate}
|
||||
onChange={(date) => setCustomDate(date ? new Date(date) : null)}
|
||||
onChange={(date) => setCustomDate(date)}
|
||||
minDate={tomorrow}
|
||||
customInput={
|
||||
<div
|
||||
className={`flex cursor-pointer items-center gap-2 !rounded border-[0.5px] border-custom-border-200 px-2 py-1 text-xs !shadow-none !duration-0 ${
|
||||
customDate ? "w-[7.5rem]" : ""
|
||||
} ${neverExpires ? "!cursor-not-allowed text-custom-text-400" : "hover:bg-custom-background-80"}`}
|
||||
>
|
||||
<Calendar className="h-3 w-3" />
|
||||
{customDate ? renderFormattedDate(customDate) : "Set date"}
|
||||
</div>
|
||||
}
|
||||
icon={<Calendar className="h-3 w-3" />}
|
||||
buttonVariant="border-with-text"
|
||||
placeholder="Set date"
|
||||
disabled={neverExpires}
|
||||
/>
|
||||
)}
|
||||
|
@ -48,7 +48,7 @@ export const AutoArchiveAutomation: React.FC<Props> = observer((props) => {
|
||||
<div className="">
|
||||
<h4 className="text-sm font-medium">Auto-archive closed issues</h4>
|
||||
<p className="text-sm tracking-tight text-custom-text-200">
|
||||
Plane will auto archive issues that have been completed or cancelled.
|
||||
Plane will auto archive issues that have been completed or canceled.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@ -73,7 +73,7 @@ export const AutoArchiveAutomation: React.FC<Props> = observer((props) => {
|
||||
<CustomSelect
|
||||
value={currentProjectDetails?.archive_in}
|
||||
label={`${currentProjectDetails?.archive_in} ${
|
||||
currentProjectDetails?.archive_in === 1 ? "Month" : "Months"
|
||||
currentProjectDetails?.archive_in === 1 ? "month" : "months"
|
||||
}`}
|
||||
onChange={(val: number) => {
|
||||
handleChange({ archive_in: val });
|
||||
@ -93,7 +93,7 @@ export const AutoArchiveAutomation: React.FC<Props> = observer((props) => {
|
||||
className="flex w-full select-none items-center rounded px-1 py-1.5 text-sm text-custom-text-200 hover:bg-custom-background-80"
|
||||
onClick={() => setmonthModal(true)}
|
||||
>
|
||||
Customise Time Range
|
||||
Customize time range
|
||||
</button>
|
||||
</>
|
||||
</CustomSelect>
|
||||
|
@ -74,7 +74,7 @@ export const AutoCloseAutomation: React.FC<Props> = observer((props) => {
|
||||
<div className="">
|
||||
<h4 className="text-sm font-medium">Auto-close issues</h4>
|
||||
<p className="text-sm tracking-tight text-custom-text-200">
|
||||
Plane will automatically close issue that haven{"'"}t been completed or cancelled.
|
||||
Plane will automatically close issue that haven{"'"}t been completed or canceled.
|
||||
</p>
|
||||
</div>
|
||||
</div>
|
||||
@ -100,7 +100,7 @@ export const AutoCloseAutomation: React.FC<Props> = observer((props) => {
|
||||
<CustomSelect
|
||||
value={currentProjectDetails?.close_in}
|
||||
label={`${currentProjectDetails?.close_in} ${
|
||||
currentProjectDetails?.close_in === 1 ? "Month" : "Months"
|
||||
currentProjectDetails?.close_in === 1 ? "month" : "months"
|
||||
}`}
|
||||
onChange={(val: number) => {
|
||||
handleChange({ close_in: val });
|
||||
@ -119,7 +119,7 @@ export const AutoCloseAutomation: React.FC<Props> = observer((props) => {
|
||||
className="flex w-full select-none items-center rounded px-1 py-1.5 text-custom-text-200 hover:bg-custom-background-80"
|
||||
onClick={() => setmonthModal(true)}
|
||||
>
|
||||
Customize Time Range
|
||||
Customize time range
|
||||
</button>
|
||||
</>
|
||||
</CustomSelect>
|
||||
|
Some files were not shown because too many files have changed in this diff.