Merge branch 'develop' into dev/api_logging

commit 4fb9ab3998
Nikhil authored 2023-11-12 00:13:45 +05:30, committed by GitHub
No known key found for this signature in database (GPG Key ID: 4AEE18F83AFDEB23)
883 changed files with 21067 additions and 25377 deletions

17
.deepsource.toml Normal file

@ -0,0 +1,17 @@
version = 1
[[analyzers]]
name = "shell"
[[analyzers]]
name = "javascript"
[analyzers.meta]
plugins = ["react"]
environment = ["nodejs"]
[[analyzers]]
name = "python"
[analyzers.meta]
runtime_version = "3.x.x"

213
.github/workflows/build-branch.yml vendored Normal file

@ -0,0 +1,213 @@
name: Branch Build
on:
pull_request:
types:
- closed
branches:
- master
- release
- qa
- develop
env:
TARGET_BRANCH: ${{ github.event.pull_request.base.ref }}
jobs:
branch_build_and_push:
if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) }}
name: Build-Push Web/Space/API/Proxy Docker Image
runs-on: ubuntu-20.04
steps:
- name: Check out the repo
uses: actions/checkout@v3.3.0
# - name: Set Target Branch Name on PR close
# if: ${{ github.event_name == 'pull_request' && github.event.action =='closed' }}
# run: echo "TARGET_BRANCH=${{ github.event.pull_request.base.ref }}" >> $GITHUB_ENV
# - name: Set Target Branch Name on other than PR close
# if: ${{ github.event_name == 'push' }}
# run: echo "TARGET_BRANCH=${{ github.ref_name }}" >> $GITHUB_ENV
- uses: ASzc/change-string-case-action@v2
id: gh_branch_upper_lower
with:
string: ${{env.TARGET_BRANCH}}
- uses: mad9000/actions-find-and-replace-string@2
id: gh_branch_replace_slash
with:
source: ${{ steps.gh_branch_upper_lower.outputs.lowercase }}
find: '/'
replace: '-'
- uses: mad9000/actions-find-and-replace-string@2
id: gh_branch_replace_dot
with:
source: ${{ steps.gh_branch_replace_slash.outputs.value }}
find: '.'
replace: ''
- uses: mad9000/actions-find-and-replace-string@2
id: gh_branch_clean
with:
source: ${{ steps.gh_branch_replace_dot.outputs.value }}
find: '_'
replace: ''
- name: Uploading Proxy Source
uses: actions/upload-artifact@v3
with:
name: proxy-src-code
path: ./nginx
- name: Uploading Backend Source
uses: actions/upload-artifact@v3
with:
name: backend-src-code
path: ./apiserver
- name: Uploading Web Source
uses: actions/upload-artifact@v3
with:
name: web-src-code
path: |
./
!./apiserver
!./nginx
!./deploy
!./space
- name: Uploading Space Source
uses: actions/upload-artifact@v3
with:
name: space-src-code
path: |
./
!./apiserver
!./nginx
!./deploy
!./web
outputs:
gh_branch_name: ${{ steps.gh_branch_clean.outputs.value }}
branch_build_push_frontend:
runs-on: ubuntu-20.04
needs: [ branch_build_and_push ]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2.5.0
- name: Login to Docker Hub
uses: docker/login-action@v2.1.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Downloading Web Source Code
uses: actions/download-artifact@v3
with:
name: web-src-code
- name: Build and Push Frontend to Docker Container Registry
uses: docker/build-push-action@v4.0.0
with:
context: .
file: ./web/Dockerfile.web
platforms: linux/amd64
tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
push: true
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_space:
runs-on: ubuntu-20.04
needs: [ branch_build_and_push ]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2.5.0
- name: Login to Docker Hub
uses: docker/login-action@v2.1.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Downloading Space Source Code
uses: actions/download-artifact@v3
with:
name: space-src-code
- name: Build and Push Space to Docker Hub
uses: docker/build-push-action@v4.0.0
with:
context: .
file: ./space/Dockerfile.space
platforms: linux/amd64
tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
push: true
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_backend:
runs-on: ubuntu-20.04
needs: [ branch_build_and_push ]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2.5.0
- name: Login to Docker Hub
uses: docker/login-action@v2.1.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Downloading Backend Source Code
uses: actions/download-artifact@v3
with:
name: backend-src-code
- name: Build and Push Backend to Docker Hub
uses: docker/build-push-action@v4.0.0
with:
context: .
file: ./Dockerfile.api
platforms: linux/amd64
push: true
tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_proxy:
runs-on: ubuntu-20.04
needs: [ branch_build_and_push ]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2.5.0
- name: Login to Docker Hub
uses: docker/login-action@v2.1.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Downloading Proxy Source Code
uses: actions/download-artifact@v3
with:
name: proxy-src-code
- name: Build and Push Plane-Proxy to Docker Hub
uses: docker/build-push-action@v4.0.0
with:
context: .
file: ./Dockerfile
platforms: linux/amd64
tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
push: true
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
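The three find-and-replace steps above only normalize the merged branch name into a Docker-tag-safe string: lowercase it, turn `/` into `-`, and drop `.` and `_`. A minimal Python sketch of the same transformation, for illustration only (the workflow itself does this with the ASzc/change-string-case-action and mad9000/actions-find-and-replace-string actions):
```
def sanitize_branch_name(branch: str) -> str:
    """Mirror the workflow's string steps: lowercase the target branch,
    replace '/' with '-', and strip '.' and '_' so the result can be used
    as a Docker image tag suffix."""
    return branch.lower().replace("/", "-").replace(".", "").replace("_", "")

# e.g. "feature/v1.2_Beta" -> "feature-v12beta"
print(sanitize_branch_name("feature/v1.2_Beta"))
```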

2
.gitignore vendored

@ -76,7 +76,7 @@ pnpm-lock.yaml
pnpm-workspace.yaml
.npmrc
.secrets
tmp/
## packages
dist


@ -60,7 +60,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
hello@plane.so.
squawk@plane.so.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the


@ -1,8 +1,10 @@
# Environment Variables
Environment variables are distributed across several files. Please refer to them carefully.
## {PROJECT_FOLDER}/.env
This file is available in the project root folder
```
@ -41,25 +43,37 @@ USE_MINIO=1
# Nginx Configuration
NGINX_PORT=80
```
## {PROJECT_FOLDER}/web/.env.example
```
# Enable/Disable OAUTH - default 0 for a self-hosted instance
NEXT_PUBLIC_ENABLE_OAUTH=0
# Public boards deploy URL
NEXT_PUBLIC_DEPLOY_URL="http://localhost/spaces"
```
## {PROJECT_FOLDER}/spaces/.env.example
```
# Flag to toggle OAuth
NEXT_PUBLIC_ENABLE_OAUTH=0
```
## {PROJECT_FOLDER}/apiserver/.env
```
# Backend
# Debug value for the API server; set it to 0 for production use
@ -126,7 +140,9 @@ ENABLE_SIGNUP="1"
# Email Redirection URL
WEB_URL="http://localhost"
```
## Updates
- The environment variable NEXT_PUBLIC_API_BASE_URL has been removed from both the web and space projects.
- The naming convention for containers and images has been updated.
- The plane-worker image will no longer be maintained, as it has been merged with plane-backend.


@ -7,7 +7,7 @@
</p>
<h3 align="center"><b>Plane</b></h3>
<p align="center"><b>Open-source, self-hosted project planning tool</b></p>
<p align="center"><b>Flexible, extensible open-source project management</b></p>
<p align="center">
<a href="https://discord.com/invite/A92xrEGCge">


@ -69,3 +69,6 @@ ENABLE_MAGIC_LINK_LOGIN="0"
# Email redirections and minio domain settings
WEB_URL="http://localhost"
# Gunicorn Workers
GUNICORN_WORKERS=2


@ -6,4 +6,4 @@ python manage.py migrate
# Create a Default User
python bin/user_script.py
exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
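With the worker count now read from `GUNICORN_WORKERS` (defaulted to 2 in the env example above) instead of a hardcoded `-w 8`, concurrency is controlled from the environment. A hedged sketch, not project code, of picking a default from CPU count with the common `(2 x cores) + 1` heuristic:
```
import multiprocessing
import os

# Illustrative only: fall back to the common (2 * cores) + 1 heuristic when
# GUNICORN_WORKERS is not set; the shipped default in this repo is 2.
workers = int(os.environ.get("GUNICORN_WORKERS", 2 * multiprocessing.cpu_count() + 1))
print(f"gunicorn -w {workers} -k uvicorn.workers.UvicornWorker plane.asgi:application")
```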


@ -1,4 +1,4 @@
import os, sys, random, string
import os, sys
import uuid
sys.path.append("/code")


@ -17,7 +17,7 @@ class AnalyticViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = {}
return AnalyticView.objects.create(**validated_data)
def update(self, instance, validated_data):
@ -25,6 +25,6 @@ class AnalyticViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = {}
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)


@ -1,6 +1,3 @@
# Django imports
from django.db.models.functions import TruncDate
# Third party imports
from rest_framework import serializers


@ -6,7 +6,6 @@ from .base import BaseSerializer
from .issue import IssueFlatSerializer, LabelLiteSerializer
from .project import ProjectLiteSerializer
from .state import StateLiteSerializer
from .project import ProjectLiteSerializer
from .user import UserLiteSerializer
from plane.db.models import Inbox, InboxIssue, Issue


@ -5,11 +5,10 @@ from django.utils import timezone
from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .base import BaseSerializer, DynamicBaseSerializer
from .user import UserLiteSerializer
from .state import StateSerializer, StateLiteSerializer
from .user import UserLiteSerializer
from .project import ProjectSerializer, ProjectLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
User,
@ -232,25 +231,6 @@ class IssueActivitySerializer(BaseSerializer):
fields = "__all__"
class IssueCommentSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor")
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
class Meta:
model = IssueComment
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"issue",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class IssuePropertySerializer(BaseSerializer):
class Meta:
@ -287,7 +267,6 @@ class LabelLiteSerializer(BaseSerializer):
class IssueLabelSerializer(BaseSerializer):
# label_details = LabelSerializer(read_only=True, source="label")
class Meta:
model = IssueLabel
@ -569,7 +548,7 @@ class IssueSerializer(BaseSerializer):
]
class IssueLiteSerializer(BaseSerializer):
class IssueLiteSerializer(DynamicBaseSerializer):
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
state_detail = StateLiteSerializer(read_only=True, source="state")
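`IssueLiteSerializer` now extends `DynamicBaseSerializer`, which is what lets the new list endpoints pass `fields=...` and serialize only the requested attributes. The diff does not include `DynamicBaseSerializer` itself; a common DRF implementation of this dynamic-fields pattern looks roughly like the sketch below (an assumption, not the project's exact code):
```
from rest_framework import serializers

class DynamicBaseSerializer(serializers.ModelSerializer):
    """Drop every declared field that is not in the `fields` kwarg."""

    def __init__(self, *args, **kwargs):
        requested = kwargs.pop("fields", None)
        super().__init__(*args, **kwargs)
        if requested is not None:
            for field_name in set(self.fields) - set(requested):
                self.fields.pop(field_name)
```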


@ -4,9 +4,8 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from .project import ProjectSerializer, ProjectLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from .issue import IssueStateSerializer
from plane.db.models import (
User,
@ -19,7 +18,7 @@ from plane.db.models import (
class ModuleWriteSerializer(BaseSerializer):
members_list = serializers.ListField(
members = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True,
required=False,
@ -40,13 +39,18 @@ class ModuleWriteSerializer(BaseSerializer):
"updated_at",
]
def to_representation(self, instance):
data = super().to_representation(instance)
data['members'] = [str(member.id) for member in instance.members.all()]
return data
def validate(self, data):
if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
raise serializers.ValidationError("Start date cannot exceed target date")
return data
def create(self, validated_data):
members = validated_data.pop("members_list", None)
members = validated_data.pop("members", None)
project = self.context["project"]
@ -72,7 +76,7 @@ class ModuleWriteSerializer(BaseSerializer):
return module
def update(self, instance, validated_data):
members = validated_data.pop("members_list", None)
members = validated_data.pop("members", None)
if members is not None:
ModuleMember.objects.filter(module=instance).delete()
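With `members_list` renamed to `members` (a list of user primary keys) and `to_representation` echoing member ids back as strings, a module write payload now looks roughly like this (ids are placeholders):
```
# Hypothetical payload for creating or updating a module after the rename;
# "members" is a plain list of user ids.
request_body = {
    "name": "Sprint 12",
    "members": [
        "0c6b9a5e-0000-0000-0000-000000000001",
        "0c6b9a5e-0000-0000-0000-000000000002",
    ],
}
# to_representation() returns the same ids back as strings under "members".
```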


@ -33,7 +33,7 @@ class PageBlockLiteSerializer(BaseSerializer):
class PageSerializer(BaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
labels_list = serializers.ListField(
labels = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
required=False,
@ -50,9 +50,13 @@ class PageSerializer(BaseSerializer):
"project",
"owned_by",
]
def to_representation(self, instance):
data = super().to_representation(instance)
data['labels'] = [str(label.id) for label in instance.labels.all()]
return data
def create(self, validated_data):
labels = validated_data.pop("labels_list", None)
labels = validated_data.pop("labels", None)
project_id = self.context["project_id"]
owned_by_id = self.context["owned_by_id"]
page = Page.objects.create(
@ -77,7 +81,7 @@ class PageSerializer(BaseSerializer):
return page
def update(self, instance, validated_data):
labels = validated_data.pop("labels_list", None)
labels = validated_data.pop("labels", None)
if labels is not None:
PageLabel.objects.filter(page=instance).delete()
PageLabel.objects.bulk_create(


@ -7,8 +7,6 @@ from plane.db.models import State
class StateSerializer(BaseSerializer):
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
class Meta:
model = State


@ -79,14 +79,14 @@ class UserMeSettingsSerializer(BaseSerializer):
email=obj.email
).count()
if obj.last_workspace_id is not None:
workspace = Workspace.objects.get(
workspace = Workspace.objects.filter(
pk=obj.last_workspace_id, workspace_member__member=obj.id
)
).first()
return {
"last_workspace_id": obj.last_workspace_id,
"last_workspace_slug": workspace.slug,
"last_workspace_slug": workspace.slug if workspace is not None else "",
"fallback_workspace_id": obj.last_workspace_id,
"fallback_workspace_slug": workspace.slug,
"fallback_workspace_slug": workspace.slug if workspace is not None else "",
"invites": workspace_invites,
}
else:


@ -57,7 +57,7 @@ class IssueViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = {}
return IssueView.objects.create(**validated_data)
def update(self, instance, validated_data):
@ -65,7 +65,7 @@ class IssueViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = {}
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)


@ -110,7 +110,6 @@ class TeamSerializer(BaseSerializer):
]
TeamMember.objects.bulk_create(team_members, batch_size=10)
return team
else:
team = Team.objects.create(**validated_data)
return team
@ -124,7 +123,6 @@ class TeamSerializer(BaseSerializer):
]
TeamMember.objects.bulk_create(team_members, batch_size=10)
return super().update(instance, validated_data)
else:
return super().update(instance, validated_data)


@ -1,10 +1,10 @@
from .analytic import urlpatterns as analytic_urls
from .asset import urlpatterns as asset_urls
from .authentication import urlpatterns as authentication_urls
from .configuration import urlpatterns as configuration_urls
from .config import urlpatterns as configuration_urls
from .cycle import urlpatterns as cycle_urls
from .estimate import urlpatterns as estimate_urls
from .gpt import urlpatterns as gpt_urls
from .external import urlpatterns as external_urls
from .importer import urlpatterns as importer_urls
from .inbox import urlpatterns as inbox_urls
from .integration import urlpatterns as integration_urls
@ -14,10 +14,8 @@ from .notification import urlpatterns as notification_urls
from .page import urlpatterns as page_urls
from .project import urlpatterns as project_urls
from .public_board import urlpatterns as public_board_urls
from .release_note import urlpatterns as release_note_urls
from .search import urlpatterns as search_urls
from .state import urlpatterns as state_urls
from .unsplash import urlpatterns as unsplash_urls
from .user import urlpatterns as user_urls
from .views import urlpatterns as view_urls
from .workspace import urlpatterns as workspace_urls
@ -30,7 +28,7 @@ urlpatterns = [
*configuration_urls,
*cycle_urls,
*estimate_urls,
*gpt_urls,
*external_urls,
*importer_urls,
*inbox_urls,
*integration_urls,
@ -40,10 +38,8 @@ urlpatterns = [
*page_urls,
*project_urls,
*public_board_urls,
*release_note_urls,
*search_urls,
*state_urls,
*unsplash_urls,
*user_urls,
*view_urls,
*workspace_urls,


@ -0,0 +1,25 @@
from django.urls import path
from plane.api.views import UnsplashEndpoint
from plane.api.views import ReleaseNotesEndpoint
from plane.api.views import GPTIntegrationEndpoint
urlpatterns = [
path(
"unsplash/",
UnsplashEndpoint.as_view(),
name="unsplash",
),
path(
"release-notes/",
ReleaseNotesEndpoint.as_view(),
name="release-notes",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
GPTIntegrationEndpoint.as_view(),
name="importer",
),
]


@ -1,13 +0,0 @@
from django.urls import path
from plane.api.views import GPTIntegrationEndpoint
urlpatterns = [
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/ai-assistant/",
GPTIntegrationEndpoint.as_view(),
name="importer",
),
]


@ -3,6 +3,8 @@ from django.urls import path
from plane.api.views import (
IssueViewSet,
IssueListEndpoint,
IssueListGroupedEndpoint,
LabelViewSet,
BulkCreateIssueLabelsEndpoint,
BulkDeleteIssuesEndpoint,
@ -35,6 +37,16 @@ urlpatterns = [
),
name="project-issue",
),
path(
"v2/workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
IssueListEndpoint.as_view(),
name="project-issue",
),
path(
"v3/workspaces/<str:slug>/projects/<uuid:project_id>/issues/",
IssueListGroupedEndpoint.as_view(),
name="project-issue",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/issues/<uuid:pk>/",
IssueViewSet.as_view(


@ -4,17 +4,15 @@ from plane.api.views import (
ProjectViewSet,
InviteProjectEndpoint,
ProjectMemberViewSet,
ProjectMemberEndpoint,
ProjectMemberInvitationsViewset,
ProjectMemberUserEndpoint,
AddMemberToProjectEndpoint,
ProjectJoinEndpoint,
AddTeamToProjectEndpoint,
ProjectUserViewsEndpoint,
ProjectIdentifierEndpoint,
ProjectFavoritesViewSet,
LeaveProjectEndpoint,
ProjectPublicCoverImagesEndpoint
ProjectPublicCoverImagesEndpoint,
)
@ -53,7 +51,7 @@ urlpatterns = [
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/",
ProjectMemberViewSet.as_view({"get": "list"}),
ProjectMemberViewSet.as_view({"get": "list", "post": "create"}),
name="project-member",
),
path(
@ -67,16 +65,6 @@ urlpatterns = [
),
name="project-member",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-members/",
ProjectMemberEndpoint.as_view(),
name="project-member",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/add/",
AddMemberToProjectEndpoint.as_view(),
name="project",
),
path(
"workspaces/<str:slug>/projects/join/",
ProjectJoinEndpoint.as_view(),


@ -1,13 +0,0 @@
from django.urls import path
from plane.api.views import ReleaseNotesEndpoint
urlpatterns = [
path(
"release-notes/",
ReleaseNotesEndpoint.as_view(),
name="release-notes",
),
]


@ -20,11 +20,19 @@ urlpatterns = [
StateViewSet.as_view(
{
"get": "retrieve",
"put": "update",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-state",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/states/<uuid:pk>/mark-default/",
StateViewSet.as_view(
{
"post": "mark_as_default",
}
),
name="project-state",
),
]


@ -1,13 +0,0 @@
from django.urls import path
from plane.api.views import UnsplashEndpoint
urlpatterns = [
path(
"unsplash/",
UnsplashEndpoint.as_view(),
name="unsplash",
),
]


@ -5,7 +5,6 @@ from plane.api.views import (
WorkSpaceViewSet,
InviteWorkspaceEndpoint,
WorkSpaceMemberViewSet,
WorkspaceMembersEndpoint,
WorkspaceInvitationsViewset,
WorkspaceMemberUserEndpoint,
WorkspaceMemberUserViewsEndpoint,
@ -86,11 +85,6 @@ urlpatterns = [
),
name="workspace-member",
),
path(
"workspaces/<str:slug>/workspace-members/",
WorkspaceMembersEndpoint.as_view(),
name="workspace-members",
),
path(
"workspaces/<str:slug>/teams/",
TeamMemberViewSet.as_view(


@ -28,7 +28,6 @@ from plane.api.views import (
## End User
# Workspaces
WorkSpaceViewSet,
UserWorkspaceInvitationsEndpoint,
UserWorkSpacesEndpoint,
InviteWorkspaceEndpoint,
JoinWorkspaceEndpoint,


@ -7,14 +7,12 @@ from .project import (
ProjectMemberInvitationsViewset,
ProjectMemberInviteDetailViewSet,
ProjectIdentifierEndpoint,
AddMemberToProjectEndpoint,
ProjectJoinEndpoint,
ProjectUserViewsEndpoint,
ProjectMemberUserEndpoint,
ProjectFavoritesViewSet,
ProjectDeployBoardViewSet,
ProjectDeployBoardPublicSettingsEndpoint,
ProjectMemberEndpoint,
WorkspaceProjectDeployBoardEndpoint,
LeaveProjectEndpoint,
ProjectPublicCoverImagesEndpoint,
@ -53,11 +51,15 @@ from .workspace import (
WorkspaceUserProfileEndpoint,
WorkspaceUserProfileIssuesEndpoint,
WorkspaceLabelsEndpoint,
WorkspaceMembersEndpoint,
LeaveWorkspaceEndpoint,
)
from .state import StateViewSet
from .view import GlobalViewViewSet, GlobalViewIssuesViewSet, IssueViewViewSet, IssueViewFavoriteViewSet
from .view import (
GlobalViewViewSet,
GlobalViewIssuesViewSet,
IssueViewViewSet,
IssueViewFavoriteViewSet,
)
from .cycle import (
CycleViewSet,
CycleIssueViewSet,
@ -68,6 +70,8 @@ from .cycle import (
from .asset import FileAssetEndpoint, UserAssetsEndpoint
from .issue import (
IssueViewSet,
IssueListEndpoint,
IssueListGroupedEndpoint,
WorkSpaceIssuesEndpoint,
IssueActivityEndpoint,
IssueCommentViewSet,
@ -165,7 +169,11 @@ from .analytic import (
DefaultAnalyticsEndpoint,
)
from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkAllReadNotificationViewSet
from .notification import (
NotificationViewSet,
UnreadNotificationEndpoint,
MarkAllReadNotificationViewSet,
)
from .exporter import ExportIssuesEndpoint


@ -55,11 +55,11 @@ class VerifyEmailEndpoint(BaseAPIView):
return Response(
{"email": "Successfully activated"}, status=status.HTTP_200_OK
)
except jwt.ExpiredSignatureError as indentifier:
except jwt.ExpiredSignatureError as _indentifier:
return Response(
{"email": "Activation expired"}, status=status.HTTP_400_BAD_REQUEST
)
except jwt.exceptions.DecodeError as indentifier:
except jwt.exceptions.DecodeError as _indentifier:
return Response(
{"email": "Invalid token"}, status=status.HTTP_400_BAD_REQUEST
)


@ -249,11 +249,11 @@ class MagicSignInGenerateEndpoint(BaseAPIView):
## Generate a random token
token = (
"".join(random.choices(string.ascii_lowercase + string.digits, k=4))
"".join(random.choices(string.ascii_lowercase, k=4))
+ "-"
+ "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
+ "".join(random.choices(string.ascii_lowercase, k=4))
+ "-"
+ "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
+ "".join(random.choices(string.ascii_lowercase, k=4))
)
ri = redis_instance()


@ -21,8 +21,8 @@ class ConfigurationEndpoint(BaseAPIView):
def get(self, request):
data = {}
data["google"] = os.environ.get("GOOGLE_CLIENT_ID", None)
data["github"] = os.environ.get("GITHUB_CLIENT_ID", None)
data["google_client_id"] = os.environ.get("GOOGLE_CLIENT_ID", None)
data["github_client_id"] = os.environ.get("GITHUB_CLIENT_ID", None)
data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
data["magic_login"] = (
bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
@ -30,4 +30,8 @@ class ConfigurationEndpoint(BaseAPIView):
data["email_password_login"] = (
os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
)
data["slack_client_id"] = os.environ.get("SLACK_CLIENT_ID", None)
data["posthog_api_key"] = os.environ.get("POSTHOG_API_KEY", None)
data["posthog_host"] = os.environ.get("POSTHOG_HOST", None)
data["has_unsplash_configured"] = bool(settings.UNSPLASH_ACCESS_KEY)
return Response(data, status=status.HTTP_200_OK)
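For reference, the ConfigurationEndpoint response now carries the Slack, PostHog, and Unsplash keys alongside the renamed OAuth client ids; an illustrative payload (keys are taken from the diff, values are placeholders):
```
example_configuration_response = {
    "google_client_id": None,          # GOOGLE_CLIENT_ID
    "github_client_id": None,          # GITHUB_CLIENT_ID
    "github_app_name": None,           # GITHUB_APP_NAME
    "magic_login": True,               # EMAIL_HOST_USER and EMAIL_HOST_PASSWORD set
    "email_password_login": True,      # ENABLE_EMAIL_PASSWORD == "1"
    "slack_client_id": None,           # SLACK_CLIENT_ID
    "posthog_api_key": None,           # POSTHOG_API_KEY
    "posthog_host": None,              # POSTHOG_HOST
    "has_unsplash_configured": False,  # bool(settings.UNSPLASH_ACCESS_KEY)
}
```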


@ -3,7 +3,6 @@ import json
# Django imports
from django.db.models import (
OuterRef,
Func,
F,
Q,
@ -177,9 +176,8 @@ class CycleViewSet(BaseViewSet):
def list(self, request, slug, project_id):
queryset = self.get_queryset()
cycle_view = request.GET.get("cycle_view", "all")
order_by = request.GET.get("order_by", "sort_order")
queryset = queryset.order_by(order_by)
queryset = queryset.order_by("-is_favorite","-created_at")
# Current Cycle
if cycle_view == "current":
@ -480,13 +478,13 @@ class CycleViewSet(BaseViewSet):
)
)
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
# Delete the cycle
cycle.delete()
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
"cycle_id": str(pk),
"cycle_name": str(cycle.name),
"issues": [str(issue_id) for issue_id in cycle_issues],
}
),
@ -496,6 +494,8 @@ class CycleViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
# Delete the cycle
cycle.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@ -512,12 +512,6 @@ class CycleIssueViewSet(BaseViewSet):
"issue__assignees__id",
]
def perform_create(self, serializer):
serializer.save(
project_id=self.kwargs.get("project_id"),
cycle_id=self.kwargs.get("cycle_id"),
)
def get_queryset(self):
return self.filter_queryset(
super()
@ -670,7 +664,7 @@ class CycleIssueViewSet(BaseViewSet):
type="cycle.activity.created",
requested_data=json.dumps({"cycles_list": issues}),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
issue_id=None,
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
{


@ -89,4 +89,4 @@ class UnsplashEndpoint(BaseAPIView):
}
resp = requests.get(url=url, headers=headers)
return Response(resp.json(), status=status.HTTP_200_OK)
return Response(resp.json(), status=resp.status_code)


@ -39,6 +39,7 @@ from plane.utils.integrations.github import get_github_repo_details
from plane.utils.importers.jira import jira_project_issue_summary
from plane.bgtasks.importer_task import service_importer
from plane.utils.html_processor import strip_tags
from plane.api.permissions import WorkSpaceAdminPermission
class ServiceIssueImportSummaryEndpoint(BaseAPIView):
@ -119,6 +120,9 @@ class ServiceIssueImportSummaryEndpoint(BaseAPIView):
class ImportServiceEndpoint(BaseAPIView):
permission_classes = [
WorkSpaceAdminPermission,
]
def post(self, request, slug, service):
project_id = request.data.get("project_id", False)


@ -360,7 +360,6 @@ class InboxIssuePublicViewSet(BaseViewSet):
)
.select_related("issue", "workspace", "project")
)
else:
return InboxIssue.objects.none()
def list(self, request, slug, project_id, inbox_id):


@ -1,6 +1,6 @@
# Python imports
import uuid
import requests
# Django imports
from django.contrib.auth.hashers import make_password
@ -25,7 +25,7 @@ from plane.utils.integrations.github import (
delete_github_installation,
)
from plane.api.permissions import WorkSpaceAdminPermission
from plane.utils.integrations.slack import slack_oauth
class IntegrationViewSet(BaseViewSet):
serializer_class = IntegrationSerializer
@ -98,12 +98,19 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
config = {"installation_id": installation_id}
if provider == "slack":
metadata = request.data.get("metadata", {})
code = request.data.get("code", False)
if not code:
return Response({"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST)
slack_response = slack_oauth(code=code)
metadata = slack_response
access_token = metadata.get("access_token", False)
team_id = metadata.get("team", {}).get("id", False)
if not metadata or not access_token or not team_id:
return Response(
{"error": "Access token and team id is required"},
{"error": "Slack could not be installed. Please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
config = {"team_id": team_id, "access_token": access_token}


@ -11,6 +11,7 @@ from plane.api.views import BaseViewSet, BaseAPIView
from plane.db.models import SlackProjectSync, WorkspaceIntegration, ProjectMember
from plane.api.serializers import SlackProjectSyncSerializer
from plane.api.permissions import ProjectBasePermission, ProjectEntityPermission
from plane.utils.integrations.slack import slack_oauth
class SlackProjectSyncViewSet(BaseViewSet):
@ -32,25 +33,47 @@ class SlackProjectSyncViewSet(BaseViewSet):
)
def create(self, request, slug, project_id, workspace_integration_id):
serializer = SlackProjectSyncSerializer(data=request.data)
try:
code = request.data.get("code", False)
if not code:
return Response(
{"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST
)
slack_response = slack_oauth(code=code)
workspace_integration = WorkspaceIntegration.objects.get(
workspace__slug=slug, pk=workspace_integration_id
)
if serializer.is_valid():
serializer.save(
project_id=project_id,
workspace_integration_id=workspace_integration_id,
)
workspace_integration = WorkspaceIntegration.objects.get(
pk=workspace_integration_id, workspace__slug=slug
)
slack_project_sync = SlackProjectSync.objects.create(
access_token=slack_response.get("access_token"),
scopes=slack_response.get("scope"),
bot_user_id=slack_response.get("bot_user_id"),
webhook_url=slack_response.get("incoming_webhook", {}).get("url"),
data=slack_response,
team_id=slack_response.get("team", {}).get("id"),
team_name=slack_response.get("team", {}).get("name"),
workspace_integration=workspace_integration,
project_id=project_id,
)
_ = ProjectMember.objects.get_or_create(
member=workspace_integration.actor, role=20, project_id=project_id
)
serializer = SlackProjectSyncSerializer(slack_project_sync)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "Slack is already installed for the project"},
status=status.HTTP_410_GONE,
)
capture_exception(e)
return Response(
{"error": "Slack could not be installed. Please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)


@ -39,7 +39,6 @@ from plane.api.serializers import (
IssueActivitySerializer,
IssueCommentSerializer,
IssuePropertySerializer,
LabelSerializer,
IssueSerializer,
LabelSerializer,
IssueFlatSerializer,
@ -235,10 +234,7 @@ class IssueViewSet(BaseViewSet):
status=status.HTTP_200_OK,
)
return Response(
issues, status=status.HTTP_200_OK
)
return Response(issues, status=status.HTTP_200_OK)
def create(self, request, slug, project_id):
project = Project.objects.get(pk=project_id)
@ -316,6 +312,104 @@ class IssueViewSet(BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT)
class IssueListEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
def get(self, request, slug, project_id):
fields = [field for field in request.GET.get("fields", "").split(",") if field]
filters = issue_filters(request.query_params, "GET")
issue_queryset = (
Issue.objects.filter(workspace__slug=slug, project_id=project_id)
.select_related("project")
.select_related("workspace")
.select_related("state")
.select_related("parent")
.prefetch_related("assignees")
.prefetch_related("labels")
.prefetch_related(
Prefetch(
"issue_reactions",
queryset=IssueReaction.objects.select_related("actor"),
)
)
.filter(**filters)
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(module_id=F("issue_module__module_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.distinct()
)
serializer = IssueLiteSerializer(
issue_queryset, many=True, fields=fields if fields else None
)
return Response(serializer.data, status=status.HTTP_200_OK)
class IssueListGroupedEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
def get(self, request, slug, project_id):
filters = issue_filters(request.query_params, "GET")
fields = [field for field in request.GET.get("fields", "").split(",") if field]
issue_queryset = (
Issue.objects.filter(workspace__slug=slug, project_id=project_id)
.select_related("project")
.select_related("workspace")
.select_related("state")
.select_related("parent")
.prefetch_related("assignees")
.prefetch_related("labels")
.prefetch_related(
Prefetch(
"issue_reactions",
queryset=IssueReaction.objects.select_related("actor"),
)
)
.filter(**filters)
.annotate(cycle_id=F("issue_cycle__cycle_id"))
.annotate(module_id=F("issue_module__module_id"))
.annotate(
link_count=IssueLink.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.annotate(
attachment_count=IssueAttachment.objects.filter(issue=OuterRef("id"))
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.distinct()
)
issues = IssueLiteSerializer(issue_queryset, many=True, fields=fields if fields else None).data
issue_dict = {str(issue["id"]): issue for issue in issues}
return Response(
issue_dict,
status=status.HTTP_200_OK,
)
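These two endpoints back the new `v2/` and `v3/` issue routes added in the URL diff above; both accept a comma-separated `fields` query parameter that is forwarded to the dynamic serializer, and the grouped (`v3`) variant returns a dict keyed by issue id instead of a list. An illustrative client call (base URL, auth scheme, and field names are placeholders):
```
import requests

# Hypothetical usage of the flat v2 issue list with field selection.
resp = requests.get(
    "https://plane.example.com/api/v2/workspaces/my-workspace/projects/<project_id>/issues/",
    params={"fields": "id,name,priority,state"},
    headers={"Authorization": "Bearer <api-token>"},
)
issues = resp.json()  # only the requested fields are serialized
```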
class UserWorkSpaceIssues(BaseAPIView):
@method_decorator(gzip_page)
def get(self, request, slug):
@ -443,9 +537,7 @@ class UserWorkSpaceIssues(BaseAPIView):
status=status.HTTP_200_OK,
)
return Response(
issues, status=status.HTTP_200_OK
)
return Response(issues, status=status.HTTP_200_OK)
class WorkSpaceIssuesEndpoint(BaseAPIView):
@ -623,7 +715,6 @@ class IssueUserDisplayPropertyEndpoint(BaseAPIView):
serializer = IssuePropertySerializer(issue_property)
return Response(serializer.data, status=status.HTTP_201_CREATED)
def get(self, request, slug, project_id):
issue_property, _ = IssueProperty.objects.get_or_create(
user=request.user, project_id=project_id
@ -780,6 +871,20 @@ class SubIssuesEndpoint(BaseAPIView):
updated_sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids)
# Track the issue
_ = [
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps({"parent": str(issue_id)}),
actor_id=str(request.user.id),
issue_id=str(sub_issue_id),
project_id=str(project_id),
current_instance=json.dumps({"parent": str(sub_issue_id)}),
epoch=int(timezone.now().timestamp()),
)
for sub_issue_id in sub_issue_ids
]
return Response(
IssueFlatSerializer(updated_sub_issues, many=True).data,
status=status.HTTP_200_OK,
@ -1092,17 +1197,19 @@ class IssueArchiveViewSet(BaseViewSet):
archived_at__isnull=False,
pk=pk,
)
issue.archived_at = None
issue.save()
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps({"archived_at": None}),
actor_id=str(request.user.id),
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=None,
current_instance=json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder
),
epoch=int(timezone.now().timestamp()),
)
issue.archived_at = None
issue.save()
return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
@ -1396,7 +1503,6 @@ class IssueCommentPublicViewSet(BaseViewSet):
)
.distinct()
).order_by("created_at")
else:
return IssueComment.objects.none()
except ProjectDeployBoard.DoesNotExist:
return IssueComment.objects.none()
@ -1522,7 +1628,6 @@ class IssueReactionPublicViewSet(BaseViewSet):
.order_by("-created_at")
.distinct()
)
else:
return IssueReaction.objects.none()
except ProjectDeployBoard.DoesNotExist:
return IssueReaction.objects.none()
@ -1618,7 +1723,6 @@ class CommentReactionPublicViewSet(BaseViewSet):
.order_by("-created_at")
.distinct()
)
else:
return CommentReaction.objects.none()
except ProjectDeployBoard.DoesNotExist:
return CommentReaction.objects.none()
@ -1713,7 +1817,6 @@ class IssueVotePublicViewSet(BaseViewSet):
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
)
else:
return IssueVote.objects.none()
except ProjectDeployBoard.DoesNotExist:
return IssueVote.objects.none()
@ -2160,9 +2263,7 @@ class IssueDraftViewSet(BaseViewSet):
status=status.HTTP_200_OK,
)
return Response(
issues, status=status.HTTP_200_OK
)
return Response(issues, status=status.HTTP_200_OK)
def create(self, request, slug, project_id):
project = Project.objects.get(pk=project_id)
@ -2227,7 +2328,7 @@ class IssueDraftViewSet(BaseViewSet):
def destroy(self, request, slug, project_id, pk=None):
issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
current_instance = json.dumps(
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
IssueSerializer(issue).data, cls=DjangoJSONEncoder
)
issue.delete()
issue_activity.delay(


@ -55,7 +55,6 @@ class ModuleViewSet(BaseViewSet):
)
def get_queryset(self):
order_by = self.request.GET.get("order_by", "sort_order")
subquery = ModuleFavorite.objects.filter(
user=self.request.user,
@ -138,7 +137,7 @@ class ModuleViewSet(BaseViewSet):
),
)
)
.order_by(order_by, "name")
.order_by("-is_favorite","-created_at")
)
def create(self, request, slug, project_id):
@ -266,12 +265,12 @@ class ModuleViewSet(BaseViewSet):
module_issues = list(
ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
)
module.delete()
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
"module_id": str(pk),
"module_name": str(module.name),
"issues": [str(issue_id) for issue_id in module_issues],
}
),
@ -281,6 +280,7 @@ class ModuleViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
module.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@ -297,12 +297,6 @@ class ModuleIssueViewSet(BaseViewSet):
ProjectEntityPermission,
]
def perform_create(self, serializer):
serializer.save(
project_id=self.kwargs.get("project_id"),
module_id=self.kwargs.get("module_id"),
)
def get_queryset(self):
return self.filter_queryset(
super()
@ -446,7 +440,7 @@ class ModuleIssueViewSet(BaseViewSet):
type="module.activity.created",
requested_data=json.dumps({"modules_list": issues}),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
issue_id=None,
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
{


@ -12,7 +12,6 @@ from django.conf import settings
from rest_framework.response import Response
from rest_framework import exceptions
from rest_framework.permissions import AllowAny
from rest_framework.views import APIView
from rest_framework_simplejwt.tokens import RefreshToken
from rest_framework import status
from sentry_sdk import capture_exception
@ -116,7 +115,7 @@ def get_user_data(access_token: str) -> dict:
url="https://api.github.com/user/emails", headers=headers
).json()
[
_ = [
user_data.update({"email": item.get("email")})
for item in response
if item.get("primary") is True
@ -150,7 +149,7 @@ class OauthEndpoint(BaseAPIView):
data = get_user_data(access_token)
email = data.get("email", None)
if email == None:
if email is None:
return Response(
{
"error": "Something went wrong. Please try again later or contact the support team."
@ -161,7 +160,6 @@ class OauthEndpoint(BaseAPIView):
if "@" in email:
user = User.objects.get(email=email)
email = data["email"]
channel = "email"
mobile_number = uuid.uuid4().hex
email_verified = True
else:
@ -185,7 +183,7 @@ class OauthEndpoint(BaseAPIView):
user.last_active = timezone.now()
user.last_login_time = timezone.now()
user.last_login_ip = request.META.get("REMOTE_ADDR")
user.last_login_medium = f"oauth"
user.last_login_medium = "oauth"
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.is_email_verified = email_verified
user.save()
@ -236,7 +234,6 @@ class OauthEndpoint(BaseAPIView):
if "@" in email:
email = data["email"]
mobile_number = uuid.uuid4().hex
channel = "email"
email_verified = True
else:
return Response(


@ -1,5 +1,5 @@
# Python imports
from datetime import timedelta, datetime, date
from datetime import timedelta, date
# Django imports
from django.db.models import Exists, OuterRef, Q, Prefetch


@ -11,7 +11,6 @@ from django.db.models import (
Q,
Exists,
OuterRef,
Func,
F,
Func,
Subquery,
@ -35,7 +34,6 @@ from plane.api.serializers import (
ProjectDetailSerializer,
ProjectMemberInviteSerializer,
ProjectFavoriteSerializer,
IssueLiteSerializer,
ProjectDeployBoardSerializer,
ProjectMemberAdminSerializer,
)
@ -84,7 +82,7 @@ class ProjectViewSet(BaseViewSet):
]
def get_serializer_class(self, *args, **kwargs):
if self.action == "update" or self.action == "partial_update":
if self.action in ["update", "partial_update"]:
return ProjectSerializer
return ProjectDetailSerializer
@ -336,7 +334,7 @@ class ProjectViewSet(BaseViewSet):
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
)
except Project.DoesNotExist or Workspace.DoesNotExist as e:
except (Project.DoesNotExist, Workspace.DoesNotExist):
return Response(
{"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
)
@ -482,6 +480,83 @@ class ProjectMemberViewSet(BaseViewSet):
.select_related("workspace", "workspace__owner")
)
def create(self, request, slug, project_id):
members = request.data.get("members", [])
# get the project
project = Project.objects.get(pk=project_id, workspace__slug=slug)
if not len(members):
return Response(
{"error": "Atleast one member is required"},
status=status.HTTP_400_BAD_REQUEST,
)
bulk_project_members = []
bulk_issue_props = []
project_members = (
ProjectMember.objects.filter(
workspace__slug=slug,
member_id__in=[member.get("member_id") for member in members],
)
.values("member_id", "sort_order")
.order_by("sort_order")
)
for member in members:
sort_order = [
project_member.get("sort_order")
for project_member in project_members
if str(project_member.get("member_id")) == str(member.get("member_id"))
]
bulk_project_members.append(
ProjectMember(
member_id=member.get("member_id"),
role=member.get("role", 10),
project_id=project_id,
workspace_id=project.workspace_id,
sort_order=sort_order[0] - 10000 if len(sort_order) else 65535,
)
)
bulk_issue_props.append(
IssueProperty(
user_id=member.get("member_id"),
project_id=project_id,
workspace_id=project.workspace_id,
)
)
project_members = ProjectMember.objects.bulk_create(
bulk_project_members,
batch_size=10,
ignore_conflicts=True,
)
_ = IssueProperty.objects.bulk_create(
bulk_issue_props, batch_size=10, ignore_conflicts=True
)
serializer = ProjectMemberSerializer(project_members, many=True)
return Response(serializer.data, status=status.HTTP_201_CREATED)
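Bulk member addition now lives on the `members/` route itself (the POST mapping added in the project URLs above) rather than the removed `AddMemberToProjectEndpoint`. An illustrative request body (ids are placeholders; `role` defaults to 10 when omitted):
```
# Hypothetical POST body for workspaces/<slug>/projects/<project_id>/members/
payload = {
    "members": [
        {"member_id": "6f4f6f3c-0000-0000-0000-000000000001", "role": 20},
        {"member_id": "6f4f6f3c-0000-0000-0000-000000000002"},  # role defaults to 10
    ]
}
```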
def list(self, request, slug, project_id):
project_member = ProjectMember.objects.get(
member=request.user, workspace__slug=slug, project_id=project_id
)
project_members = ProjectMember.objects.filter(
project_id=project_id,
workspace__slug=slug,
member__is_bot=False,
).select_related("project", "member", "workspace")
if project_member.role > 10:
serializer = ProjectMemberAdminSerializer(project_members, many=True)
else:
serializer = ProjectMemberSerializer(project_members, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
def partial_update(self, request, slug, project_id, pk):
project_member = ProjectMember.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id
@ -567,73 +642,6 @@ class ProjectMemberViewSet(BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT)
class AddMemberToProjectEndpoint(BaseAPIView):
permission_classes = [
ProjectBasePermission,
]
def post(self, request, slug, project_id):
members = request.data.get("members", [])
# get the project
project = Project.objects.get(pk=project_id, workspace__slug=slug)
if not len(members):
return Response(
{"error": "Atleast one member is required"},
status=status.HTTP_400_BAD_REQUEST,
)
bulk_project_members = []
bulk_issue_props = []
project_members = (
ProjectMember.objects.filter(
workspace__slug=slug,
member_id__in=[member.get("member_id") for member in members],
)
.values("member_id", "sort_order")
.order_by("sort_order")
)
for member in members:
sort_order = [
project_member.get("sort_order")
for project_member in project_members
if str(project_member.get("member_id"))
== str(member.get("member_id"))
]
bulk_project_members.append(
ProjectMember(
member_id=member.get("member_id"),
role=member.get("role", 10),
project_id=project_id,
workspace_id=project.workspace_id,
sort_order=sort_order[0] - 10000 if len(sort_order) else 65535,
)
)
bulk_issue_props.append(
IssueProperty(
user_id=member.get("member_id"),
project_id=project_id,
workspace_id=project.workspace_id,
)
)
project_members = ProjectMember.objects.bulk_create(
bulk_project_members,
batch_size=10,
ignore_conflicts=True,
)
_ = IssueProperty.objects.bulk_create(
bulk_issue_props, batch_size=10, ignore_conflicts=True
)
serializer = ProjectMemberSerializer(project_members, many=True)
return Response(serializer.data, status=status.HTTP_201_CREATED)
class AddTeamToProjectEndpoint(BaseAPIView):
permission_classes = [
ProjectBasePermission,
@ -933,21 +941,6 @@ class ProjectDeployBoardViewSet(BaseViewSet):
return Response(serializer.data, status=status.HTTP_200_OK)
class ProjectMemberEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
def get(self, request, slug, project_id):
project_members = ProjectMember.objects.filter(
project_id=project_id,
workspace__slug=slug,
member__is_bot=False,
).select_related("project", "member", "workspace")
serializer = ProjectMemberSerializer(project_members, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView):
permission_classes = [
AllowAny,


@ -47,36 +47,45 @@ class StateViewSet(BaseViewSet):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def list(self, request, slug, project_id):
state_dict = dict()
states = StateSerializer(self.get_queryset(), many=True).data
grouped = request.GET.get("grouped", False)
if grouped == "true":
state_dict = {}
for key, value in groupby(
sorted(states, key=lambda state: state["group"]),
lambda state: state.get("group"),
):
state_dict[str(key)] = list(value)
return Response(state_dict, status=status.HTTP_200_OK)
return Response(states, status=status.HTTP_200_OK)
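When `grouped=true` is passed, the list response is keyed by state group instead of being returned flat; an illustrative shape (the group and state names here are examples, not taken from the diff):
```
# Example of the grouped response produced by grouping the serialized
# states on their "group" value.
grouped_states = {
    "backlog": [{"id": "…", "name": "Backlog", "group": "backlog"}],
    "started": [{"id": "…", "name": "In Progress", "group": "started"}],
}
```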
def mark_as_default(self, request, slug, project_id, pk):
# Select all the states which are marked as default
_ = State.objects.filter(
workspace__slug=slug, project_id=project_id, default=True
).update(default=False)
_ = State.objects.filter(
workspace__slug=slug, project_id=project_id, pk=pk
).update(default=True)
return Response(status=status.HTTP_204_NO_CONTENT)
def destroy(self, request, slug, project_id, pk):
state = State.objects.get(
~Q(name="Triage"),
pk=pk, project_id=project_id, workspace__slug=slug,
pk=pk,
project_id=project_id,
workspace__slug=slug,
)
if state.default:
return Response(
{"error": "Default state cannot be deleted"}, status=False
)
return Response({"error": "Default state cannot be deleted"}, status=False)
# Check for any issues in the state
issue_exist = Issue.issue_objects.filter(state=pk).exists()
if issue_exist:
return Response(
{
"error": "The state is not empty, only empty states can be deleted"
},
{"error": "The state is not empty, only empty states can be deleted"},
status=status.HTTP_400_BAD_REQUEST,
)


@ -19,7 +19,6 @@ from plane.db.models import (
WorkspaceMemberInvite,
Issue,
IssueActivity,
WorkspaceMember,
)
from plane.utils.paginator import BasePaginator


@ -6,12 +6,10 @@ from uuid import uuid4
# Django imports
from django.db import IntegrityError
from django.db.models import Prefetch
from django.conf import settings
from django.utils import timezone
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.contrib.sites.shortcuts import get_current_site
from django.db.models import (
Prefetch,
OuterRef,
@ -55,7 +53,6 @@ from . import BaseViewSet
from plane.db.models import (
User,
Workspace,
WorkspaceMember,
WorkspaceMemberInvite,
Team,
ProjectMember,
@ -472,7 +469,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
model = WorkspaceMember
permission_classes = [
WorkSpaceAdminPermission,
WorkspaceEntityPermission,
]
search_fields = [
@ -489,6 +486,25 @@ class WorkSpaceMemberViewSet(BaseViewSet):
.select_related("member")
)
def list(self, request, slug):
workspace_member = WorkspaceMember.objects.get(
member=request.user, workspace__slug=slug
)
workspace_members = WorkspaceMember.objects.filter(
workspace__slug=slug,
member__is_bot=False,
).select_related("workspace", "member")
if workspace_member.role > 10:
serializer = WorkspaceMemberAdminSerializer(workspace_members, many=True)
else:
serializer = WorkSpaceMemberSerializer(
workspace_members,
many=True,
)
return Response(serializer.data, status=status.HTTP_200_OK)
def partial_update(self, request, slug, pk):
workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug)
if request.user.id == workspace_member.member_id:
@ -1252,20 +1268,6 @@ class WorkspaceLabelsEndpoint(BaseAPIView):
return Response(labels, status=status.HTTP_200_OK)
class WorkspaceMembersEndpoint(BaseAPIView):
permission_classes = [
WorkspaceEntityPermission,
]
def get(self, request, slug):
workspace_members = WorkspaceMember.objects.filter(
workspace__slug=slug,
member__is_bot=False,
).select_related("workspace", "member")
serialzier = WorkSpaceMemberSerializer(workspace_members, many=True)
return Response(serialzier.data, status=status.HTTP_200_OK)
class LeaveWorkspaceEndpoint(BaseAPIView):
permission_classes = [
WorkspaceEntityPermission,


@ -409,7 +409,6 @@ def analytic_export_task(email, data, slug):
distribution,
x_axis,
y_axis,
segment,
key,
assignee_details,
label_details,


@ -26,7 +26,7 @@ def email_verification(first_name, email, token, current_site):
from_email_string = settings.EMAIL_FROM
subject = f"Verify your Email!"
subject = "Verify your Email!"
context = {
"first_name": first_name,


@ -11,8 +11,6 @@ from django.conf import settings
from celery import shared_task
from sentry_sdk import capture_exception
# Module imports
from plane.db.models import User
@shared_task
@ -24,7 +22,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
from_email_string = settings.EMAIL_FROM
subject = f"Reset Your Password - Plane"
subject = "Reset Your Password - Plane"
context = {
"first_name": first_name,


@ -6,6 +6,7 @@ import jwt
import logging
from datetime import datetime
# Django imports
from django.conf import settings
from django.core.serializers.json import DjangoJSONEncoder
@ -28,7 +29,6 @@ from plane.db.models import (
User,
IssueProperty,
)
from .workspace_invitation_task import workspace_invitation
from plane.bgtasks.user_welcome_task import send_welcome_slack
@ -59,7 +59,7 @@ def service_importer(service, importer_id):
ignore_conflicts=True,
)
[
_ = [
send_welcome_slack.delay(
str(user.id),
True,
@ -158,7 +158,7 @@ def service_importer(service, importer_id):
)
# Create repo sync
repo_sync = GithubRepositorySync.objects.create(
_ = GithubRepositorySync.objects.create(
repository=repo,
workspace_integration=workspace_integration,
actor=workspace_integration.actor,
@ -180,7 +180,7 @@ def service_importer(service, importer_id):
ImporterSerializer(importer).data,
cls=DjangoJSONEncoder,
)
res = requests.post(
_ = requests.post(
f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/",
json=import_data_json,
headers=headers,


@ -83,7 +83,7 @@ def track_description(
if (
last_activity is not None
and last_activity.field == "description"
and actor_id == last_activity.actor_id
and actor_id == str(last_activity.actor_id)
):
last_activity.created_at = timezone.now()
last_activity.save(update_fields=["created_at"])
@ -132,7 +132,7 @@ def track_parent(
else "",
field="parent",
project_id=project_id,
workspace=workspace_id,
workspace_id=workspace_id,
comment=f"updated the parent issue to",
old_identifier=old_parent.id if old_parent is not None else None,
new_identifier=new_parent.id if new_parent is not None else None,
@ -277,7 +277,7 @@ def track_labels(
issue_activities,
epoch,
):
requested_labels = set([str(lab) for lab in requested_data.get("labels_list", [])])
requested_labels = set([str(lab) for lab in requested_data.get("labels", [])])
current_labels = set([str(lab) for lab in current_instance.get("labels", [])])
added_labels = requested_labels - current_labels
@ -335,9 +335,7 @@ def track_assignees(
issue_activities,
epoch,
):
requested_assignees = set(
[str(asg) for asg in requested_data.get("assignees_list", [])]
)
requested_assignees = set([str(asg) for asg in requested_data.get("assignees", [])])
current_assignees = set([str(asg) for asg in current_instance.get("assignees", [])])
added_assignees = requested_assignees - current_assignees
@ -364,6 +362,7 @@ def track_assignees(
for dropped_assignee in dropped_assginees:
assignee = User.objects.get(pk=dropped_assignee)
issue_activities.append(
IssueActivity(
issue_id=issue_id,
actor_id=actor_id,
verb="updated",
@ -376,6 +375,7 @@ def track_assignees(
old_identifier=assignee.id,
epoch=epoch,
)
)
def track_estimate_points(
@ -419,13 +419,14 @@ def track_archive_at(
issue_activities,
epoch,
):
if current_instance.get("archived_at") != requested_data.get("archived_at"):
if requested_data.get("archived_at") is None:
issue_activities.append(
IssueActivity(
issue_id=issue_id,
project_id=project_id,
workspace_id=workspace_id,
comment=f"has restored the issue",
comment="has restored the issue",
verb="updated",
actor_id=actor_id,
field="archived_at",
@ -440,7 +441,7 @@ def track_archive_at(
issue_id=issue_id,
project_id=project_id,
workspace_id=workspace_id,
comment=f"Plane has archived the issue",
comment="Plane has archived the issue",
verb="updated",
actor_id=actor_id,
field="archived_at",
@ -524,8 +525,8 @@ def update_issue_activity(
"description_html": track_description,
"target_date": track_target_date,
"start_date": track_start_date,
"labels_list": track_labels,
"assignees_list": track_assignees,
"labels": track_labels,
"assignees": track_assignees,
"estimate_point": track_estimate_points,
"archived_at": track_archive_at,
"closed_to": track_closed_to,
@ -537,7 +538,7 @@ def update_issue_activity(
)
for key in requested_data:
func = ISSUE_ACTIVITY_MAPPER.get(key, None)
func = ISSUE_ACTIVITY_MAPPER.get(key)
if func is not None:
func(
requested_data=requested_data,
@ -691,6 +692,10 @@ def create_cycle_issue_activity(
new_cycle = Cycle.objects.filter(
pk=updated_record.get("new_cycle_id", None)
).first()
issue = Issue.objects.filter(pk=updated_record.get("issue_id")).first()
if issue:
issue.updated_at = timezone.now()
issue.save(update_fields=["updated_at"])
issue_activities.append(
IssueActivity(
@ -713,6 +718,10 @@ def create_cycle_issue_activity(
cycle = Cycle.objects.filter(
pk=created_record.get("fields").get("cycle")
).first()
issue = Issue.objects.filter(pk=created_record.get("fields").get("issue")).first()
if issue:
issue.updated_at = timezone.now()
issue.save(update_fields=["updated_at"])
issue_activities.append(
IssueActivity(
@ -747,22 +756,27 @@ def delete_cycle_issue_activity(
)
cycle_id = requested_data.get("cycle_id", "")
cycle_name = requested_data.get("cycle_name", "")
cycle = Cycle.objects.filter(pk=cycle_id).first()
issues = requested_data.get("issues")
for issue in issues:
current_issue = Issue.objects.filter(pk=issue).first()
if issue:
current_issue.updated_at = timezone.now()
current_issue.save(update_fields=["updated_at"])
issue_activities.append(
IssueActivity(
issue_id=issue,
actor_id=actor_id,
verb="deleted",
old_value=cycle.name if cycle is not None else "",
old_value=cycle.name if cycle is not None else cycle_name,
new_value="",
field="cycles",
project_id=project_id,
workspace_id=workspace_id,
comment=f"removed this issue from {cycle.name if cycle is not None else None}",
old_identifier=cycle.id if cycle is not None else None,
comment=f"removed this issue from {cycle.name if cycle is not None else cycle_name}",
old_identifier=cycle_id if cycle_id is not None else None,
epoch=epoch,
)
)
@ -794,6 +808,10 @@ def create_module_issue_activity(
new_module = Module.objects.filter(
pk=updated_record.get("new_module_id", None)
).first()
issue = Issue.objects.filter(pk=updated_record.get("issue_id")).first()
if issue:
issue.updated_at = timezone.now()
issue.save(update_fields=["updated_at"])
issue_activities.append(
IssueActivity(
@ -816,6 +834,10 @@ def create_module_issue_activity(
module = Module.objects.filter(
pk=created_record.get("fields").get("module")
).first()
issue = Issue.objects.filter(pk=created_record.get("fields").get("issue")).first()
if issue:
issue.updated_at = timezone.now()
issue.save(update_fields=["updated_at"])
issue_activities.append(
IssueActivity(
issue_id=created_record.get("fields").get("issue"),
@ -849,22 +871,27 @@ def delete_module_issue_activity(
)
module_id = requested_data.get("module_id", "")
module_name = requested_data.get("module_name", "")
module = Module.objects.filter(pk=module_id).first()
issues = requested_data.get("issues")
for issue in issues:
current_issue = Issue.objects.filter(pk=issue).first()
if issue:
current_issue.updated_at = timezone.now()
current_issue.save(update_fields=["updated_at"])
issue_activities.append(
IssueActivity(
issue_id=issue,
actor_id=actor_id,
verb="deleted",
old_value=module.name if module is not None else "",
old_value=module.name if module is not None else module_name,
new_value="",
field="modules",
project_id=project_id,
workspace_id=workspace_id,
comment=f"removed this issue from ",
old_identifier=module.id if module is not None else None,
comment=f"removed this issue from {module.name if module is not None else module_name}",
old_identifier=module_id if module_id is not None else None,
epoch=epoch,
)
)
@ -1452,10 +1479,11 @@ def issue_activity(
issue_activities = []
project = Project.objects.get(pk=project_id)
issue = Issue.objects.filter(pk=issue_id).first()
workspace_id = project.workspace_id
if issue is not None:
if issue_id is not None:
issue = Issue.objects.filter(pk=issue_id).first()
if issue:
try:
issue.updated_at = timezone.now()
issue.save(update_fields=["updated_at"])
@ -1535,6 +1563,8 @@ def issue_activity(
IssueActivitySerializer(issue_activities_created, many=True).data,
cls=DjangoJSONEncoder,
),
requested_data=requested_data,
current_instance=current_instance,
)
return

View File

@ -60,7 +60,7 @@ def archive_old_issues():
# Check if Issues
if issues:
# Set the archive time to current time
archive_at = timezone.now()
archive_at = timezone.now().date()
issues_to_update = []
for issue in issues:
@ -72,14 +72,14 @@ def archive_old_issues():
Issue.objects.bulk_update(
issues_to_update, ["archived_at"], batch_size=100
)
[
_ = [
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps({"archived_at": str(archive_at)}),
actor_id=str(project.created_by_id),
issue_id=issue.id,
project_id=project_id,
current_instance=None,
current_instance=json.dumps({"archived_at": None}),
subscriber=False,
epoch=int(timezone.now().timestamp())
)

View File

@ -20,7 +20,7 @@ def magic_link(email, key, token, current_site):
from_email_string = settings.EMAIL_FROM
subject = f"Login for Plane"
subject = "Login for Plane"
context = {"magic_url": abs_url, "code": token}

View File

@ -1,20 +1,193 @@
# Python imports
import json
# Django imports
from django.utils import timezone
import uuid
# Module imports
from plane.db.models import IssueSubscriber, Project, IssueAssignee, Issue, Notification
from plane.db.models import (
IssueMention,
IssueSubscriber,
Project,
User,
IssueAssignee,
Issue,
Notification,
IssueComment,
IssueActivity
)
# Third Party imports
from celery import shared_task
from bs4 import BeautifulSoup
# =========== Issue Description Html Parsing and Notification Functions ======================
def update_mentions_for_issue(issue, project, new_mentions, removed_mention):
aggregated_issue_mentions = []
for mention_id in new_mentions:
aggregated_issue_mentions.append(
IssueMention(
mention_id=mention_id,
issue=issue,
project=project,
workspace_id=project.workspace_id
)
)
IssueMention.objects.bulk_create(
aggregated_issue_mentions, batch_size=100)
IssueMention.objects.filter(
issue=issue, mention__in=removed_mention).delete()
def get_new_mentions(requested_instance, current_instance):
# requested_instance is the newer instance of the current issue
# current_instance is the older instance of the current issue, saved in the database
# extract mentions from both instances of the data
mentions_older = extract_mentions(current_instance)
mentions_newer = extract_mentions(requested_instance)
# Getting Set Difference from mentions_newer
new_mentions = [
mention for mention in mentions_newer if mention not in mentions_older]
return new_mentions
# Get Removed Mention
def get_removed_mentions(requested_instance, current_instance):
# requested_instance is the newer instance of the current issue
# current_instance is the older instance of the current issue, saved in the database
# extract mentions from both instances of the data
mentions_older = extract_mentions(current_instance)
mentions_newer = extract_mentions(requested_instance)
# Getting Set Difference from mentions_newer
removed_mentions = [
mention for mention in mentions_older if mention not in mentions_newer]
return removed_mentions
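# Illustrative sketch (not part of the diff): the list difference that
# get_new_mentions and get_removed_mentions perform on the extracted ids.
mentions_older = ["user-1", "user-2"]
mentions_newer = ["user-2", "user-3"]
new_mentions = [m for m in mentions_newer if m not in mentions_older]  # ["user-3"]
removed_mentions = [m for m in mentions_older if m not in mentions_newer]  # ["user-1"]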
# Adds mentions as subscribers
def extract_mentions_as_subscribers(project_id, issue_id, mentions):
# mentions is an array of User IDs representing the FILTERED set of mentioned users
bulk_mention_subscribers = []
for mention_id in mentions:
# If the mentioned user is not already a subscriber, an assignee, or the creator of the issue, they must be sent the mention notification
if not IssueSubscriber.objects.filter(
issue_id=issue_id,
subscriber_id=mention_id,
project_id=project_id,
).exists() and not IssueAssignee.objects.filter(
project_id=project_id, issue_id=issue_id,
assignee_id=mention_id
).exists() and not Issue.objects.filter(
project_id=project_id, pk=issue_id, created_by_id=mention_id
).exists():
project = Project.objects.get(pk=project_id)
bulk_mention_subscribers.append(IssueSubscriber(
workspace_id=project.workspace_id,
project_id=project_id,
issue_id=issue_id,
subscriber_id=mention_id,
))
return bulk_mention_subscribers
# Parse Issue Description & extracts mentions
def extract_mentions(issue_instance):
try:
# issue_instance is a JSON-serialized payload containing the description_html along with the rest of the activity data
mentions = []
# Convert string to dictionary
data = json.loads(issue_instance)
html = data.get("description_html")
soup = BeautifulSoup(html, 'html.parser')
mention_tags = soup.find_all(
'mention-component', attrs={'target': 'users'})
mentions = [mention_tag['id'] for mention_tag in mention_tags]
return list(set(mentions))
except Exception as e:
return []
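# Illustrative sketch (not part of the diff): what extract_mentions pulls out of
# a serialized issue payload whose description_html embeds
# <mention-component target="users" id="..."> tags; the ids below are placeholders.
import json
from bs4 import BeautifulSoup

payload = json.dumps({
    "description_html": (
        '<p>Ping <mention-component target="users" id="user-1"></mention-component> '
        'and <mention-component target="users" id="user-2"></mention-component></p>'
    )
})
html = json.loads(payload).get("description_html")
soup = BeautifulSoup(html, "html.parser")
print(sorted(tag["id"] for tag in soup.find_all("mention-component", attrs={"target": "users"})))
# ['user-1', 'user-2']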
# =========== Comment Parsing and Notification Functions ======================
def extract_comment_mentions(comment_value):
try:
mentions = []
soup = BeautifulSoup(comment_value, 'html.parser')
mentions_tags = soup.find_all(
'mention-component', attrs={'target': 'users'}
)
for mention_tag in mentions_tags:
mentions.append(mention_tag['id'])
return list(set(mentions))
except Exception as e:
return []
def get_new_comment_mentions(new_value, old_value):
mentions_newer = extract_comment_mentions(new_value)
if old_value is None:
return mentions_newer
mentions_older = extract_comment_mentions(old_value)
# Getting Set Difference from mentions_newer
new_mentions = [
mention for mention in mentions_newer if mention not in mentions_older]
return new_mentions
def createMentionNotification(project, notification_comment, issue, actor_id, mention_id, issue_id, activity):
return Notification(
workspace=project.workspace,
sender="in_app:issue_activities:mentioned",
triggered_by_id=actor_id,
receiver_id=mention_id,
entity_identifier=issue_id,
entity_name="issue",
project=project,
message=notification_comment,
data={
"issue": {
"id": str(issue_id),
"name": str(issue.name),
"identifier": str(issue.project.identifier),
"sequence_id": issue.sequence_id,
"state_name": issue.state.name,
"state_group": issue.state.group,
},
"issue_activity": {
"id": str(activity.get("id")),
"verb": str(activity.get("verb")),
"field": str(activity.get("field")),
"actor": str(activity.get("actor_id")),
"new_value": str(activity.get("new_value")),
"old_value": str(activity.get("old_value")),
}
},
)
@shared_task
def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activities_created):
def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activities_created, requested_data, current_instance):
issue_activities_created = (
json.loads(issue_activities_created) if issue_activities_created is not None else None
json.loads(
issue_activities_created) if issue_activities_created is not None else None
)
if type not in [
"cycle.activity.created",
@ -33,39 +206,99 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
]:
# Create Notifications
bulk_notifications = []
issue_subscribers = list(
IssueSubscriber.objects.filter(project_id=project_id, issue_id=issue_id)
.exclude(subscriber_id=actor_id)
.values_list("subscriber", flat=True)
)
"""
Mention Tasks
1. Diff the description and extract the mentions that need a mention notification
2. From the latest set of mentions, extract the users who are not yet subscribers and make them subscribers
"""
# Get new mentions from the newer instance
new_mentions = get_new_mentions(
requested_instance=requested_data, current_instance=current_instance)
removed_mention = get_removed_mentions(
requested_instance=requested_data, current_instance=current_instance)
comment_mentions = []
all_comment_mentions = []
# Get New Subscribers from the mentions of the newer instance
requested_mentions = extract_mentions(
issue_instance=requested_data)
mention_subscribers = extract_mentions_as_subscribers(
project_id=project_id, issue_id=issue_id, mentions=requested_mentions)
for issue_activity in issue_activities_created:
issue_comment = issue_activity.get("issue_comment")
issue_comment_new_value = issue_activity.get("new_value")
issue_comment_old_value = issue_activity.get("old_value")
if issue_comment is not None:
# TODO: Maybe save the comment mentions, so that in future, we can filter out the issues based on comment mentions as well.
all_comment_mentions = all_comment_mentions + extract_comment_mentions(issue_comment_new_value)
new_comment_mentions = get_new_comment_mentions(old_value=issue_comment_old_value, new_value=issue_comment_new_value)
comment_mentions = comment_mentions + new_comment_mentions
comment_mention_subscribers = extract_mentions_as_subscribers( project_id=project_id, issue_id=issue_id, mentions=all_comment_mentions)
"""
We will not send the subscription activity notification to the following sets of users:
- Users newly mentioned in the issue description; they receive a mention notification instead.
- When the activity is comment_created and the comment contains a mention, the "mention_in_comment" notification has to be sent.
- When the activity is comment_updated and the mentions have changed, the "mention_in_comment" notification has to be sent as well.
"""
issue_assignees = list(
IssueAssignee.objects.filter(project_id=project_id, issue_id=issue_id)
.exclude(assignee_id=actor_id)
IssueAssignee.objects.filter(
project_id=project_id, issue_id=issue_id)
.exclude(assignee_id__in=list(new_mentions + comment_mentions))
.values_list("assignee", flat=True)
)
issue_subscribers = issue_subscribers + issue_assignees
issue_subscribers = list(
IssueSubscriber.objects.filter(
project_id=project_id, issue_id=issue_id)
.exclude(subscriber_id__in=list(new_mentions + comment_mentions + [actor_id]))
.values_list("subscriber", flat=True)
)
issue = Issue.objects.filter(pk=issue_id).first()
if (issue.created_by_id is not None and str(issue.created_by_id) != str(actor_id)):
issue_subscribers = issue_subscribers + [issue.created_by_id]
if subscriber:
# add the user to issue subscriber
try:
if str(issue.created_by_id) != str(actor_id) and uuid.UUID(actor_id) not in issue_assignees:
_ = IssueSubscriber.objects.get_or_create(
issue_id=issue_id, subscriber_id=actor_id
project_id=project_id, issue_id=issue_id, subscriber_id=actor_id
)
except Exception as e:
pass
project = Project.objects.get(pk=project_id)
for subscriber in list(set(issue_subscribers)):
issue_subscribers = list(set(issue_subscribers + issue_assignees) - {uuid.UUID(actor_id)})
for subscriber in issue_subscribers:
if subscriber in issue_subscribers:
sender = "in_app:issue_activities:subscribed"
if issue.created_by_id is not None and subscriber == issue.created_by_id:
sender = "in_app:issue_activities:created"
if subscriber in issue_assignees:
sender = "in_app:issue_activities:assigned"
for issue_activity in issue_activities_created:
issue_comment = issue_activity.get("issue_comment")
if issue_comment is not None:
issue_comment = IssueComment.objects.get(
id=issue_comment, issue_id=issue_id, project_id=project_id, workspace_id=project.workspace_id)
bulk_notifications.append(
Notification(
workspace=project.workspace,
sender="in_app:issue_activities",
sender=sender,
triggered_by_id=actor_id,
receiver_id=subscriber,
entity_identifier=issue_id,
@ -89,7 +322,7 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
"new_value": str(issue_activity.get("new_value")),
"old_value": str(issue_activity.get("old_value")),
"issue_comment": str(
issue_activity.get("issue_comment").comment_stripped
issue_comment.comment_stripped
if issue_activity.get("issue_comment") is not None
else ""
),
@ -98,5 +331,88 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
)
)
# Add Mentioned as Issue Subscribers
IssueSubscriber.objects.bulk_create(
mention_subscribers + comment_mention_subscribers, batch_size=100)
last_activity = (
IssueActivity.objects.filter(issue_id=issue_id)
.order_by("-created_at")
.first()
)
actor = User.objects.get(pk=actor_id)
for mention_id in comment_mentions:
if (mention_id != actor_id):
for issue_activity in issue_activities_created:
notification = createMentionNotification(
project=project,
issue=issue,
notification_comment=f"{actor.display_name} has mentioned you in a comment in issue {issue.name}",
actor_id=actor_id,
mention_id=mention_id,
issue_id=issue_id,
activity=issue_activity
)
bulk_notifications.append(notification)
for mention_id in new_mentions:
if (mention_id != actor_id):
if (
last_activity is not None
and last_activity.field == "description"
and actor_id == str(last_activity.actor_id)
):
bulk_notifications.append(
Notification(
workspace=project.workspace,
sender="in_app:issue_activities:mentioned",
triggered_by_id=actor_id,
receiver_id=mention_id,
entity_identifier=issue_id,
entity_name="issue",
project=project,
message=f"You have been mentioned in the issue {issue.name}",
data={
"issue": {
"id": str(issue_id),
"name": str(issue.name),
"identifier": str(issue.project.identifier),
"sequence_id": issue.sequence_id,
"state_name": issue.state.name,
"state_group": issue.state.group,
},
"issue_activity": {
"id": str(last_activity.id),
"verb": str(last_activity.verb),
"field": str(last_activity.field),
"actor": str(last_activity.actor_id),
"new_value": str(last_activity.new_value),
"old_value": str(last_activity.old_value),
},
},
)
)
else:
for issue_activity in issue_activities_created:
notification = createMentionNotification(
project=project,
issue=issue,
notification_comment=f"You have been mentioned in the issue {issue.name}",
actor_id=actor_id,
mention_id=mention_id,
issue_id=issue_id,
activity=issue_activity
)
bulk_notifications.append(notification)
# save new mentions for the particular issue and remove the mentions that have been deleted from the description
update_mentions_for_issue(issue=issue, project=project, new_mentions=new_mentions,
removed_mention=removed_mention)
# Bulk create notifications
Notification.objects.bulk_create(bulk_notifications, batch_size=100)

View File

@ -14,7 +14,7 @@ from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError
# Module imports
from plane.db.models import Workspace, User, WorkspaceMemberInvite
from plane.db.models import Workspace, WorkspaceMemberInvite
@shared_task

View File

@ -4,7 +4,7 @@ from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import plane.db.models.issue
import uuid
class Migration(migrations.Migration):
@ -13,6 +13,26 @@ class Migration(migrations.Migration):
]
operations = [
migrations.CreateModel(
name="issue_mentions",
fields=[
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
('id', models.UUIDField(db_index=True, default=uuid.uuid4,editable=False, primary_key=True, serialize=False, unique=True)),
('mention', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_mention', to=settings.AUTH_USER_MODEL)),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL,related_name='issuemention_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_mention', to='db.issue')),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_issuemention', to='db.project')),
('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='issuemention_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_issuemention', to='db.workspace')),
],
options={
'verbose_name': 'IssueMention',
'verbose_name_plural': 'IssueMentions',
'db_table': 'issue_mentions',
'ordering': ('-created_at',),
},
),
migrations.AlterField(
model_name='issueproperty',
name='properties',

View File

@ -27,12 +27,12 @@ from .issue import (
IssueActivity,
IssueProperty,
IssueComment,
IssueBlocker,
IssueLabel,
IssueAssignee,
Label,
IssueBlocker,
IssueRelation,
IssueMention,
IssueLink,
IssueSequence,
IssueAttachment,

View File

@ -6,7 +6,6 @@ from django.db import models
# Module imports
from plane.db.models import ProjectBaseModel
from plane.db.mixins import AuditModel
class GithubRepository(ProjectBaseModel):

View File

@ -228,6 +228,25 @@ class IssueRelation(ProjectBaseModel):
def __str__(self):
return f"{self.issue.name} {self.related_issue.name}"
class IssueMention(ProjectBaseModel):
issue = models.ForeignKey(
Issue, on_delete=models.CASCADE, related_name="issue_mention"
)
mention = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name="issue_mention",
)
class Meta:
unique_together = ["issue", "mention"]
verbose_name = "Issue Mention"
verbose_name_plural = "Issue Mentions"
db_table = "issue_mentions"
ordering = ("-created_at",)
def __str__(self):
return f"{self.issue.name} {self.mention.email}"
class IssueAssignee(ProjectBaseModel):
issue = models.ForeignKey(

View File

@ -4,9 +4,6 @@ from uuid import uuid4
# Django imports
from django.db import models
from django.conf import settings
from django.template.defaultfilters import slugify
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.core.validators import MinValueValidator, MaxValueValidator
# Module imports

View File

@ -1,33 +0,0 @@
import jwt
import pytz
from django.conf import settings
from django.utils import timezone
from plane.db.models import User
class UserMiddleware(object):
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
try:
if request.headers.get("Authorization"):
authorization_header = request.headers.get("Authorization")
access_token = authorization_header.split(" ")[1]
decoded = jwt.decode(
access_token, settings.SECRET_KEY, algorithms=["HS256"]
)
id = decoded['user_id']
user = User.objects.get(id=id)
user.last_active = timezone.now()
user.token_updated_at = None
user.save()
timezone.activate(pytz.timezone(user.user_timezone))
except Exception as e:
print(e)
response = self.get_response(request)
return response

View File

@ -4,7 +4,6 @@ import ssl
import certifi
import dj_database_url
from urllib.parse import urlparse
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration

View File

@ -2,16 +2,13 @@
"""
# from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic import TemplateView
from django.conf import settings
# from django.conf.urls.static import static
urlpatterns = [
# path("admin/", admin.site.urls),
path("", TemplateView.as_view(template_name="index.html")),
path("api/", include("plane.api.urls")),
path("", include("plane.web.urls")),

View File

@ -12,19 +12,19 @@ from django.db.models.functions import Coalesce, ExtractMonth, ExtractYear, Conc
from plane.db.models import Issue
def annotate_with_monthly_dimension(queryset, field_name):
def annotate_with_monthly_dimension(queryset, field_name, attribute):
# Get the year and the months
year = ExtractYear(field_name)
month = ExtractMonth(field_name)
# Concat the year and month
dimension = Concat(year, Value("-"), month, output_field=CharField())
# Annotate the dimension
return queryset.annotate(dimension=dimension)
return queryset.annotate(**{attribute: dimension})
def extract_axis(queryset, x_axis):
# Format the dimension when the axis is in date
if x_axis in ["created_at", "start_date", "target_date", "completed_at"]:
queryset = annotate_with_monthly_dimension(queryset, x_axis)
queryset = annotate_with_monthly_dimension(queryset, x_axis, "dimension")
return queryset, "dimension"
else:
return queryset.annotate(dimension=F(x_axis)), "dimension"
@ -47,7 +47,7 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None):
#
if segment in ["created_at", "start_date", "target_date", "completed_at"]:
queryset = annotate_with_monthly_dimension(queryset, segment)
queryset = annotate_with_monthly_dimension(queryset, segment, "segmented")
segment = "segmented"
queryset = queryset.values(x_axis)
@ -81,7 +81,6 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
# Total Issues in Cycle or Module
total_issues = queryset.total_issues
if cycle_id:
# Get all dates between the two dates
date_range = [
@ -126,16 +125,13 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
.order_by("date")
)
for date in date_range:
cumulative_pending_issues = total_issues
total_completed = 0
total_completed = sum(
[
item["total_completed"]
for item in completed_issues_distribution
if item["date"] is not None and item["date"] <= date
]
)
cumulative_pending_issues -= total_completed
chart_data[str(date)] = cumulative_pending_issues
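# Illustrative sketch (not part of the diff): the reworked burndown arithmetic.
# Pending issues on a date = total issues minus completed issues accumulated up
# to and including that date. The numbers below are made up.
from datetime import date

total_issues = 10
completed_issues_distribution = [
    {"date": date(2023, 11, 1), "total_completed": 2},
    {"date": date(2023, 11, 3), "total_completed": 3},
]
as_of = date(2023, 11, 3)
total_completed = sum(
    item["total_completed"]
    for item in completed_issues_distribution
    if item["date"] is not None and item["date"] <= as_of
)
print(total_issues - total_completed)  # 5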

View File

@ -127,7 +127,7 @@ def group_results(results_data, group_by, sub_group_by=False):
return main_responsive_dict
else:
response_dict = dict()
response_dict = {}
if group_by == "priority":
response_dict = {

View File

@ -0,0 +1,20 @@
import os
import requests
def slack_oauth(code):
SLACK_OAUTH_URL = os.environ.get("SLACK_OAUTH_URL", False)
SLACK_CLIENT_ID = os.environ.get("SLACK_CLIENT_ID", False)
SLACK_CLIENT_SECRET = os.environ.get("SLACK_CLIENT_SECRET", False)
# Oauth Slack
if SLACK_OAUTH_URL and SLACK_CLIENT_ID and SLACK_CLIENT_SECRET:
response = requests.get(
SLACK_OAUTH_URL,
params={
"code": code,
"client_id": SLACK_CLIENT_ID,
"client_secret": SLACK_CLIENT_SECRET,
},
)
return response.json()
return {}

View File

@ -150,6 +150,17 @@ def filter_assignees(params, filter, method):
filter["assignees__in"] = params.get("assignees")
return filter
def filter_mentions(params, filter, method):
if method == "GET":
mentions = [item for item in params.get("mentions").split(",") if item != 'null']
mentions = filter_valid_uuids(mentions)
if len(mentions) and "" not in mentions:
filter["issue_mention__mention__id__in"] = mentions
else:
if params.get("mentions", None) and len(params.get("mentions")) and params.get("mentions") != 'null':
filter["issue_mention__mention__id__in"] = params.get("mentions")
return filter
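# Illustrative sketch (not part of the diff): the ORM filter the new "mentions"
# key builds for a GET request, assuming a comma-separated list of user UUIDs in
# the query string (the UUID below is a placeholder; filter_valid_uuids is
# skipped here for brevity).
mentions_param = "9c9f7a46-3c2e-4f0e-9d7a-1f2b3c4d5e6f,null"
mentions = [item for item in mentions_param.split(",") if item != "null"]
print({"issue_mention__mention__id__in": mentions})
# {'issue_mention__mention__id__in': ['9c9f7a46-3c2e-4f0e-9d7a-1f2b3c4d5e6f']}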
def filter_created_by(params, filter, method):
if method == "GET":
@ -198,7 +209,7 @@ def filter_start_date(params, filter, method):
date_filter(filter=filter, date_term="start_date", queries=start_dates)
else:
if params.get("start_date", None) and len(params.get("start_date")):
date_filter(filter=filter, date_term="start_date", queries=params.get("start_date", []))
filter["start_date"] = params.get("start_date")
return filter
@ -209,7 +220,7 @@ def filter_target_date(params, filter, method):
date_filter(filter=filter, date_term="target_date", queries=target_dates)
else:
if params.get("target_date", None) and len(params.get("target_date")):
date_filter(filter=filter, date_term="target_date", queries=params.get("target_date", []))
filter["target_date"] = params.get("target_date")
return filter
@ -316,7 +327,7 @@ def filter_start_target_date_issues(params, filter, method):
def issue_filters(query_params, method):
filter = dict()
filter = {}
ISSUE_FILTER = {
"state": filter_state,
@ -326,6 +337,7 @@ def issue_filters(query_params, method):
"parent": filter_parent,
"labels": filter_labels,
"assignees": filter_assignees,
"mentions": filter_mentions,
"created_by": filter_created_by,
"name": filter_name,
"created_at": filter_created_at,

View File

@ -21,12 +21,7 @@ class Cursor:
)
def __repr__(self):
return "<{}: value={} offset={} is_prev={}>".format(
type(self).__name__,
self.value,
self.offset,
int(self.is_prev),
)
return f"{type(self).__name__,}: value={self.value} offset={self.offset}, is_prev={int(self.is_prev)}"
def __bool__(self):
return bool(self.has_results)
@ -176,10 +171,6 @@ class BasePaginator:
**paginator_kwargs,
):
"""Paginate the request"""
assert (paginator and not paginator_kwargs) or (
paginator_cls and paginator_kwargs
)
per_page = self.get_per_page(request, default_per_page, max_per_page)
# Convert the cursor value to integer and float from string

View File

@ -35,3 +35,4 @@ psycopg-c==3.1.10
scout-apm==2.26.1
openpyxl==3.1.2
python-json-logger==2.0.7
beautifulsoup4==4.12.2

View File

@ -10,6 +10,8 @@ x-app-env : &app-env
- SENTRY_DSN=${SENTRY_DSN:-""}
- GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""}
- DOCKERIZED=${DOCKERIZED:-1}
# Gunicorn Workers
- GUNICORN_WORKERS=${GUNICORN_WORKERS:-2}
#DB SETTINGS
- PGHOST=${PGHOST:-plane-db}
- PGDATABASE=${PGDATABASE:-plane}

View File

@ -61,3 +61,5 @@ MINIO_ROOT_PASSWORD="secret-key"
BUCKET_NAME=uploads
FILE_SIZE_LIMIT=5242880
# Gunicorn Workers
GUNICORN_WORKERS=2

View File

@ -27,7 +27,7 @@
"prettier": "latest",
"prettier-plugin-tailwindcss": "^0.5.4",
"tailwindcss": "^3.3.3",
"turbo": "^1.10.14"
"turbo": "^1.10.16"
},
"resolutions": {
"@types/react": "18.2.0"

View File

@ -20,7 +20,7 @@ This allows for extensive customization and flexibility in the Editors created u
1. useEditor - A hook that you can use to extend the Plane editor.
| Prop | Type | Description |
| --- | --- | --- |
| ------------------------- | ---------------------------------------------------------------- | ----------------------------------------------------------------------------------------------------------------------------------------------------------------------- |
| `extensions` | `Extension[]` | An array of custom extensions you want to add into the editor to extend its core features |
| `editorProps` | `EditorProps` | Extend the editor props by passing in a custom props object |
| `uploadFile` | `(file: File) => Promise<string>` | A function that handles file upload. It takes a file as input and handles the process of uploading that file. |
@ -35,7 +35,7 @@ This allows for extensive customization and flexibility in the Editors created u
2. useReadOnlyEditor - A hook that can be used to extend a Read Only instance of the core editor.
| Prop | Type | Description |
| --- | --- | --- |
| -------------- | ------------- | ------------------------------------------------------------------------------------------ |
| `value` | `string` | The initial content of the editor. |
| `forwardedRef` | `any` | Pass this in whenever you want to control the editor's state from an external component |
| `extensions` | `Extension[]` | An array of custom extensions you want to add into the editor to extend its core features |
@ -51,7 +51,11 @@ This allows for extensive customization and flexibility in the Editors created u
5. Extending with Custom Styles
```ts
const customEditorClassNames = getEditorClassNames({ noBorder, borderOnFocus, customClassName });
const customEditorClassNames = getEditorClassNames({
noBorder,
borderOnFocus,
customClassName,
});
```
## Core features

View File

@ -2,6 +2,7 @@
"name": "@plane/editor-core",
"version": "0.0.1",
"description": "Core Editor that powers Plane",
"private": true,
"main": "./dist/index.mjs",
"module": "./dist/index.mjs",
"types": "./dist/index.d.mts",
@ -21,18 +22,18 @@
"check-types": "tsc --noEmit"
},
"peerDependencies": {
"react": "^18.2.0",
"react-dom": "18.2.0",
"next": "12.3.2",
"next-themes": "^0.2.1"
"next-themes": "^0.2.1",
"react": "^18.2.0",
"react-dom": "18.2.0"
},
"dependencies": {
"react-moveable" : "^0.54.2",
"@blueprintjs/popover2": "^2.0.10",
"@tiptap/core": "^2.1.7",
"@tiptap/extension-color": "^2.1.11",
"@tiptap/extension-image": "^2.1.7",
"@tiptap/extension-link": "^2.1.7",
"@tiptap/extension-mention": "^2.1.12",
"@tiptap/extension-table": "^2.1.6",
"@tiptap/extension-table-cell": "^2.1.6",
"@tiptap/extension-table-header": "^2.1.6",
@ -41,12 +42,15 @@
"@tiptap/extension-task-list": "^2.1.7",
"@tiptap/extension-text-style": "^2.1.11",
"@tiptap/extension-underline": "^2.1.7",
"@tiptap/prosemirror-tables": "^1.1.4",
"jsx-dom-cjs": "^8.0.3",
"@tiptap/pm": "^2.1.7",
"@tiptap/react": "^2.1.7",
"@tiptap/starter-kit": "^2.1.10",
"@tiptap/suggestion": "^2.0.4",
"@types/node": "18.15.3",
"@types/react": "^18.2.5",
"@types/react-dom": "18.0.11",
"@types/node": "18.15.3",
"class-variance-authority": "^0.7.0",
"clsx": "^1.2.1",
"eslint": "8.36.0",
@ -54,6 +58,7 @@
"eventsource-parser": "^0.1.0",
"lucide-react": "^0.244.0",
"react-markdown": "^8.0.7",
"react-moveable": "^0.54.2",
"tailwind-merge": "^1.14.0",
"tippy.js": "^6.3.7",
"tiptap-markdown": "^0.8.2",

View File

@ -2,8 +2,11 @@
// import "./styles/tailwind.css";
// import "./styles/editor.css";
export { isCellSelection } from "./ui/extensions/table/table/utilities/is-cell-selection";
// utils
export * from "./lib/utils";
export * from "./ui/extensions/table/table";
export { startImageUpload } from "./ui/plugins/upload-image";
// components

View File

@ -3,18 +3,36 @@ import { UploadImage } from "../types/upload-image";
import { startImageUpload } from "../ui/plugins/upload-image";
export const toggleHeadingOne = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 1 }).run();
else editor.chain().focus().toggleHeading({ level: 1 }).run()
if (range)
editor
.chain()
.focus()
.deleteRange(range)
.setNode("heading", { level: 1 })
.run();
else editor.chain().focus().toggleHeading({ level: 1 }).run();
};
export const toggleHeadingTwo = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 2 }).run();
else editor.chain().focus().toggleHeading({ level: 2 }).run()
if (range)
editor
.chain()
.focus()
.deleteRange(range)
.setNode("heading", { level: 2 })
.run();
else editor.chain().focus().toggleHeading({ level: 2 }).run();
};
export const toggleHeadingThree = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 3 }).run();
else editor.chain().focus().toggleHeading({ level: 3 }).run()
if (range)
editor
.chain()
.focus()
.deleteRange(range)
.setNode("heading", { level: 3 })
.run();
else editor.chain().focus().toggleHeading({ level: 3 }).run();
};
export const toggleBold = (editor: Editor, range?: Range) => {
@ -37,7 +55,8 @@ export const toggleCode = (editor: Editor, range?: Range) => {
else editor.chain().focus().toggleCode().run();
};
export const toggleOrderedList = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).toggleOrderedList().run();
if (range)
editor.chain().focus().deleteRange(range).toggleOrderedList().run();
else editor.chain().focus().toggleOrderedList().run();
};
@ -48,7 +67,7 @@ export const toggleBulletList = (editor: Editor, range?: Range) => {
export const toggleTaskList = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).toggleTaskList().run();
else editor.chain().focus().toggleTaskList().run()
else editor.chain().focus().toggleTaskList().run();
};
export const toggleStrike = (editor: Editor, range?: Range) => {
@ -57,13 +76,37 @@ export const toggleStrike = (editor: Editor, range?: Range) => {
};
export const toggleBlockquote = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).toggleNode("paragraph", "paragraph").toggleBlockquote().run();
else editor.chain().focus().toggleNode("paragraph", "paragraph").toggleBlockquote().run();
if (range)
editor
.chain()
.focus()
.deleteRange(range)
.toggleNode("paragraph", "paragraph")
.toggleBlockquote()
.run();
else
editor
.chain()
.focus()
.toggleNode("paragraph", "paragraph")
.toggleBlockquote()
.run();
};
export const insertTableCommand = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).insertTable({ rows: 3, cols: 3, withHeaderRow: true }).run();
else editor.chain().focus().insertTable({ rows: 3, cols: 3, withHeaderRow: true }).run();
if (range)
editor
.chain()
.focus()
.deleteRange(range)
.insertTable({ rows: 3, cols: 3, withHeaderRow: true })
.run();
else
editor
.chain()
.focus()
.insertTable({ rows: 3, cols: 3, withHeaderRow: true })
.run();
};
export const unsetLinkEditor = (editor: Editor) => {
@ -74,7 +117,14 @@ export const setLinkEditor = (editor: Editor, url: string) => {
editor.chain().focus().setLink({ href: url }).run();
};
export const insertImageCommand = (editor: Editor, uploadFile: UploadImage, setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void, range?: Range) => {
export const insertImageCommand = (
editor: Editor,
uploadFile: UploadImage,
setIsSubmitting?: (
isSubmitting: "submitting" | "submitted" | "saved",
) => void,
range?: Range,
) => {
if (range) editor.chain().focus().deleteRange(range).run();
const input = document.createElement("input");
input.type = "file";
@ -88,4 +138,3 @@ export const insertImageCommand = (editor: Editor, uploadFile: UploadImage, setI
};
input.click();
};

View File

@ -6,19 +6,24 @@ interface EditorClassNames {
customClassName?: string;
}
export const getEditorClassNames = ({ noBorder, borderOnFocus, customClassName }: EditorClassNames) => cn(
'relative w-full max-w-full sm:rounded-lg mt-2 p-3 relative focus:outline-none rounded-md',
noBorder ? '' : 'border border-custom-border-200',
borderOnFocus ? 'focus:border border-custom-border-300' : 'focus:border-0',
customClassName
);
export const getEditorClassNames = ({
noBorder,
borderOnFocus,
customClassName,
}: EditorClassNames) =>
cn(
"relative w-full max-w-full sm:rounded-lg mt-2 p-3 relative focus:outline-none rounded-md",
noBorder ? "" : "border border-custom-border-200",
borderOnFocus ? "focus:border border-custom-border-300" : "focus:border-0",
customClassName,
);
export function cn(...inputs: ClassValue[]) {
return twMerge(clsx(inputs));
}
export const findTableAncestor = (
node: Node | null
node: Node | null,
): HTMLTableElement | null => {
while (node !== null && node.nodeName !== "TABLE") {
node = node.parentNode;
@ -27,10 +32,10 @@ export const findTableAncestor = (
};
export const getTrimmedHTML = (html: string) => {
html = html.replace(/^(<p><\/p>)+/, '');
html = html.replace(/(<p><\/p>)+$/, '');
html = html.replace(/^(<p><\/p>)+/, "");
html = html.replace(/(<p><\/p>)+$/, "");
return html;
}
};
export const isValidHttpUrl = (string: string): boolean => {
let url: URL;
@ -42,4 +47,4 @@ export const isValidHttpUrl = (string: string): boolean => {
}
return url.protocol === "http:" || url.protocol === "https:";
}
};

View File

@ -0,0 +1,10 @@
export type IMentionSuggestion = {
id: string;
type: string;
avatar: string;
title: string;
subtitle: string;
redirect_uri: string;
};
export type IMentionHighlight = string;

View File

@ -7,7 +7,11 @@ interface EditorContainerProps {
children: ReactNode;
}
export const EditorContainer = ({ editor, editorClassNames, children }: EditorContainerProps) => (
export const EditorContainer = ({
editor,
editorClassNames,
children,
}: EditorContainerProps) => (
<div
id="editor-container"
onClick={() => {

View File

@ -1,7 +1,6 @@
import { Editor, EditorContent } from "@tiptap/react";
import { ReactNode } from "react";
import { ImageResizer } from "../extensions/image/image-resize";
import { TableMenu } from "../menus/table-menu";
interface EditorContentProps {
editor: Editor | null;
@ -9,12 +8,16 @@ interface EditorContentProps {
children?: ReactNode;
}
export const EditorContentWrapper = ({ editor, editorContentCustomClassNames = '', children }: EditorContentProps) => (
<div className={`${editorContentCustomClassNames}`}>
{/* @ts-ignore */}
export const EditorContentWrapper = ({
editor,
editorContentCustomClassNames = "",
children,
}: EditorContentProps) => (
<div className={`contentEditor ${editorContentCustomClassNames}`}>
<EditorContent editor={editor} />
{editor?.isEditable && <TableMenu editor={editor} />}
{(editor?.isActive("image") && editor?.isEditable) && <ImageResizer editor={editor} />}
{editor?.isActive("image") && editor?.isEditable && (
<ImageResizer editor={editor} />
)}
{children}
</div>
);

View File

@ -3,7 +3,9 @@ import Moveable from "react-moveable";
export const ImageResizer = ({ editor }: { editor: Editor }) => {
const updateMediaSize = () => {
const imageInfo = document.querySelector(".ProseMirror-selectednode") as HTMLImageElement;
const imageInfo = document.querySelector(
".ProseMirror-selectednode",
) as HTMLImageElement;
if (imageInfo) {
const selection = editor.state.selection;
editor.commands.setImage({
@ -23,8 +25,8 @@ export const ImageResizer = ({ editor }: { editor: Editor }) => {
origin={false}
edge={false}
throttleDrag={0}
keepRatio={true}
resizable={true}
keepRatio
resizable
throttleResize={0}
onResize={({ target, width, height, delta }: any) => {
delta[0] && (target!.style.width = `${width}px`);
@ -33,7 +35,7 @@ export const ImageResizer = ({ editor }: { editor: Editor }) => {
onResizeEnd={() => {
updateMediaSize();
}}
scalable={true}
scalable
renderDirections={["w", "e"]}
onScale={({ target, transform }: any) => {
target!.style.transform = transform;

View File

@ -3,9 +3,16 @@ import TrackImageDeletionPlugin from "../../plugins/delete-image";
import UploadImagesPlugin from "../../plugins/upload-image";
import { DeleteImage } from "../../../types/delete-image";
const ImageExtension = (deleteImage: DeleteImage) => Image.extend({
const ImageExtension = (
deleteImage: DeleteImage,
cancelUploadImage?: () => any,
) =>
Image.extend({
addProseMirrorPlugins() {
return [UploadImagesPlugin(), TrackImageDeletionPlugin(deleteImage)];
return [
UploadImagesPlugin(cancelUploadImage),
TrackImageDeletionPlugin(deleteImage),
];
},
addAttributes() {
return {
@ -18,6 +25,6 @@ const ImageExtension = (deleteImage: DeleteImage) => Image.extend({
},
};
},
});
});
export default ImageExtension;

View File

@ -8,19 +8,25 @@ import TaskList from "@tiptap/extension-task-list";
import { Markdown } from "tiptap-markdown";
import Gapcursor from "@tiptap/extension-gapcursor";
import { CustomTableCell } from "./table/table-cell";
import { Table } from "./table";
import { TableHeader } from "./table/table-header";
import { TableRow } from "@tiptap/extension-table-row";
import TableHeader from "./table/table-header/table-header";
import Table from "./table/table";
import TableCell from "./table/table-cell/table-cell";
import TableRow from "./table/table-row/table-row";
import ImageExtension from "./image";
import { DeleteImage } from "../../types/delete-image";
import { isValidHttpUrl } from "../../lib/utils";
import { IMentionSuggestion } from "../../types/mention-suggestion";
import { Mentions } from "../mentions";
export const CoreEditorExtensions = (
mentionConfig: {
mentionSuggestions: IMentionSuggestion[];
mentionHighlights: string[];
},
deleteFile: DeleteImage,
cancelUploadImage?: () => any,
) => [
StarterKit.configure({
bulletList: {
@ -67,7 +73,7 @@ export const CoreEditorExtensions = (
"text-custom-primary-300 underline underline-offset-[3px] hover:text-custom-primary-500 transition-colors cursor-pointer",
},
}),
ImageExtension(deleteFile).configure({
ImageExtension(deleteFile, cancelUploadImage).configure({
HTMLAttributes: {
class: "rounded-lg border border-custom-border-300",
},
@ -92,6 +98,11 @@ export const CoreEditorExtensions = (
}),
Table,
TableHeader,
CustomTableCell,
TableCell,
TableRow,
];
Mentions(
mentionConfig.mentionSuggestions,
mentionConfig.mentionHighlights,
false,
),
];

View File

@ -1,9 +0,0 @@
import { Table as BaseTable } from "@tiptap/extension-table";
const Table = BaseTable.configure({
resizable: true,
cellMinWidth: 100,
allowTableNodeSelection: true,
});
export { Table };

View File

@ -1,32 +0,0 @@
import { TableCell } from "@tiptap/extension-table-cell";
export const CustomTableCell = TableCell.extend({
addAttributes() {
return {
...this.parent?.(),
isHeader: {
default: false,
parseHTML: (element) => {
isHeader: element.tagName === "TD";
},
renderHTML: (attributes) => {
tag: attributes.isHeader ? "th" : "td";
},
},
};
},
renderHTML({ HTMLAttributes }) {
if (HTMLAttributes.isHeader) {
return [
"th",
{
...HTMLAttributes,
class: `relative ${HTMLAttributes.class}`,
},
["span", { class: "absolute top-0 right-0" }],
0,
];
}
return ["td", HTMLAttributes, 0];
},
});

View File

@ -0,0 +1 @@
export { default as default } from "./table-cell";

View File

@ -0,0 +1,58 @@
import { mergeAttributes, Node } from "@tiptap/core";
export interface TableCellOptions {
HTMLAttributes: Record<string, any>;
}
export default Node.create<TableCellOptions>({
name: "tableCell",
addOptions() {
return {
HTMLAttributes: {},
};
},
content: "paragraph+",
addAttributes() {
return {
colspan: {
default: 1,
},
rowspan: {
default: 1,
},
colwidth: {
default: null,
parseHTML: (element) => {
const colwidth = element.getAttribute("colwidth");
const value = colwidth ? [parseInt(colwidth, 10)] : null;
return value;
},
},
background: {
default: "none",
},
};
},
tableRole: "cell",
isolating: true,
parseHTML() {
return [{ tag: "td" }];
},
renderHTML({ node, HTMLAttributes }) {
return [
"td",
mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, {
style: `background-color: ${node.attrs.background}`,
}),
0,
];
},
});

View File

@ -1,7 +0,0 @@
import { TableHeader as BaseTableHeader } from "@tiptap/extension-table-header";
const TableHeader = BaseTableHeader.extend({
content: "paragraph",
});
export { TableHeader };

View File

@ -0,0 +1 @@
export { default as default } from "./table-header";

View File

@ -0,0 +1,57 @@
import { mergeAttributes, Node } from "@tiptap/core";
export interface TableHeaderOptions {
HTMLAttributes: Record<string, any>;
}
export default Node.create<TableHeaderOptions>({
name: "tableHeader",
addOptions() {
return {
HTMLAttributes: {},
};
},
content: "paragraph+",
addAttributes() {
return {
colspan: {
default: 1,
},
rowspan: {
default: 1,
},
colwidth: {
default: null,
parseHTML: (element) => {
const colwidth = element.getAttribute("colwidth");
const value = colwidth ? [parseInt(colwidth, 10)] : null;
return value;
},
},
background: {
default: "rgb(var(--color-primary-100))",
},
};
},
tableRole: "header_cell",
isolating: true,
parseHTML() {
return [{ tag: "th" }];
},
renderHTML({ node, HTMLAttributes }) {
return [
"th",
mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, {
style: `background-color: ${node.attrs.background}`,
}),
0,
];
},
});

View File

@ -0,0 +1 @@
export { default as default } from "./table-row";

View File

@ -0,0 +1,31 @@
import { mergeAttributes, Node } from "@tiptap/core";
export interface TableRowOptions {
HTMLAttributes: Record<string, any>;
}
export default Node.create<TableRowOptions>({
name: "tableRow",
addOptions() {
return {
HTMLAttributes: {},
};
},
content: "(tableCell | tableHeader)*",
tableRole: "row",
parseHTML() {
return [{ tag: "tr" }];
},
renderHTML({ HTMLAttributes }) {
return [
"tr",
mergeAttributes(this.options.HTMLAttributes, HTMLAttributes),
0,
];
},
});

View File

@ -0,0 +1,55 @@
const icons = {
colorPicker: `<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" style="transform: ;msFilter:;"><path fill="rgb(var(--color-text-300))" d="M20 14c-.092.064-2 2.083-2 3.5 0 1.494.949 2.448 2 2.5.906.044 2-.891 2-2.5 0-1.5-1.908-3.436-2-3.5zM9.586 20c.378.378.88.586 1.414.586s1.036-.208 1.414-.586l7-7-.707-.707L11 4.586 8.707 2.293 7.293 3.707 9.586 6 4 11.586c-.378.378-.586.88-.586 1.414s.208 1.036.586 1.414L9.586 20zM11 7.414 16.586 13H5.414L11 7.414z"></path></svg>`,
deleteColumn: `<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="24" height="24"><path fill="#e53e3e" d="M0 0H24V24H0z"/><path d="M12 3c.552 0 1 .448 1 1v8c.835-.628 1.874-1 3-1 2.761 0 5 2.239 5 5s-2.239 5-5 5c-1.032 0-1.99-.313-2.787-.848L13 20c0 .552-.448 1-1 1H6c-.552 0-1-.448-1-1V4c0-.552.448-1 1-1h6zm-1 2H7v14h4V5zm8 10h-6v2h6v-2z"/></svg>`,
deleteRow: `<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="24" height="24"><path fill="#e53e3e" d="M0 0H24V24H0z"/><path d="M20 5c.552 0 1 .448 1 1v6c0 .552-.448 1-1 1 .628.835 1 1.874 1 3 0 2.761-2.239 5-5 5s-5-2.239-5-5c0-1.126.372-2.165 1-3H4c-.552 0-1-.448-1-1V6c0-.552.448-1 1-1h16zm-7 10v2h6v-2h-6zm6-8H5v4h14V7z"/></svg>`,
insertLeftTableIcon: `<svg
xmlns="http://www.w3.org/2000/svg"
width={24}
height={24}
viewBox="0 -960 960 960"
>
<path
d="M224.617-140.001q-30.307 0-51.307-21-21-21-21-51.308v-535.382q0-30.308 21-51.308t51.307-21H360q30.307 0 51.307 21 21 21 21 51.308v535.382q0 30.308-21 51.308t-51.307 21H224.617Zm375.383 0q-30.307 0-51.307-21-21-21-21-51.308v-535.382q0-30.308 21-51.308t51.307-21h135.383q30.307 0 51.307 21 21 21 21 51.308v535.382q0 30.308-21 51.308t-51.307 21H600Zm147.691-607.69q0-4.616-3.846-8.463-3.846-3.846-8.462-3.846H600q-4.616 0-8.462 3.846-3.847 3.847-3.847 8.463v535.382q0 4.616 3.847 8.463Q595.384-200 600-200h135.383q4.616 0 8.462-3.846 3.846-3.847 3.846-8.463v-535.382ZM587.691-200h160-160Z"
fill="rgb(var(--color-text-300))"
/>
</svg>
`,
insertRightTableIcon: `<svg
xmlns="http://www.w3.org/2000/svg"
width={24}
height={24}
viewBox="0 -960 960 960"
>
<path
d="M600-140.001q-30.307 0-51.307-21-21-21-21-51.308v-535.382q0-30.308 21-51.308t51.307-21h135.383q30.307 0 51.307 21 21 21 21 51.308v535.382q0 30.308-21 51.308t-51.307 21H600Zm-375.383 0q-30.307 0-51.307-21-21-21-21-51.308v-535.382q0-30.308 21-51.308t51.307-21H360q30.307 0 51.307 21 21 21 21 51.308v535.382q0 30.308-21 51.308t-51.307 21H224.617Zm-12.308-607.69v535.382q0 4.616 3.846 8.463 3.846 3.846 8.462 3.846H360q4.616 0 8.462-3.846 3.847-3.847 3.847-8.463v-535.382q0-4.616-3.847-8.463Q364.616-760 360-760H224.617q-4.616 0-8.462 3.846-3.846 3.847-3.846 8.463Zm160 547.691h-160 160Z"
fill="rgb(var(--color-text-300))"
/>
</svg>
`,
insertTopTableIcon: `<svg
xmlns="http://www.w3.org/2000/svg"
width={24}
height={24}
viewBox="0 -960 960 960"
>
<path
d="M212.309-527.693q-30.308 0-51.308-21t-21-51.307v-135.383q0-30.307 21-51.307 21-21 51.308-21h535.382q30.308 0 51.308 21t21 51.307V-600q0 30.307-21 51.307-21 21-51.308 21H212.309Zm0 375.383q-30.308 0-51.308-21t-21-51.307V-360q0-30.307 21-51.307 21-21 51.308-21h535.382q30.308 0 51.308 21t21 51.307v135.383q0 30.307-21 51.307-21 21-51.308 21H212.309Zm0-59.999h535.382q4.616 0 8.463-3.846 3.846-3.846 3.846-8.462V-360q0-4.616-3.846-8.462-3.847-3.847-8.463-3.847H212.309q-4.616 0-8.463 3.847Q200-364.616 200-360v135.383q0 4.616 3.846 8.462 3.847 3.846 8.463 3.846Zm-12.309-160v160-160Z"
fill="rgb(var(--color-text-300))"
/>
</svg>
`,
insertBottomTableIcon: `<svg
xmlns="http://www.w3.org/2000/svg"
width={24}
height={24}
viewBox="0 -960 960 960"
>
<path
d="M212.309-152.31q-30.308 0-51.308-21t-21-51.307V-360q0-30.307 21-51.307 21-21 51.308-21h535.382q30.308 0 51.308 21t21 51.307v135.383q0 30.307-21 51.307-21 21-51.308 21H212.309Zm0-375.383q-30.308 0-51.308-21t-21-51.307v-135.383q0-30.307 21-51.307 21-21 51.308-21h535.382q30.308 0 51.308 21t21 51.307V-600q0 30.307-21 51.307-21 21-51.308 21H212.309Zm535.382-219.998H212.309q-4.616 0-8.463 3.846-3.846 3.846-3.846 8.462V-600q0 4.616 3.846 8.462 3.847 3.847 8.463 3.847h535.382q4.616 0 8.463-3.847Q760-595.384 760-600v-135.383q0-4.616-3.846-8.462-3.847-3.846-8.463-3.846ZM200-587.691v-160 160Z"
fill="rgb(var(--color-text-300))"
/>
</svg>
`,
};
export default icons;

View File

@ -0,0 +1 @@
export { default as default } from "./table";

View File

@ -0,0 +1,122 @@
import { Plugin, PluginKey, TextSelection } from "@tiptap/pm/state";
import { findParentNode } from "@tiptap/core";
import { DecorationSet, Decoration } from "@tiptap/pm/view";
const key = new PluginKey("tableControls");
export function tableControls() {
return new Plugin({
key,
state: {
init() {
return new TableControlsState();
},
apply(tr, prev) {
return prev.apply(tr);
},
},
props: {
handleDOMEvents: {
mousemove: (view, event) => {
const pluginState = key.getState(view.state);
if (
!(event.target as HTMLElement).closest(".tableWrapper") &&
pluginState.values.hoveredTable
) {
return view.dispatch(
view.state.tr.setMeta(key, {
setHoveredTable: null,
setHoveredCell: null,
}),
);
}
const pos = view.posAtCoords({
left: event.clientX,
top: event.clientY,
});
if (!pos) return;
const table = findParentNode((node) => node.type.name === "table")(
TextSelection.create(view.state.doc, pos.pos),
);
const cell = findParentNode(
(node) =>
node.type.name === "tableCell" ||
node.type.name === "tableHeader",
)(TextSelection.create(view.state.doc, pos.pos));
if (!table || !cell) return;
if (pluginState.values.hoveredCell?.pos !== cell.pos) {
return view.dispatch(
view.state.tr.setMeta(key, {
setHoveredTable: table,
setHoveredCell: cell,
}),
);
}
},
},
decorations: (state) => {
const pluginState = key.getState(state);
if (!pluginState) {
return null;
}
const { hoveredTable, hoveredCell } = pluginState.values;
const docSize = state.doc.content.size;
if (
hoveredTable &&
hoveredCell &&
hoveredTable.pos < docSize &&
hoveredCell.pos < docSize
) {
const decorations = [
Decoration.node(
hoveredTable.pos,
hoveredTable.pos + hoveredTable.node.nodeSize,
{},
{
hoveredTable,
hoveredCell,
},
),
];
return DecorationSet.create(state.doc, decorations);
}
return null;
},
},
});
}
class TableControlsState {
values;
constructor(props = {}) {
this.values = {
hoveredTable: null,
hoveredCell: null,
...props,
};
}
apply(tr: any) {
const actions = tr.getMeta(key);
if (actions?.setHoveredTable !== undefined) {
this.values.hoveredTable = actions.setHoveredTable;
}
if (actions?.setHoveredCell !== undefined) {
this.values.hoveredCell = actions.setHoveredCell;
}
return this;
}
}

View File

@ -0,0 +1,536 @@
import { h } from "jsx-dom-cjs";
import { Node as ProseMirrorNode } from "@tiptap/pm/model";
import { Decoration, NodeView } from "@tiptap/pm/view";
import tippy, { Instance, Props } from "tippy.js";
import { Editor } from "@tiptap/core";
import {
CellSelection,
TableMap,
updateColumnsOnResize,
} from "@tiptap/prosemirror-tables";
import icons from "./icons";
export function updateColumns(
node: ProseMirrorNode,
colgroup: HTMLElement,
table: HTMLElement,
cellMinWidth: number,
overrideCol?: number,
overrideValue?: any,
) {
let totalWidth = 0;
let fixedWidth = true;
let nextDOM = colgroup.firstChild as HTMLElement;
const row = node.firstChild;
if (!row) return;
for (let i = 0, col = 0; i < row.childCount; i += 1) {
const { colspan, colwidth } = row.child(i).attrs;
for (let j = 0; j < colspan; j += 1, col += 1) {
const hasWidth =
overrideCol === col ? overrideValue : colwidth && colwidth[j];
const cssWidth = hasWidth ? `${hasWidth}px` : "";
totalWidth += hasWidth || cellMinWidth;
if (!hasWidth) {
fixedWidth = false;
}
if (!nextDOM) {
colgroup.appendChild(document.createElement("col")).style.width =
cssWidth;
} else {
if (nextDOM.style.width !== cssWidth) {
nextDOM.style.width = cssWidth;
}
nextDOM = nextDOM.nextSibling as HTMLElement;
}
}
}
while (nextDOM) {
const after = nextDOM.nextSibling;
nextDOM.parentNode?.removeChild(nextDOM);
nextDOM = after as HTMLElement;
}
if (fixedWidth) {
table.style.width = `${totalWidth}px`;
table.style.minWidth = "";
} else {
table.style.width = "";
table.style.minWidth = `${totalWidth}px`;
}
}
const defaultTippyOptions: Partial<Props> = {
allowHTML: true,
arrow: false,
trigger: "click",
animation: "scale-subtle",
theme: "light-border no-padding",
interactive: true,
hideOnClick: true,
placement: "right",
};
function setCellsBackgroundColor(editor: Editor, backgroundColor) {
return editor
.chain()
.focus()
.updateAttributes("tableCell", {
background: backgroundColor,
})
.updateAttributes("tableHeader", {
background: backgroundColor,
})
.run();
}
const columnsToolboxItems = [
{
label: "Add Column Before",
icon: icons.insertLeftTableIcon,
action: ({ editor }: { editor: Editor }) =>
editor.chain().focus().addColumnBefore().run(),
},
{
label: "Add Column After",
icon: icons.insertRightTableIcon,
action: ({ editor }: { editor: Editor }) =>
editor.chain().focus().addColumnAfter().run(),
},
{
label: "Pick Column Color",
icon: icons.colorPicker,
action: ({
editor,
triggerButton,
controlsContainer,
}: {
editor: Editor;
triggerButton: HTMLElement;
controlsContainer;
}) => {
createColorPickerToolbox({
triggerButton,
tippyOptions: {
appendTo: controlsContainer,
},
onSelectColor: (color) => setCellsBackgroundColor(editor, color),
});
},
},
{
label: "Delete Column",
icon: icons.deleteColumn,
action: ({ editor }: { editor: Editor }) =>
editor.chain().focus().deleteColumn().run(),
},
];
const rowsToolboxItems = [
{
label: "Add Row Above",
icon: icons.insertTopTableIcon,
action: ({ editor }: { editor: Editor }) =>
editor.chain().focus().addRowBefore().run(),
},
{
label: "Add Row Below",
icon: icons.insertBottomTableIcon,
action: ({ editor }: { editor: Editor }) =>
editor.chain().focus().addRowAfter().run(),
},
{
label: "Pick Row Color",
icon: icons.colorPicker,
action: ({
editor,
triggerButton,
controlsContainer,
}: {
editor: Editor;
triggerButton: HTMLButtonElement;
controlsContainer:
| Element
| "parent"
| ((ref: Element) => Element)
| undefined;
}) => {
createColorPickerToolbox({
triggerButton,
tippyOptions: {
appendTo: controlsContainer,
},
onSelectColor: (color) => setCellsBackgroundColor(editor, color),
});
},
},
{
label: "Delete Row",
icon: icons.deleteRow,
action: ({ editor }: { editor: Editor }) =>
editor.chain().focus().deleteRow().run(),
},
];
function createToolbox({
triggerButton,
items,
tippyOptions,
onClickItem,
}: {
triggerButton: HTMLElement;
items: { icon: string; label: string }[];
tippyOptions: any;
onClickItem: any;
}): Instance<Props> {
const toolbox = tippy(triggerButton, {
content: h(
"div",
{ className: "tableToolbox" },
items.map((item) =>
h(
"div",
{
className: "toolboxItem",
itemType: "button",
onClick() {
onClickItem(item);
},
},
[
h("div", {
className: "iconContainer",
innerHTML: item.icon,
}),
h("div", { className: "label" }, item.label),
],
),
),
),
...tippyOptions,
});
return Array.isArray(toolbox) ? toolbox[0] : toolbox;
}
function createColorPickerToolbox({
triggerButton,
tippyOptions,
onSelectColor = () => {},
}: {
triggerButton: HTMLElement;
tippyOptions: Partial<Props>;
onSelectColor?: (color: string) => void;
}) {
const items = {
Default: "rgb(var(--color-primary-100))",
Orange: "#FFE5D1",
Grey: "#F1F1F1",
Yellow: "#FEF3C7",
Green: "#DCFCE7",
Red: "#FFDDDD",
Blue: "#D9E4FF",
Pink: "#FFE8FA",
Purple: "#E8DAFB",
};
const colorPicker = tippy(triggerButton, {
...defaultTippyOptions,
content: h(
"div",
{ className: "tableColorPickerToolbox" },
Object.entries(items).map(([key, value]) =>
h(
"div",
{
className: "toolboxItem",
itemType: "button",
onClick: () => {
onSelectColor(value);
colorPicker.hide();
},
},
[
h("div", {
className: "colorContainer",
style: {
backgroundColor: value,
},
}),
h(
"div",
{
className: "label",
},
key,
),
],
),
),
),
onHidden: (instance) => {
instance.destroy();
},
showOnCreate: true,
...tippyOptions,
});
return colorPicker;
}
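// Custom ProseMirror node view that renders the table along with hover-positioned
// row/column controls and their dropdown toolboxes.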
export class TableView implements NodeView {
node: ProseMirrorNode;
cellMinWidth: number;
decorations: Decoration[];
editor: Editor;
getPos: () => number;
hoveredCell;
map: TableMap;
root: HTMLElement;
table: HTMLElement;
colgroup: HTMLElement;
tbody: HTMLElement;
rowsControl?: HTMLElement;
columnsControl?: HTMLElement;
columnsToolbox?: Instance<Props>;
rowsToolbox?: Instance<Props>;
controls?: HTMLElement;
get dom() {
return this.root;
}
get contentDOM() {
return this.tbody;
}
constructor(
node: ProseMirrorNode,
cellMinWidth: number,
decorations: Decoration[],
editor: Editor,
getPos: () => number,
) {
this.node = node;
this.cellMinWidth = cellMinWidth;
this.decorations = decorations;
this.editor = editor;
this.getPos = getPos;
this.hoveredCell = null;
this.map = TableMap.get(node);
if (editor.isEditable) {
this.rowsControl = h(
"div",
{ className: "rowsControl" },
h("div", {
itemType: "button",
className: "rowsControlDiv",
onClick: () => this.selectRow(),
}),
);
this.columnsControl = h(
"div",
{ className: "columnsControl" },
h("div", {
itemType: "button",
className: "columnsControlDiv",
onClick: () => this.selectColumn(),
}),
);
this.controls = h(
"div",
{ className: "tableControls", contentEditable: "false" },
this.rowsControl,
this.columnsControl,
);
this.columnsToolbox = createToolbox({
triggerButton: this.columnsControl.querySelector(".columnsControlDiv"),
items: columnsToolboxItems,
tippyOptions: {
...defaultTippyOptions,
appendTo: this.controls,
},
onClickItem: (item) => {
item.action({
editor: this.editor,
triggerButton: this.columnsControl?.firstElementChild,
controlsContainer: this.controls,
});
this.columnsToolbox?.hide();
},
});
this.rowsToolbox = createToolbox({
triggerButton: this.rowsControl.firstElementChild,
items: rowsToolboxItems,
tippyOptions: {
...defaultTippyOptions,
appendTo: this.controls,
},
onClickItem: (item) => {
item.action({
editor: this.editor,
triggerButton: this.rowsControl?.firstElementChild,
controlsContainer: this.controls,
});
this.rowsToolbox?.hide();
},
});
}
// Table
this.colgroup = h(
"colgroup",
null,
Array.from({ length: this.map.width }, () => h("col")),
);
this.tbody = h("tbody");
this.table = h("table", null, this.colgroup, this.tbody);
this.root = h(
"div",
{
className: "tableWrapper controls--disabled",
},
this.controls,
this.table,
);
this.render();
}
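// Called by ProseMirror whenever the table node or its decorations change.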
update(node: ProseMirrorNode, decorations: Decoration[]) {
if (node.type !== this.node.type) {
return false;
}
this.node = node;
this.decorations = decorations;
this.map = TableMap.get(this.node);
if (this.editor.isEditable) {
this.updateControls();
}
this.render();
return true;
}
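// Keeps the <colgroup> in sync with the table map and recomputes the column widths.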
render() {
if (this.colgroup.children.length !== this.map.width) {
const cols = Array.from({ length: this.map.width }, () => h("col"));
this.colgroup.replaceChildren(...cols);
}
updateColumnsOnResize(
this.node,
this.colgroup,
this.table,
this.cellMinWidth,
);
}
ignoreMutation() {
return true;
}
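// Reads the hovered table/cell out of the decoration specs and positions the row and
// column controls next to the hovered cell; disables them when nothing is hovered.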
updateControls() {
const { hoveredTable: table, hoveredCell: cell } = Object.values(
this.decorations,
).reduce(
(acc, curr) => {
if (curr.spec.hoveredCell !== undefined) {
acc["hoveredCell"] = curr.spec.hoveredCell;
}
if (curr.spec.hoveredTable !== undefined) {
acc["hoveredTable"] = curr.spec.hoveredTable;
}
return acc;
},
{} as Record<string, HTMLElement>,
) as any;
if (table === undefined || cell === undefined) {
return this.root.classList.add("controls--disabled");
}
this.root.classList.remove("controls--disabled");
this.hoveredCell = cell;
const cellDom = this.editor.view.nodeDOM(cell.pos) as HTMLElement;
const tableRect = this.table.getBoundingClientRect();
const cellRect = cellDom.getBoundingClientRect();
this.columnsControl!.style.left = `${
cellRect.left - tableRect.left - this.table.parentElement!.scrollLeft
}px`;
this.columnsControl!.style.width = `${cellRect.width}px`;
this.rowsControl!.style.top = `${cellRect.top - tableRect.top}px`;
this.rowsControl!.style.height = `${cellRect.height}px`;
}
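// Selects the entire column containing the hovered cell via a CellSelection that spans
// from the hovered cell down to the cell in the last row of that column.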
selectColumn() {
if (!this.hoveredCell) return;
const colIndex = this.map.colCount(
this.hoveredCell.pos - (this.getPos() + 1),
);
const anchorCellPos = this.hoveredCell.pos;
const headCellPos =
this.map.map[colIndex + this.map.width * (this.map.height - 1)] +
(this.getPos() + 1);
const cellSelection = CellSelection.create(
this.editor.view.state.doc,
anchorCellPos,
headCellPos,
);
this.editor.view.dispatch(
// @ts-ignore
this.editor.state.tr.setSelection(cellSelection),
);
}
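// Selects the entire row containing the hovered cell via a CellSelection that spans
// from the hovered cell to the last cell of that row.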
selectRow() {
if (!this.hoveredCell) return;
const anchorCellPos = this.hoveredCell.pos;
const anchorCellIndex = this.map.map.indexOf(
anchorCellPos - (this.getPos() + 1),
);
const headCellPos =
this.map.map[anchorCellIndex + (this.map.width - 1)] +
(this.getPos() + 1);
const cellSelection = CellSelection.create(
this.editor.state.doc,
anchorCellPos,
headCellPos,
);
this.editor.view.dispatch(
// @ts-ignore
this.editor.view.state.tr.setSelection(cellSelection),
);
}
}

View File

@ -0,0 +1,312 @@
import { TextSelection } from "@tiptap/pm/state";
import {
callOrReturn,
getExtensionField,
mergeAttributes,
Node,
ParentConfig,
} from "@tiptap/core";
import {
addColumnAfter,
addColumnBefore,
addRowAfter,
addRowBefore,
CellSelection,
columnResizing,
deleteColumn,
deleteRow,
deleteTable,
fixTables,
goToNextCell,
mergeCells,
setCellAttr,
splitCell,
tableEditing,
toggleHeader,
toggleHeaderCell,
} from "@tiptap/prosemirror-tables";
import { tableControls } from "./table-controls";
import { TableView } from "./table-view";
import { createTable } from "./utilities/create-table";
import { deleteTableWhenAllCellsSelected } from "./utilities/delete-table-when-all-cells-selected";
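// Options accepted by the table extension; they mirror tiptap's stock table extension options.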
export interface TableOptions {
HTMLAttributes: Record<string, any>;
resizable: boolean;
handleWidth: number;
cellMinWidth: number;
lastColumnResizable: boolean;
allowTableNodeSelection: boolean;
}
declare module "@tiptap/core" {
interface Commands<ReturnType> {
table: {
insertTable: (options?: {
rows?: number;
cols?: number;
withHeaderRow?: boolean;
}) => ReturnType;
addColumnBefore: () => ReturnType;
addColumnAfter: () => ReturnType;
deleteColumn: () => ReturnType;
addRowBefore: () => ReturnType;
addRowAfter: () => ReturnType;
deleteRow: () => ReturnType;
deleteTable: () => ReturnType;
mergeCells: () => ReturnType;
splitCell: () => ReturnType;
toggleHeaderColumn: () => ReturnType;
toggleHeaderRow: () => ReturnType;
toggleHeaderCell: () => ReturnType;
mergeOrSplit: () => ReturnType;
setCellAttribute: (name: string, value: any) => ReturnType;
goToNextCell: () => ReturnType;
goToPreviousCell: () => ReturnType;
fixTables: () => ReturnType;
setCellSelection: (position: {
anchorCell: number;
headCell?: number;
}) => ReturnType;
};
}
interface NodeConfig<Options, Storage> {
tableRole?:
| string
| ((this: {
name: string;
options: Options;
storage: Storage;
parent: ParentConfig<NodeConfig<Options>>["tableRole"];
}) => string);
}
}
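// The table node itself: wires up the @tiptap/prosemirror-tables commands, keyboard
// shortcuts, column resizing and the custom TableView node view.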
export default Node.create({
name: "table",
addOptions() {
return {
HTMLAttributes: {},
resizable: true,
handleWidth: 5,
cellMinWidth: 100,
lastColumnResizable: true,
allowTableNodeSelection: true,
};
},
content: "tableRow+",
tableRole: "table",
isolating: true,
group: "block",
allowGapCursor: false,
parseHTML() {
return [{ tag: "table" }];
},
renderHTML({ HTMLAttributes }) {
return [
"table",
mergeAttributes(this.options.HTMLAttributes, HTMLAttributes),
["tbody", 0],
];
},
addCommands() {
return {
insertTable:
({ rows = 3, cols = 3, withHeaderRow = true } = {}) =>
({ tr, dispatch, editor }) => {
const node = createTable(editor.schema, rows, cols, withHeaderRow);
if (dispatch) {
const offset = tr.selection.anchor + 1;
tr.replaceSelectionWith(node)
.scrollIntoView()
.setSelection(TextSelection.near(tr.doc.resolve(offset)));
}
return true;
},
addColumnBefore:
() =>
({ state, dispatch }) =>
addColumnBefore(state, dispatch),
addColumnAfter:
() =>
({ state, dispatch }) =>
addColumnAfter(state, dispatch),
deleteColumn:
() =>
({ state, dispatch }) =>
deleteColumn(state, dispatch),
addRowBefore:
() =>
({ state, dispatch }) =>
addRowBefore(state, dispatch),
addRowAfter:
() =>
({ state, dispatch }) =>
addRowAfter(state, dispatch),
deleteRow:
() =>
({ state, dispatch }) =>
deleteRow(state, dispatch),
deleteTable:
() =>
({ state, dispatch }) =>
deleteTable(state, dispatch),
mergeCells:
() =>
({ state, dispatch }) =>
mergeCells(state, dispatch),
splitCell:
() =>
({ state, dispatch }) =>
splitCell(state, dispatch),
toggleHeaderColumn:
() =>
({ state, dispatch }) =>
toggleHeader("column")(state, dispatch),
toggleHeaderRow:
() =>
({ state, dispatch }) =>
toggleHeader("row")(state, dispatch),
toggleHeaderCell:
() =>
({ state, dispatch }) =>
toggleHeaderCell(state, dispatch),
mergeOrSplit:
() =>
({ state, dispatch }) => {
if (mergeCells(state, dispatch)) {
return true;
}
return splitCell(state, dispatch);
},
setCellAttribute:
(name, value) =>
({ state, dispatch }) =>
setCellAttr(name, value)(state, dispatch),
goToNextCell:
() =>
({ state, dispatch }) =>
goToNextCell(1)(state, dispatch),
goToPreviousCell:
() =>
({ state, dispatch }) =>
goToNextCell(-1)(state, dispatch),
fixTables:
() =>
({ state, dispatch }) => {
if (dispatch) {
fixTables(state);
}
return true;
},
setCellSelection:
(position) =>
({ tr, dispatch }) => {
if (dispatch) {
const selection = CellSelection.create(
tr.doc,
position.anchorCell,
position.headCell,
);
// @ts-ignore
tr.setSelection(selection);
}
return true;
},
};
},
addKeyboardShortcuts() {
return {
Tab: () => {
if (this.editor.commands.goToNextCell()) {
return true;
}
if (!this.editor.can().addRowAfter()) {
return false;
}
return this.editor.chain().addRowAfter().goToNextCell().run();
},
"Shift-Tab": () => this.editor.commands.goToPreviousCell(),
Backspace: deleteTableWhenAllCellsSelected,
"Mod-Backspace": deleteTableWhenAllCellsSelected,
Delete: deleteTableWhenAllCellsSelected,
"Mod-Delete": deleteTableWhenAllCellsSelected,
};
},
addNodeView() {
return ({ editor, getPos, node, decorations }) => {
const { cellMinWidth } = this.options;
return new TableView(
node,
cellMinWidth,
decorations,
editor,
getPos as () => number,
);
};
},
addProseMirrorPlugins() {
const isResizable = this.options.resizable && this.editor.isEditable;
const plugins = [
tableEditing({
allowTableNodeSelection: this.options.allowTableNodeSelection,
}),
tableControls(),
];
if (isResizable) {
plugins.unshift(
columnResizing({
handleWidth: this.options.handleWidth,
cellMinWidth: this.options.cellMinWidth,
// View: TableView,
// @ts-ignore
lastColumnResizable: this.options.lastColumnResizable,
}),
);
}
return plugins;
},
extendNodeSchema(extension) {
const context = {
name: extension.name,
options: extension.options,
storage: extension.storage,
};
return {
tableRole: callOrReturn(
getExtensionField(extension, "tableRole", context),
),
};
},
});
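// --- Usage sketch (not part of this diff) ---------------------------------------
// A minimal example of how this extension could be registered and driven. The
// StarterKit and table-row/header/cell imports are assumptions for illustration;
// the insertTable/goToNextCell commands come from addCommands() above.
//
// import { Editor } from "@tiptap/core";
// import StarterKit from "@tiptap/starter-kit";
// import Table from "./table";
// import TableRow from "@tiptap/extension-table-row";
// import TableHeader from "@tiptap/extension-table-header";
// import TableCell from "@tiptap/extension-table-cell";
//
// const editor = new Editor({
//   extensions: [StarterKit, Table, TableRow, TableHeader, TableCell],
// });
//
// // Insert a 3x3 table with a header row, then move the cursor to the next cell.
// editor.chain().focus().insertTable({ rows: 3, cols: 3, withHeaderRow: true }).run();
// editor.commands.goToNextCell();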

View File

@ -0,0 +1,12 @@
import { Fragment, Node as ProsemirrorNode, NodeType } from "prosemirror-model";
export function createCell(
cellType: NodeType,
cellContent?: Fragment | ProsemirrorNode | Array<ProsemirrorNode>,
): ProsemirrorNode | null | undefined {
if (cellContent) {
return cellType.createChecked(null, cellContent);
}
return cellType.createAndFill();
}
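// --- Usage sketch (not part of this diff) ---------------------------------------
// createCell is meant to be called while assembling table rows, e.g. from a
// createTable helper. The "tableRow"/"tableCell" node names below are assumptions
// about the schema, used only for illustration.
//
// import { Schema } from "prosemirror-model";
// declare const schema: Schema; // assumed to contain tableRow and tableCell nodes
//
// const cells = Array.from({ length: 3 }, () => createCell(schema.nodes.tableCell))
//   .filter((cell): cell is ProsemirrorNode => cell != null);
// const row = schema.nodes.tableRow.createChecked(null, cells);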

Some files were not shown because too many files have changed in this diff.