Merge branch 'feat/self_hosted_instance' of github.com:makeplane/plane into feat/self_hosted_instance

commit 39f69b7ed2
Author: pablohashescobar
Date:   2023-11-08 11:14:26 +00:00

731 changed files with 19309 additions and 24630 deletions

.deepsource.toml (new file, 17 lines)

@ -0,0 +1,17 @@
version = 1
[[analyzers]]
name = "shell"
[[analyzers]]
name = "javascript"
[analyzers.meta]
plugins = ["react"]
environment = ["nodejs"]
[[analyzers]]
name = "python"
[analyzers.meta]
runtime_version = "3.x.x"

.github/workflows/build-branch.yml (new file, 213 lines)

@ -0,0 +1,213 @@
name: Branch Build
on:
pull_request:
types:
- closed
branches:
- master
- release
- qa
- develop
env:
TARGET_BRANCH: ${{ github.event.pull_request.base.ref }}
jobs:
branch_build_and_push:
if: ${{ (github.event_name == 'pull_request' && github.event.action =='closed' && github.event.pull_request.merged == true) }}
name: Build-Push Web/Space/API/Proxy Docker Image
runs-on: ubuntu-20.04
steps:
- name: Check out the repo
uses: actions/checkout@v3.3.0
# - name: Set Target Branch Name on PR close
# if: ${{ github.event_name == 'pull_request' && github.event.action =='closed' }}
# run: echo "TARGET_BRANCH=${{ github.event.pull_request.base.ref }}" >> $GITHUB_ENV
# - name: Set Target Branch Name on other than PR close
# if: ${{ github.event_name == 'push' }}
# run: echo "TARGET_BRANCH=${{ github.ref_name }}" >> $GITHUB_ENV
- uses: ASzc/change-string-case-action@v2
id: gh_branch_upper_lower
with:
string: ${{env.TARGET_BRANCH}}
- uses: mad9000/actions-find-and-replace-string@2
id: gh_branch_replace_slash
with:
source: ${{ steps.gh_branch_upper_lower.outputs.lowercase }}
find: '/'
replace: '-'
- uses: mad9000/actions-find-and-replace-string@2
id: gh_branch_replace_dot
with:
source: ${{ steps.gh_branch_replace_slash.outputs.value }}
find: '.'
replace: ''
- uses: mad9000/actions-find-and-replace-string@2
id: gh_branch_clean
with:
source: ${{ steps.gh_branch_replace_dot.outputs.value }}
find: '_'
replace: ''
- name: Uploading Proxy Source
uses: actions/upload-artifact@v3
with:
name: proxy-src-code
path: ./nginx
- name: Uploading Backend Source
uses: actions/upload-artifact@v3
with:
name: backend-src-code
path: ./apiserver
- name: Uploading Web Source
uses: actions/upload-artifact@v3
with:
name: web-src-code
path: |
./
!./apiserver
!./nginx
!./deploy
!./space
- name: Uploading Space Source
uses: actions/upload-artifact@v3
with:
name: space-src-code
path: |
./
!./apiserver
!./nginx
!./deploy
!./web
outputs:
gh_branch_name: ${{ steps.gh_branch_clean.outputs.value }}
branch_build_push_frontend:
runs-on: ubuntu-20.04
needs: [ branch_build_and_push ]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2.5.0
- name: Login to Docker Hub
uses: docker/login-action@v2.1.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Downloading Web Source Code
uses: actions/download-artifact@v3
with:
name: web-src-code
- name: Build and Push Frontend to Docker Container Registry
uses: docker/build-push-action@v4.0.0
with:
context: .
file: ./web/Dockerfile.web
platforms: linux/amd64
tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-frontend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
push: true
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_space:
runs-on: ubuntu-20.04
needs: [ branch_build_and_push ]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2.5.0
- name: Login to Docker Hub
uses: docker/login-action@v2.1.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Downloading Space Source Code
uses: actions/download-artifact@v3
with:
name: space-src-code
- name: Build and Push Space to Docker Hub
uses: docker/build-push-action@v4.0.0
with:
context: .
file: ./space/Dockerfile.space
platforms: linux/amd64
tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-space-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
push: true
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_backend:
runs-on: ubuntu-20.04
needs: [ branch_build_and_push ]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2.5.0
- name: Login to Docker Hub
uses: docker/login-action@v2.1.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Downloading Backend Source Code
uses: actions/download-artifact@v3
with:
name: backend-src-code
- name: Build and Push Backend to Docker Hub
uses: docker/build-push-action@v4.0.0
with:
context: .
file: ./Dockerfile.api
platforms: linux/amd64
push: true
tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-backend-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
branch_build_push_proxy:
runs-on: ubuntu-20.04
needs: [ branch_build_and_push ]
steps:
- name: Set up Docker Buildx
uses: docker/setup-buildx-action@v2.5.0
- name: Login to Docker Hub
uses: docker/login-action@v2.1.0
with:
username: ${{ secrets.DOCKERHUB_USERNAME }}
password: ${{ secrets.DOCKERHUB_TOKEN }}
- name: Downloading Proxy Source Code
uses: actions/download-artifact@v3
with:
name: proxy-src-code
- name: Build and Push Plane-Proxy to Docker Hub
uses: docker/build-push-action@v4.0.0
with:
context: .
file: ./Dockerfile
platforms: linux/amd64
tags: ${{ secrets.DOCKERHUB_USERNAME }}/plane-proxy-private:${{ needs.branch_build_and_push.outputs.gh_branch_name }}
push: true
env:
DOCKER_BUILDKIT: 1
DOCKER_USERNAME: ${{ secrets.DOCKERHUB_USERNAME }}
DOCKER_PASSWORD: ${{ secrets.DOCKERHUB_TOKEN }}
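
The four string steps above exist only to turn the merged PR's base branch name into a string that is safe to use as a Docker tag suffix: lowercase it, turn slashes into hyphens, and drop dots and underscores. As a rough sketch of what the chained actions compute (illustrative Python, not part of the workflow):

# Sketch of the tag sanitisation performed by the change-string-case and
# find-and-replace steps above (illustrative only).
def branch_to_tag(branch: str) -> str:
    tag = branch.lower()           # gh_branch_upper_lower
    tag = tag.replace("/", "-")    # gh_branch_replace_slash
    tag = tag.replace(".", "")     # gh_branch_replace_dot
    tag = tag.replace("_", "")     # gh_branch_clean
    return tag

print(branch_to_tag("feat/self_hosted_instance"))  # feat-selfhostedinstance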

.gitignore (2 lines changed)

@ -75,7 +75,7 @@ pnpm-lock.yaml
pnpm-workspace.yaml
.npmrc
.secrets
tmp/
## packages
dist


@ -60,7 +60,7 @@ representative at an online or offline event.
Instances of abusive, harassing, or otherwise unacceptable behavior may be
reported to the community leaders responsible for enforcement at
hello@plane.so.
squawk@plane.so.
All complaints will be reviewed and investigated promptly and fairly.
All community leaders are obligated to respect the privacy and security of the


@ -7,7 +7,7 @@
</p>
<h3 align="center"><b>Plane</b></h3>
<p align="center"><b>Open-source, self-hosted project planning tool</b></p>
<p align="center"><b>Flexible, extensible open-source project management</b></p>
<p align="center">
<a href="https://discord.com/invite/A92xrEGCge">


@ -67,3 +67,6 @@ ENABLE_MAGIC_LINK_LOGIN="0"
# Email redirections and minio domain settings
WEB_URL="http://localhost"
# Gunicorn Workers
GUNICORN_WORKERS=2


@ -5,4 +5,4 @@ python manage.py migrate
python bin/instance_configuration.py
exec gunicorn -w 8 -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
exec gunicorn -w $GUNICORN_WORKERS -k uvicorn.workers.UvicornWorker plane.asgi:application --bind 0.0.0.0:8000 --max-requests 1200 --max-requests-jitter 1000 --access-logfile -
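
The start script now takes its worker count from the GUNICORN_WORKERS environment variable, which the sample .env above pins to 2 instead of the previously hard-coded 8. If you would rather size it to the host, the Gunicorn documentation's usual starting point is roughly 2 x CPU cores + 1; a throwaway sketch for computing such a value (not part of the repository):

# Sketch only: common heuristic for choosing a GUNICORN_WORKERS value.
import multiprocessing

def suggested_gunicorn_workers() -> int:
    return multiprocessing.cpu_count() * 2 + 1

print(suggested_gunicorn_workers())  # e.g. export GUNICORN_WORKERS=<value>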


@ -3,4 +3,4 @@ from psycogreen.gevent import patch_psycopg
def post_fork(server, worker):
patch_psycopg()
worker.log.info("Made Psycopg2 Green")
worker.log.info("Made Psycopg2 Green")


@ -101,4 +101,4 @@ class ProjectLitePermission(BasePermission):
workspace__slug=view.workspace_slug,
member=request.user,
project_id=view.project_id,
).exists()
).exists()


@ -17,7 +17,7 @@ class AnalyticViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = {}
return AnalyticView.objects.create(**validated_data)
def update(self, instance, validated_data):
@ -25,6 +25,6 @@ class AnalyticViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = {}
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)


@ -1,6 +1,3 @@
# Django imports
from django.db.models.functions import TruncDate
# Third party imports
from rest_framework import serializers


@ -6,7 +6,6 @@ from .base import BaseSerializer
from .issue import IssueFlatSerializer, LabelLiteSerializer
from .project import ProjectLiteSerializer
from .state import StateLiteSerializer
from .project import ProjectLiteSerializer
from .user import UserLiteSerializer
from plane.db.models import Inbox, InboxIssue, Issue


@ -5,4 +5,4 @@ from .github import (
GithubIssueSyncSerializer,
GithubCommentSyncSerializer,
)
from .slack import SlackProjectSyncSerializer
from .slack import SlackProjectSyncSerializer


@ -8,8 +8,7 @@ from rest_framework import serializers
from .base import BaseSerializer
from .user import UserLiteSerializer
from .state import StateSerializer, StateLiteSerializer
from .user import UserLiteSerializer
from .project import ProjectSerializer, ProjectLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from plane.db.models import (
User,
@ -232,25 +231,6 @@ class IssueActivitySerializer(BaseSerializer):
fields = "__all__"
class IssueCommentSerializer(BaseSerializer):
actor_detail = UserLiteSerializer(read_only=True, source="actor")
issue_detail = IssueFlatSerializer(read_only=True, source="issue")
project_detail = ProjectLiteSerializer(read_only=True, source="project")
workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace")
class Meta:
model = IssueComment
fields = "__all__"
read_only_fields = [
"workspace",
"project",
"issue",
"created_by",
"updated_by",
"created_at",
"updated_at",
]
class IssuePropertySerializer(BaseSerializer):
class Meta:
@ -287,7 +267,6 @@ class LabelLiteSerializer(BaseSerializer):
class IssueLabelSerializer(BaseSerializer):
# label_details = LabelSerializer(read_only=True, source="label")
class Meta:
model = IssueLabel


@ -4,9 +4,8 @@ from rest_framework import serializers
# Module imports
from .base import BaseSerializer
from .user import UserLiteSerializer
from .project import ProjectSerializer, ProjectLiteSerializer
from .project import ProjectLiteSerializer
from .workspace import WorkspaceLiteSerializer
from .issue import IssueStateSerializer
from plane.db.models import (
User,
@ -19,7 +18,7 @@ from plane.db.models import (
class ModuleWriteSerializer(BaseSerializer):
members_list = serializers.ListField(
members = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=User.objects.all()),
write_only=True,
required=False,
@ -39,6 +38,11 @@ class ModuleWriteSerializer(BaseSerializer):
"created_at",
"updated_at",
]
def to_representation(self, instance):
data = super().to_representation(instance)
data['members'] = [str(member.id) for member in instance.members.all()]
return data
def validate(self, data):
if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None):
@ -46,7 +50,7 @@ class ModuleWriteSerializer(BaseSerializer):
return data
def create(self, validated_data):
members = validated_data.pop("members_list", None)
members = validated_data.pop("members", None)
project = self.context["project"]
@ -72,7 +76,7 @@ class ModuleWriteSerializer(BaseSerializer):
return module
def update(self, instance, validated_data):
members = validated_data.pop("members_list", None)
members = validated_data.pop("members", None)
if members is not None:
ModuleMember.objects.filter(module=instance).delete()


@ -33,7 +33,7 @@ class PageBlockLiteSerializer(BaseSerializer):
class PageSerializer(BaseSerializer):
is_favorite = serializers.BooleanField(read_only=True)
label_details = LabelLiteSerializer(read_only=True, source="labels", many=True)
labels_list = serializers.ListField(
labels = serializers.ListField(
child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
write_only=True,
required=False,
@ -50,9 +50,13 @@ class PageSerializer(BaseSerializer):
"project",
"owned_by",
]
def to_representation(self, instance):
data = super().to_representation(instance)
data['labels'] = [str(label.id) for label in instance.labels.all()]
return data
def create(self, validated_data):
labels = validated_data.pop("labels_list", None)
labels = validated_data.pop("labels", None)
project_id = self.context["project_id"]
owned_by_id = self.context["owned_by_id"]
page = Page.objects.create(
@ -77,7 +81,7 @@ class PageSerializer(BaseSerializer):
return page
def update(self, instance, validated_data):
labels = validated_data.pop("labels_list", None)
labels = validated_data.pop("labels", None)
if labels is not None:
PageLabel.objects.filter(page=instance).delete()
PageLabel.objects.bulk_create(


@ -79,14 +79,14 @@ class UserMeSettingsSerializer(BaseSerializer):
email=obj.email
).count()
if obj.last_workspace_id is not None:
workspace = Workspace.objects.get(
workspace = Workspace.objects.filter(
pk=obj.last_workspace_id, workspace_member__member=obj.id
)
).first()
return {
"last_workspace_id": obj.last_workspace_id,
"last_workspace_slug": workspace.slug,
"last_workspace_slug": workspace.slug if workspace is not None else "",
"fallback_workspace_id": obj.last_workspace_id,
"fallback_workspace_slug": workspace.slug,
"fallback_workspace_slug": workspace.slug if workspace is not None else "",
"invites": workspace_invites,
}
else:


@ -57,7 +57,7 @@ class IssueViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = {}
return IssueView.objects.create(**validated_data)
def update(self, instance, validated_data):
@ -65,7 +65,7 @@ class IssueViewSerializer(BaseSerializer):
if bool(query_params):
validated_data["query"] = issue_filters(query_params, "POST")
else:
validated_data["query"] = dict()
validated_data["query"] = {}
validated_data["query"] = issue_filters(query_params, "PATCH")
return super().update(instance, validated_data)


@ -110,9 +110,8 @@ class TeamSerializer(BaseSerializer):
]
TeamMember.objects.bulk_create(team_members, batch_size=10)
return team
else:
team = Team.objects.create(**validated_data)
return team
team = Team.objects.create(**validated_data)
return team
def update(self, instance, validated_data):
if "members" in validated_data:
@ -124,8 +123,7 @@ class TeamSerializer(BaseSerializer):
]
TeamMember.objects.bulk_create(team_members, batch_size=10)
return super().update(instance, validated_data)
else:
return super().update(instance, validated_data)
return super().update(instance, validated_data)
class WorkspaceThemeSerializer(BaseSerializer):


@ -1,7 +1,7 @@
from .analytic import urlpatterns as analytic_urls
from .asset import urlpatterns as asset_urls
from .authentication import urlpatterns as authentication_urls
from .configuration import urlpatterns as configuration_urls
from .config import urlpatterns as configuration_urls
from .cycle import urlpatterns as cycle_urls
from .estimate import urlpatterns as estimate_urls
from .gpt import urlpatterns as gpt_urls


@ -4,17 +4,15 @@ from plane.api.views import (
ProjectViewSet,
InviteProjectEndpoint,
ProjectMemberViewSet,
ProjectMemberEndpoint,
ProjectMemberInvitationsViewset,
ProjectMemberUserEndpoint,
AddMemberToProjectEndpoint,
ProjectJoinEndpoint,
AddTeamToProjectEndpoint,
ProjectUserViewsEndpoint,
ProjectIdentifierEndpoint,
ProjectFavoritesViewSet,
LeaveProjectEndpoint,
ProjectPublicCoverImagesEndpoint
ProjectPublicCoverImagesEndpoint,
)
@ -53,7 +51,7 @@ urlpatterns = [
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/",
ProjectMemberViewSet.as_view({"get": "list"}),
ProjectMemberViewSet.as_view({"get": "list", "post": "create"}),
name="project-member",
),
path(
@ -67,16 +65,6 @@ urlpatterns = [
),
name="project-member",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/project-members/",
ProjectMemberEndpoint.as_view(),
name="project-member",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/members/add/",
AddMemberToProjectEndpoint.as_view(),
name="project",
),
path(
"workspaces/<str:slug>/projects/join/",
ProjectJoinEndpoint.as_view(),


@ -5,7 +5,6 @@ from plane.api.views import (
WorkSpaceViewSet,
InviteWorkspaceEndpoint,
WorkSpaceMemberViewSet,
WorkspaceMembersEndpoint,
WorkspaceInvitationsViewset,
WorkspaceMemberUserEndpoint,
WorkspaceMemberUserViewsEndpoint,
@ -86,11 +85,6 @@ urlpatterns = [
),
name="workspace-member",
),
path(
"workspaces/<str:slug>/workspace-members/",
WorkspaceMembersEndpoint.as_view(),
name="workspace-members",
),
path(
"workspaces/<str:slug>/teams/",
TeamMemberViewSet.as_view(


@ -28,7 +28,6 @@ from plane.api.views import (
## End User
# Workspaces
WorkSpaceViewSet,
UserWorkspaceInvitationsEndpoint,
UserWorkSpacesEndpoint,
InviteWorkspaceEndpoint,
JoinWorkspaceEndpoint,


@ -7,14 +7,12 @@ from .project import (
ProjectMemberInvitationsViewset,
ProjectMemberInviteDetailViewSet,
ProjectIdentifierEndpoint,
AddMemberToProjectEndpoint,
ProjectJoinEndpoint,
ProjectUserViewsEndpoint,
ProjectMemberUserEndpoint,
ProjectFavoritesViewSet,
ProjectDeployBoardViewSet,
ProjectDeployBoardPublicSettingsEndpoint,
ProjectMemberEndpoint,
WorkspaceProjectDeployBoardEndpoint,
LeaveProjectEndpoint,
ProjectPublicCoverImagesEndpoint,
@ -53,7 +51,6 @@ from .workspace import (
WorkspaceUserProfileEndpoint,
WorkspaceUserProfileIssuesEndpoint,
WorkspaceLabelsEndpoint,
WorkspaceMembersEndpoint,
LeaveWorkspaceEndpoint,
)
from .state import StateViewSet
@ -169,4 +166,4 @@ from .notification import NotificationViewSet, UnreadNotificationEndpoint, MarkA
from .exporter import ExportIssuesEndpoint
from .config import ConfigurationEndpoint
from .config import ConfigurationEndpoint


@ -55,11 +55,11 @@ class VerifyEmailEndpoint(BaseAPIView):
return Response(
{"email": "Successfully activated"}, status=status.HTTP_200_OK
)
except jwt.ExpiredSignatureError as indentifier:
except jwt.ExpiredSignatureError as _indentifier:
return Response(
{"email": "Activation expired"}, status=status.HTTP_400_BAD_REQUEST
)
except jwt.exceptions.DecodeError as indentifier:
except jwt.exceptions.DecodeError as _indentifier:
return Response(
{"email": "Invalid token"}, status=status.HTTP_400_BAD_REQUEST
)


@ -249,11 +249,11 @@ class MagicSignInGenerateEndpoint(BaseAPIView):
## Generate a random token
token = (
"".join(random.choices(string.ascii_lowercase + string.digits, k=4))
"".join(random.choices(string.ascii_lowercase, k=4))
+ "-"
+ "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
+ "".join(random.choices(string.ascii_lowercase, k=4))
+ "-"
+ "".join(random.choices(string.ascii_lowercase + string.digits, k=4))
+ "".join(random.choices(string.ascii_lowercase, k=4))
)
ri = redis_instance()


@ -21,8 +21,8 @@ class ConfigurationEndpoint(BaseAPIView):
def get(self, request):
data = {}
data["google"] = os.environ.get("GOOGLE_CLIENT_ID", None)
data["github"] = os.environ.get("GITHUB_CLIENT_ID", None)
data["google_client_id"] = os.environ.get("GOOGLE_CLIENT_ID", None)
data["github_client_id"] = os.environ.get("GITHUB_CLIENT_ID", None)
data["github_app_name"] = os.environ.get("GITHUB_APP_NAME", None)
data["magic_login"] = (
bool(settings.EMAIL_HOST_USER) and bool(settings.EMAIL_HOST_PASSWORD)
@ -30,4 +30,5 @@ class ConfigurationEndpoint(BaseAPIView):
data["email_password_login"] = (
os.environ.get("ENABLE_EMAIL_PASSWORD", "0") == "1"
)
data["slack_client_id"] = os.environ.get("SLACK_CLIENT_ID", None)
return Response(data, status=status.HTTP_200_OK)


@ -3,7 +3,6 @@ import json
# Django imports
from django.db.models import (
OuterRef,
Func,
F,
Q,
@ -480,13 +479,13 @@ class CycleViewSet(BaseViewSet):
)
)
cycle = Cycle.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
# Delete the cycle
cycle.delete()
issue_activity.delay(
type="cycle.activity.deleted",
requested_data=json.dumps(
{
"cycle_id": str(pk),
"cycle_name": str(cycle.name),
"issues": [str(issue_id) for issue_id in cycle_issues],
}
),
@ -496,6 +495,8 @@ class CycleViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
# Delete the cycle
cycle.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@ -512,12 +513,6 @@ class CycleIssueViewSet(BaseViewSet):
"issue__assignees__id",
]
def perform_create(self, serializer):
serializer.save(
project_id=self.kwargs.get("project_id"),
cycle_id=self.kwargs.get("cycle_id"),
)
def get_queryset(self):
return self.filter_queryset(
super()
@ -670,7 +665,7 @@ class CycleIssueViewSet(BaseViewSet):
type="cycle.activity.created",
requested_data=json.dumps({"cycles_list": issues}),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
issue_id=None,
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
{


@ -39,6 +39,7 @@ from plane.utils.integrations.github import get_github_repo_details
from plane.utils.importers.jira import jira_project_issue_summary
from plane.bgtasks.importer_task import service_importer
from plane.utils.html_processor import strip_tags
from plane.api.permissions import WorkSpaceAdminPermission
class ServiceIssueImportSummaryEndpoint(BaseAPIView):
@ -119,6 +120,9 @@ class ServiceIssueImportSummaryEndpoint(BaseAPIView):
class ImportServiceEndpoint(BaseAPIView):
permission_classes = [
WorkSpaceAdminPermission,
]
def post(self, request, slug, service):
project_id = request.data.get("project_id", False)


@ -360,8 +360,7 @@ class InboxIssuePublicViewSet(BaseViewSet):
)
.select_related("issue", "workspace", "project")
)
else:
return InboxIssue.objects.none()
return InboxIssue.objects.none()
def list(self, request, slug, project_id, inbox_id):
project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id)


@ -1,6 +1,6 @@
# Python improts
import uuid
import requests
# Django imports
from django.contrib.auth.hashers import make_password
@ -25,7 +25,7 @@ from plane.utils.integrations.github import (
delete_github_installation,
)
from plane.api.permissions import WorkSpaceAdminPermission
from plane.utils.integrations.slack import slack_oauth
class IntegrationViewSet(BaseViewSet):
serializer_class = IntegrationSerializer
@ -98,12 +98,19 @@ class WorkspaceIntegrationViewSet(BaseViewSet):
config = {"installation_id": installation_id}
if provider == "slack":
metadata = request.data.get("metadata", {})
code = request.data.get("code", False)
if not code:
return Response({"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST)
slack_response = slack_oauth(code=code)
metadata = slack_response
access_token = metadata.get("access_token", False)
team_id = metadata.get("team", {}).get("id", False)
if not metadata or not access_token or not team_id:
return Response(
{"error": "Access token and team id is required"},
{"error": "Slack could not be installed. Please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)
config = {"team_id": team_id, "access_token": access_token}


@ -11,6 +11,7 @@ from plane.api.views import BaseViewSet, BaseAPIView
from plane.db.models import SlackProjectSync, WorkspaceIntegration, ProjectMember
from plane.api.serializers import SlackProjectSyncSerializer
from plane.api.permissions import ProjectBasePermission, ProjectEntityPermission
from plane.utils.integrations.slack import slack_oauth
class SlackProjectSyncViewSet(BaseViewSet):
@ -32,25 +33,47 @@ class SlackProjectSyncViewSet(BaseViewSet):
)
def create(self, request, slug, project_id, workspace_integration_id):
serializer = SlackProjectSyncSerializer(data=request.data)
try:
code = request.data.get("code", False)
workspace_integration = WorkspaceIntegration.objects.get(
workspace__slug=slug, pk=workspace_integration_id
)
if not code:
return Response(
{"error": "Code is required"}, status=status.HTTP_400_BAD_REQUEST
)
if serializer.is_valid():
serializer.save(
project_id=project_id,
workspace_integration_id=workspace_integration_id,
slack_response = slack_oauth(code=code)
workspace_integration = WorkspaceIntegration.objects.get(
workspace__slug=slug, pk=workspace_integration_id
)
workspace_integration = WorkspaceIntegration.objects.get(
pk=workspace_integration_id, workspace__slug=slug
)
slack_project_sync = SlackProjectSync.objects.create(
access_token=slack_response.get("access_token"),
scopes=slack_response.get("scope"),
bot_user_id=slack_response.get("bot_user_id"),
webhook_url=slack_response.get("incoming_webhook", {}).get("url"),
data=slack_response,
team_id=slack_response.get("team", {}).get("id"),
team_name=slack_response.get("team", {}).get("name"),
workspace_integration=workspace_integration,
project_id=project_id,
)
_ = ProjectMember.objects.get_or_create(
member=workspace_integration.actor, role=20, project_id=project_id
)
serializer = SlackProjectSyncSerializer(slack_project_sync)
return Response(serializer.data, status=status.HTTP_200_OK)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
except IntegrityError as e:
if "already exists" in str(e):
return Response(
{"error": "Slack is already installed for the project"},
status=status.HTTP_410_GONE,
)
capture_exception(e)
return Response(
{"error": "Slack could not be installed. Please try again later"},
status=status.HTTP_400_BAD_REQUEST,
)


@ -39,7 +39,6 @@ from plane.api.serializers import (
IssueActivitySerializer,
IssueCommentSerializer,
IssuePropertySerializer,
LabelSerializer,
IssueSerializer,
LabelSerializer,
IssueFlatSerializer,
@ -235,10 +234,7 @@ class IssueViewSet(BaseViewSet):
status=status.HTTP_200_OK,
)
return Response(
issues, status=status.HTTP_200_OK
)
return Response(issues, status=status.HTTP_200_OK)
def create(self, request, slug, project_id):
project = Project.objects.get(pk=project_id)
@ -443,9 +439,7 @@ class UserWorkSpaceIssues(BaseAPIView):
status=status.HTTP_200_OK,
)
return Response(
issues, status=status.HTTP_200_OK
)
return Response(issues, status=status.HTTP_200_OK)
class WorkSpaceIssuesEndpoint(BaseAPIView):
@ -623,13 +617,12 @@ class IssueUserDisplayPropertyEndpoint(BaseAPIView):
serializer = IssuePropertySerializer(issue_property)
return Response(serializer.data, status=status.HTTP_201_CREATED)
def get(self, request, slug, project_id):
issue_property, _ = IssueProperty.objects.get_or_create(
user=request.user, project_id=project_id
)
serializer = IssuePropertySerializer(issue_property)
return Response(serializer.data, status=status.HTTP_200_OK)
issue_property, _ = IssueProperty.objects.get_or_create(
user=request.user, project_id=project_id
)
serializer = IssuePropertySerializer(issue_property)
return Response(serializer.data, status=status.HTTP_200_OK)
class LabelViewSet(BaseViewSet):
@ -780,6 +773,20 @@ class SubIssuesEndpoint(BaseAPIView):
updated_sub_issues = Issue.issue_objects.filter(id__in=sub_issue_ids)
# Track the issue
_ = [
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps({"parent": str(issue_id)}),
actor_id=str(request.user.id),
issue_id=str(sub_issue_id),
project_id=str(project_id),
current_instance=json.dumps({"parent": str(sub_issue_id)}),
epoch=int(timezone.now().timestamp()),
)
for sub_issue_id in sub_issue_ids
]
return Response(
IssueFlatSerializer(updated_sub_issues, many=True).data,
status=status.HTTP_200_OK,
@ -1092,17 +1099,19 @@ class IssueArchiveViewSet(BaseViewSet):
archived_at__isnull=False,
pk=pk,
)
issue.archived_at = None
issue.save()
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps({"archived_at": None}),
actor_id=str(request.user.id),
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=None,
current_instance=json.dumps(
IssueSerializer(issue).data, cls=DjangoJSONEncoder
),
epoch=int(timezone.now().timestamp()),
)
issue.archived_at = None
issue.save()
return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK)
@ -1396,8 +1405,7 @@ class IssueCommentPublicViewSet(BaseViewSet):
)
.distinct()
).order_by("created_at")
else:
return IssueComment.objects.none()
return IssueComment.objects.none()
except ProjectDeployBoard.DoesNotExist:
return IssueComment.objects.none()
@ -1522,8 +1530,7 @@ class IssueReactionPublicViewSet(BaseViewSet):
.order_by("-created_at")
.distinct()
)
else:
return IssueReaction.objects.none()
return IssueReaction.objects.none()
except ProjectDeployBoard.DoesNotExist:
return IssueReaction.objects.none()
@ -1618,8 +1625,7 @@ class CommentReactionPublicViewSet(BaseViewSet):
.order_by("-created_at")
.distinct()
)
else:
return CommentReaction.objects.none()
return CommentReaction.objects.none()
except ProjectDeployBoard.DoesNotExist:
return CommentReaction.objects.none()
@ -1713,8 +1719,7 @@ class IssueVotePublicViewSet(BaseViewSet):
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(project_id=self.kwargs.get("project_id"))
)
else:
return IssueVote.objects.none()
return IssueVote.objects.none()
except ProjectDeployBoard.DoesNotExist:
return IssueVote.objects.none()
@ -2160,9 +2165,7 @@ class IssueDraftViewSet(BaseViewSet):
status=status.HTTP_200_OK,
)
return Response(
issues, status=status.HTTP_200_OK
)
return Response(issues, status=status.HTTP_200_OK)
def create(self, request, slug, project_id):
project = Project.objects.get(pk=project_id)
@ -2227,7 +2230,7 @@ class IssueDraftViewSet(BaseViewSet):
def destroy(self, request, slug, project_id, pk=None):
issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk)
current_instance = json.dumps(
IssueSerializer(current_instance).data, cls=DjangoJSONEncoder
IssueSerializer(issue).data, cls=DjangoJSONEncoder
)
issue.delete()
issue_activity.delay(


@ -266,12 +266,12 @@ class ModuleViewSet(BaseViewSet):
module_issues = list(
ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True)
)
module.delete()
issue_activity.delay(
type="module.activity.deleted",
requested_data=json.dumps(
{
"module_id": str(pk),
"module_name": str(module.name),
"issues": [str(issue_id) for issue_id in module_issues],
}
),
@ -281,6 +281,7 @@ class ModuleViewSet(BaseViewSet):
current_instance=None,
epoch=int(timezone.now().timestamp()),
)
module.delete()
return Response(status=status.HTTP_204_NO_CONTENT)
@ -297,12 +298,6 @@ class ModuleIssueViewSet(BaseViewSet):
ProjectEntityPermission,
]
def perform_create(self, serializer):
serializer.save(
project_id=self.kwargs.get("project_id"),
module_id=self.kwargs.get("module_id"),
)
def get_queryset(self):
return self.filter_queryset(
super()
@ -446,7 +441,7 @@ class ModuleIssueViewSet(BaseViewSet):
type="module.activity.created",
requested_data=json.dumps({"modules_list": issues}),
actor_id=str(self.request.user.id),
issue_id=str(self.kwargs.get("pk", None)),
issue_id=None,
project_id=str(self.kwargs.get("project_id", None)),
current_instance=json.dumps(
{


@ -11,7 +11,6 @@ from django.conf import settings
from rest_framework.response import Response
from rest_framework import exceptions
from rest_framework.permissions import AllowAny
from rest_framework.views import APIView
from rest_framework_simplejwt.tokens import RefreshToken
from rest_framework import status
from sentry_sdk import capture_exception
@ -113,7 +112,7 @@ def get_user_data(access_token: str) -> dict:
url="https://api.github.com/user/emails", headers=headers
).json()
[
_ = [
user_data.update({"email": item.get("email")})
for item in response
if item.get("primary") is True
@ -147,7 +146,7 @@ class OauthEndpoint(BaseAPIView):
data = get_user_data(access_token)
email = data.get("email", None)
if email == None:
if email is None:
return Response(
{
"error": "Something went wrong. Please try again later or contact the support team."
@ -158,7 +157,6 @@ class OauthEndpoint(BaseAPIView):
if "@" in email:
user = User.objects.get(email=email)
email = data["email"]
channel = "email"
mobile_number = uuid.uuid4().hex
email_verified = True
else:
@ -182,7 +180,7 @@ class OauthEndpoint(BaseAPIView):
user.last_active = timezone.now()
user.last_login_time = timezone.now()
user.last_login_ip = request.META.get("REMOTE_ADDR")
user.last_login_medium = f"oauth"
user.last_login_medium = "oauth"
user.last_login_uagent = request.META.get("HTTP_USER_AGENT")
user.is_email_verified = email_verified
user.save()
@ -233,7 +231,6 @@ class OauthEndpoint(BaseAPIView):
if "@" in email:
email = data["email"]
mobile_number = uuid.uuid4().hex
channel = "email"
email_verified = True
else:
return Response(


@ -1,5 +1,5 @@
# Python imports
from datetime import timedelta, datetime, date
from datetime import timedelta, date
# Django imports
from django.db.models import Exists, OuterRef, Q, Prefetch


@ -11,7 +11,6 @@ from django.db.models import (
Q,
Exists,
OuterRef,
Func,
F,
Func,
Subquery,
@ -35,7 +34,6 @@ from plane.api.serializers import (
ProjectDetailSerializer,
ProjectMemberInviteSerializer,
ProjectFavoriteSerializer,
IssueLiteSerializer,
ProjectDeployBoardSerializer,
ProjectMemberAdminSerializer,
)
@ -84,7 +82,7 @@ class ProjectViewSet(BaseViewSet):
]
def get_serializer_class(self, *args, **kwargs):
if self.action == "update" or self.action == "partial_update":
if self.action in ["update", "partial_update"]:
return ProjectSerializer
return ProjectDetailSerializer
@ -336,7 +334,7 @@ class ProjectViewSet(BaseViewSet):
{"name": "The project name is already taken"},
status=status.HTTP_410_GONE,
)
except Project.DoesNotExist or Workspace.DoesNotExist as e:
except (Project.DoesNotExist, Workspace.DoesNotExist):
return Response(
{"error": "Project does not exist"}, status=status.HTTP_404_NOT_FOUND
)
@ -482,6 +480,83 @@ class ProjectMemberViewSet(BaseViewSet):
.select_related("workspace", "workspace__owner")
)
def create(self, request, slug, project_id):
members = request.data.get("members", [])
# get the project
project = Project.objects.get(pk=project_id, workspace__slug=slug)
if not len(members):
return Response(
{"error": "Atleast one member is required"},
status=status.HTTP_400_BAD_REQUEST,
)
bulk_project_members = []
bulk_issue_props = []
project_members = (
ProjectMember.objects.filter(
workspace__slug=slug,
member_id__in=[member.get("member_id") for member in members],
)
.values("member_id", "sort_order")
.order_by("sort_order")
)
for member in members:
sort_order = [
project_member.get("sort_order")
for project_member in project_members
if str(project_member.get("member_id")) == str(member.get("member_id"))
]
bulk_project_members.append(
ProjectMember(
member_id=member.get("member_id"),
role=member.get("role", 10),
project_id=project_id,
workspace_id=project.workspace_id,
sort_order=sort_order[0] - 10000 if len(sort_order) else 65535,
)
)
bulk_issue_props.append(
IssueProperty(
user_id=member.get("member_id"),
project_id=project_id,
workspace_id=project.workspace_id,
)
)
project_members = ProjectMember.objects.bulk_create(
bulk_project_members,
batch_size=10,
ignore_conflicts=True,
)
_ = IssueProperty.objects.bulk_create(
bulk_issue_props, batch_size=10, ignore_conflicts=True
)
serializer = ProjectMemberSerializer(project_members, many=True)
return Response(serializer.data, status=status.HTTP_201_CREATED)
def list(self, request, slug, project_id):
project_member = ProjectMember.objects.get(
member=request.user, workspace__slug=slug, project_id=project_id
)
project_members = ProjectMember.objects.filter(
project_id=project_id,
workspace__slug=slug,
member__is_bot=False,
).select_related("project", "member", "workspace")
if project_member.role > 10:
serializer = ProjectMemberAdminSerializer(project_members, many=True)
else:
serializer = ProjectMemberSerializer(project_members, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
def partial_update(self, request, slug, project_id, pk):
project_member = ProjectMember.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id
@ -567,73 +642,6 @@ class ProjectMemberViewSet(BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT)
class AddMemberToProjectEndpoint(BaseAPIView):
permission_classes = [
ProjectBasePermission,
]
def post(self, request, slug, project_id):
members = request.data.get("members", [])
# get the project
project = Project.objects.get(pk=project_id, workspace__slug=slug)
if not len(members):
return Response(
{"error": "Atleast one member is required"},
status=status.HTTP_400_BAD_REQUEST,
)
bulk_project_members = []
bulk_issue_props = []
project_members = (
ProjectMember.objects.filter(
workspace__slug=slug,
member_id__in=[member.get("member_id") for member in members],
)
.values("member_id", "sort_order")
.order_by("sort_order")
)
for member in members:
sort_order = [
project_member.get("sort_order")
for project_member in project_members
if str(project_member.get("member_id"))
== str(member.get("member_id"))
]
bulk_project_members.append(
ProjectMember(
member_id=member.get("member_id"),
role=member.get("role", 10),
project_id=project_id,
workspace_id=project.workspace_id,
sort_order=sort_order[0] - 10000 if len(sort_order) else 65535,
)
)
bulk_issue_props.append(
IssueProperty(
user_id=member.get("member_id"),
project_id=project_id,
workspace_id=project.workspace_id,
)
)
project_members = ProjectMember.objects.bulk_create(
bulk_project_members,
batch_size=10,
ignore_conflicts=True,
)
_ = IssueProperty.objects.bulk_create(
bulk_issue_props, batch_size=10, ignore_conflicts=True
)
serializer = ProjectMemberSerializer(project_members, many=True)
return Response(serializer.data, status=status.HTTP_201_CREATED)
class AddTeamToProjectEndpoint(BaseAPIView):
permission_classes = [
ProjectBasePermission,
@ -933,21 +941,6 @@ class ProjectDeployBoardViewSet(BaseViewSet):
return Response(serializer.data, status=status.HTTP_200_OK)
class ProjectMemberEndpoint(BaseAPIView):
permission_classes = [
ProjectEntityPermission,
]
def get(self, request, slug, project_id):
project_members = ProjectMember.objects.filter(
project_id=project_id,
workspace__slug=slug,
member__is_bot=False,
).select_related("project", "member", "workspace")
serializer = ProjectMemberSerializer(project_members, many=True)
return Response(serializer.data, status=status.HTTP_200_OK)
class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView):
permission_classes = [
AllowAny,


@ -19,7 +19,6 @@ from plane.db.models import (
WorkspaceMemberInvite,
Issue,
IssueActivity,
WorkspaceMember,
)
from plane.utils.paginator import BasePaginator


@ -6,12 +6,10 @@ from uuid import uuid4
# Django imports
from django.db import IntegrityError
from django.db.models import Prefetch
from django.conf import settings
from django.utils import timezone
from django.core.exceptions import ValidationError
from django.core.validators import validate_email
from django.contrib.sites.shortcuts import get_current_site
from django.db.models import (
Prefetch,
OuterRef,
@ -55,7 +53,6 @@ from . import BaseViewSet
from plane.db.models import (
User,
Workspace,
WorkspaceMember,
WorkspaceMemberInvite,
Team,
ProjectMember,
@ -472,7 +469,7 @@ class WorkSpaceMemberViewSet(BaseViewSet):
model = WorkspaceMember
permission_classes = [
WorkSpaceAdminPermission,
WorkspaceEntityPermission,
]
search_fields = [
@ -489,6 +486,25 @@ class WorkSpaceMemberViewSet(BaseViewSet):
.select_related("member")
)
def list(self, request, slug):
workspace_member = WorkspaceMember.objects.get(
member=request.user, workspace__slug=slug
)
workspace_members = WorkspaceMember.objects.filter(
workspace__slug=slug,
member__is_bot=False,
).select_related("workspace", "member")
if workspace_member.role > 10:
serializer = WorkspaceMemberAdminSerializer(workspace_members, many=True)
else:
serializer = WorkSpaceMemberSerializer(
workspace_members,
many=True,
)
return Response(serializer.data, status=status.HTTP_200_OK)
def partial_update(self, request, slug, pk):
workspace_member = WorkspaceMember.objects.get(pk=pk, workspace__slug=slug)
if request.user.id == workspace_member.member_id:
@ -1252,20 +1268,6 @@ class WorkspaceLabelsEndpoint(BaseAPIView):
return Response(labels, status=status.HTTP_200_OK)
class WorkspaceMembersEndpoint(BaseAPIView):
permission_classes = [
WorkspaceEntityPermission,
]
def get(self, request, slug):
workspace_members = WorkspaceMember.objects.filter(
workspace__slug=slug,
member__is_bot=False,
).select_related("workspace", "member")
serialzier = WorkSpaceMemberSerializer(workspace_members, many=True)
return Response(serialzier.data, status=status.HTTP_200_OK)
class LeaveWorkspaceEndpoint(BaseAPIView):
permission_classes = [
WorkspaceEntityPermission,


@ -408,7 +408,6 @@ def analytic_export_task(email, data, slug):
distribution,
x_axis,
y_axis,
segment,
key,
assignee_details,
label_details,


@ -23,7 +23,7 @@ def email_verification(first_name, email, token, current_site):
from_email_string = settings.EMAIL_FROM
subject = f"Verify your Email!"
subject = "Verify your Email!"
context = {
"first_name": first_name,


@ -4,7 +4,6 @@ import io
import json
import boto3
import zipfile
from urllib.parse import urlparse, urlunparse
# Django imports
from django.conf import settings


@ -8,8 +8,6 @@ from django.conf import settings
from celery import shared_task
from sentry_sdk import capture_exception
# Module imports
from plane.db.models import User
@shared_task
@ -21,7 +19,7 @@ def forgot_password(first_name, email, uidb64, token, current_site):
from_email_string = settings.EMAIL_FROM
subject = f"Reset Your Password - Plane"
subject = "Reset Your Password - Plane"
context = {
"first_name": first_name,


@ -2,8 +2,6 @@
import json
import requests
import uuid
import jwt
from datetime import datetime
# Django imports
from django.conf import settings
@ -27,7 +25,6 @@ from plane.db.models import (
User,
IssueProperty,
)
from .workspace_invitation_task import workspace_invitation
from plane.bgtasks.user_welcome_task import send_welcome_slack
@ -58,7 +55,7 @@ def service_importer(service, importer_id):
ignore_conflicts=True,
)
[
_ = [
send_welcome_slack.delay(
str(user.id),
True,
@ -157,7 +154,7 @@ def service_importer(service, importer_id):
)
# Create repo sync
repo_sync = GithubRepositorySync.objects.create(
_ = GithubRepositorySync.objects.create(
repository=repo,
workspace_integration=workspace_integration,
actor=workspace_integration.actor,
@ -179,7 +176,7 @@ def service_importer(service, importer_id):
ImporterSerializer(importer).data,
cls=DjangoJSONEncoder,
)
res = requests.post(
_ = requests.post(
f"{settings.PROXY_BASE_URL}/hooks/workspaces/{str(importer.workspace_id)}/projects/{str(importer.project_id)}/importers/{str(service)}/",
json=import_data_json,
headers=headers,


@ -82,7 +82,7 @@ def track_description(
if (
last_activity is not None
and last_activity.field == "description"
and actor_id == last_activity.actor_id
and actor_id == str(last_activity.actor_id)
):
last_activity.created_at = timezone.now()
last_activity.save(update_fields=["created_at"])
@ -131,7 +131,7 @@ def track_parent(
else "",
field="parent",
project_id=project_id,
workspace=workspace_id,
workspace_id=workspace_id,
comment=f"updated the parent issue to",
old_identifier=old_parent.id if old_parent is not None else None,
new_identifier=new_parent.id if new_parent is not None else None,
@ -276,7 +276,7 @@ def track_labels(
issue_activities,
epoch,
):
requested_labels = set([str(lab) for lab in requested_data.get("labels_list", [])])
requested_labels = set([str(lab) for lab in requested_data.get("labels", [])])
current_labels = set([str(lab) for lab in current_instance.get("labels", [])])
added_labels = requested_labels - current_labels
@ -334,9 +334,7 @@ def track_assignees(
issue_activities,
epoch,
):
requested_assignees = set(
[str(asg) for asg in requested_data.get("assignees_list", [])]
)
requested_assignees = set([str(asg) for asg in requested_data.get("assignees", [])])
current_assignees = set([str(asg) for asg in current_instance.get("assignees", [])])
added_assignees = requested_assignees - current_assignees
@ -363,17 +361,19 @@ def track_assignees(
for dropped_assignee in dropped_assginees:
assignee = User.objects.get(pk=dropped_assignee)
issue_activities.append(
issue_id=issue_id,
actor_id=actor_id,
verb="updated",
old_value=assignee.display_name,
new_value="",
field="assignees",
project_id=project_id,
workspace_id=workspace_id,
comment=f"removed assignee ",
old_identifier=assignee.id,
epoch=epoch,
IssueActivity(
issue_id=issue_id,
actor_id=actor_id,
verb="updated",
old_value=assignee.display_name,
new_value="",
field="assignees",
project_id=project_id,
workspace_id=workspace_id,
comment=f"removed assignee ",
old_identifier=assignee.id,
epoch=epoch,
)
)
@ -418,36 +418,37 @@ def track_archive_at(
issue_activities,
epoch,
):
if requested_data.get("archived_at") is None:
issue_activities.append(
IssueActivity(
issue_id=issue_id,
project_id=project_id,
workspace_id=workspace_id,
comment=f"has restored the issue",
verb="updated",
actor_id=actor_id,
field="archived_at",
old_value="archive",
new_value="restore",
epoch=epoch,
if current_instance.get("archived_at") != requested_data.get("archived_at"):
if requested_data.get("archived_at") is None:
issue_activities.append(
IssueActivity(
issue_id=issue_id,
project_id=project_id,
workspace_id=workspace_id,
comment="has restored the issue",
verb="updated",
actor_id=actor_id,
field="archived_at",
old_value="archive",
new_value="restore",
epoch=epoch,
)
)
)
else:
issue_activities.append(
IssueActivity(
issue_id=issue_id,
project_id=project_id,
workspace_id=workspace_id,
comment=f"Plane has archived the issue",
verb="updated",
actor_id=actor_id,
field="archived_at",
old_value=None,
new_value="archive",
epoch=epoch,
else:
issue_activities.append(
IssueActivity(
issue_id=issue_id,
project_id=project_id,
workspace_id=workspace_id,
comment="Plane has archived the issue",
verb="updated",
actor_id=actor_id,
field="archived_at",
old_value=None,
new_value="archive",
epoch=epoch,
)
)
)
def track_closed_to(
@ -523,8 +524,8 @@ def update_issue_activity(
"description_html": track_description,
"target_date": track_target_date,
"start_date": track_start_date,
"labels_list": track_labels,
"assignees_list": track_assignees,
"labels": track_labels,
"assignees": track_assignees,
"estimate_point": track_estimate_points,
"archived_at": track_archive_at,
"closed_to": track_closed_to,
@ -536,7 +537,7 @@ def update_issue_activity(
)
for key in requested_data:
func = ISSUE_ACTIVITY_MAPPER.get(key, None)
func = ISSUE_ACTIVITY_MAPPER.get(key)
if func is not None:
func(
requested_data=requested_data,
@ -690,6 +691,10 @@ def create_cycle_issue_activity(
new_cycle = Cycle.objects.filter(
pk=updated_record.get("new_cycle_id", None)
).first()
issue = Issue.objects.filter(pk=updated_record.get("issue_id")).first()
if issue:
issue.updated_at = timezone.now()
issue.save(update_fields=["updated_at"])
issue_activities.append(
IssueActivity(
@ -712,6 +717,10 @@ def create_cycle_issue_activity(
cycle = Cycle.objects.filter(
pk=created_record.get("fields").get("cycle")
).first()
issue = Issue.objects.filter(pk=created_record.get("fields").get("issue")).first()
if issue:
issue.updated_at = timezone.now()
issue.save(update_fields=["updated_at"])
issue_activities.append(
IssueActivity(
@ -746,22 +755,27 @@ def delete_cycle_issue_activity(
)
cycle_id = requested_data.get("cycle_id", "")
cycle_name = requested_data.get("cycle_name", "")
cycle = Cycle.objects.filter(pk=cycle_id).first()
issues = requested_data.get("issues")
for issue in issues:
current_issue = Issue.objects.filter(pk=issue).first()
if issue:
current_issue.updated_at = timezone.now()
current_issue.save(update_fields=["updated_at"])
issue_activities.append(
IssueActivity(
issue_id=issue,
actor_id=actor_id,
verb="deleted",
old_value=cycle.name if cycle is not None else "",
old_value=cycle.name if cycle is not None else cycle_name,
new_value="",
field="cycles",
project_id=project_id,
workspace_id=workspace_id,
comment=f"removed this issue from {cycle.name if cycle is not None else None}",
old_identifier=cycle.id if cycle is not None else None,
comment=f"removed this issue from {cycle.name if cycle is not None else cycle_name}",
old_identifier=cycle_id if cycle_id is not None else None,
epoch=epoch,
)
)
@ -793,6 +807,10 @@ def create_module_issue_activity(
new_module = Module.objects.filter(
pk=updated_record.get("new_module_id", None)
).first()
issue = Issue.objects.filter(pk=updated_record.get("issue_id")).first()
if issue:
issue.updated_at = timezone.now()
issue.save(update_fields=["updated_at"])
issue_activities.append(
IssueActivity(
@ -815,6 +833,10 @@ def create_module_issue_activity(
module = Module.objects.filter(
pk=created_record.get("fields").get("module")
).first()
issue = Issue.objects.filter(pk=created_record.get("fields").get("issue")).first()
if issue:
issue.updated_at = timezone.now()
issue.save(update_fields=["updated_at"])
issue_activities.append(
IssueActivity(
issue_id=created_record.get("fields").get("issue"),
@ -848,22 +870,27 @@ def delete_module_issue_activity(
)
module_id = requested_data.get("module_id", "")
module_name = requested_data.get("module_name", "")
module = Module.objects.filter(pk=module_id).first()
issues = requested_data.get("issues")
for issue in issues:
current_issue = Issue.objects.filter(pk=issue).first()
if issue:
current_issue.updated_at = timezone.now()
current_issue.save(update_fields=["updated_at"])
issue_activities.append(
IssueActivity(
issue_id=issue,
actor_id=actor_id,
verb="deleted",
old_value=module.name if module is not None else "",
old_value=module.name if module is not None else module_name,
new_value="",
field="modules",
project_id=project_id,
workspace_id=workspace_id,
comment=f"removed this issue from ",
old_identifier=module.id if module is not None else None,
comment=f"removed this issue from {module.name if module is not None else module_name}",
old_identifier=module_id if module_id is not None else None,
epoch=epoch,
)
)
@ -1451,15 +1478,16 @@ def issue_activity(
issue_activities = []
project = Project.objects.get(pk=project_id)
issue = Issue.objects.filter(pk=issue_id).first()
workspace_id = project.workspace_id
if issue is not None:
try:
issue.updated_at = timezone.now()
issue.save(update_fields=["updated_at"])
except Exception as e:
pass
if issue_id is not None:
issue = Issue.objects.filter(pk=issue_id).first()
if issue:
try:
issue.updated_at = timezone.now()
issue.save(update_fields=["updated_at"])
except Exception as e:
pass
ACTIVITY_MAPPER = {
"issue.activity.created": create_issue_activity,
@ -1534,6 +1562,8 @@ def issue_activity(
IssueActivitySerializer(issue_activities_created, many=True).data,
cls=DjangoJSONEncoder,
),
requested_data=requested_data,
current_instance=current_instance,
)
return
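
Both label and assignee tracking above boil down to a set difference between the ids sent in the request ("labels"/"assignees" after the rename from "labels_list"/"assignees_list") and the ids currently stored on the issue: everything only in the new set produces an "added" activity, and everything only in the old set presumably produces a "removed" one. A standalone sketch of that diffing step:

# Standalone sketch of the added/removed computation used by track_labels
# and track_assignees (ids are illustrative).
def diff_ids(requested_ids, current_ids):
    requested = {str(i) for i in requested_ids}
    current = {str(i) for i in current_ids}
    return requested - current, current - requested  # (added, dropped)

added, dropped = diff_ids(["a1", "b2"], ["b2", "c3"])
print(added)    # {'a1'} -> one "added" IssueActivity per id
print(dropped)  # {'c3'} -> one "removed" IssueActivity per id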


@ -59,7 +59,7 @@ def archive_old_issues():
# Check if Issues
if issues:
# Set the archive time to current time
archive_at = timezone.now()
archive_at = timezone.now().date()
issues_to_update = []
for issue in issues:
@ -71,14 +71,14 @@ def archive_old_issues():
Issue.objects.bulk_update(
issues_to_update, ["archived_at"], batch_size=100
)
[
_ = [
issue_activity.delay(
type="issue.activity.updated",
requested_data=json.dumps({"archived_at": str(archive_at)}),
actor_id=str(project.created_by_id),
issue_id=issue.id,
project_id=project_id,
current_instance=None,
current_instance=json.dumps({"archived_at": None}),
subscriber=False,
epoch=int(timezone.now().timestamp())
)


@ -17,7 +17,7 @@ def magic_link(email, key, token, current_site):
from_email_string = settings.EMAIL_FROM
subject = f"Login for Plane"
subject = "Login for Plane"
context = {"magic_url": abs_url, "code": token}


@ -5,16 +5,107 @@ import json
from django.utils import timezone
# Module imports
from plane.db.models import IssueSubscriber, Project, IssueAssignee, Issue, Notification
from plane.db.models import (
IssueMention,
IssueSubscriber,
Project,
User,
IssueAssignee,
Issue,
Notification,
IssueComment,
)
# Third Party imports
from celery import shared_task
from bs4 import BeautifulSoup
def get_new_mentions(requested_instance, current_instance):
# requested_data is the newer instance of the current issue
# current_instance is the older instance of the current issue, saved in the database
# extract mentions from both the instance of data
mentions_older = extract_mentions(current_instance)
mentions_newer = extract_mentions(requested_instance)
# Getting Set Difference from mentions_newer
new_mentions = [
mention for mention in mentions_newer if mention not in mentions_older]
return new_mentions
# Get Removed Mention
def get_removed_mentions(requested_instance, current_instance):
# requested_data is the newer instance of the current issue
# current_instance is the older instance of the current issue, saved in the database
# extract mentions from both the instance of data
mentions_older = extract_mentions(current_instance)
mentions_newer = extract_mentions(requested_instance)
# Getting Set Difference from mentions_newer
removed_mentions = [
mention for mention in mentions_older if mention not in mentions_newer]
return removed_mentions
# Adds mentions as subscribers
def extract_mentions_as_subscribers(project_id, issue_id, mentions):
# mentions is an array of User IDs representing the FILTERED set of mentioned users
bulk_mention_subscribers = []
for mention_id in mentions:
# If the particular mention has not already been subscribed to the issue, he must be sent the mentioned notification
if not IssueSubscriber.objects.filter(
issue_id=issue_id,
subscriber=mention_id,
project=project_id,
).exists():
mentioned_user = User.objects.get(pk=mention_id)
project = Project.objects.get(pk=project_id)
issue = Issue.objects.get(pk=issue_id)
bulk_mention_subscribers.append(IssueSubscriber(
workspace=project.workspace,
project=project,
issue=issue,
subscriber=mentioned_user,
))
return bulk_mention_subscribers
# Parse Issue Description & extracts mentions
def extract_mentions(issue_instance):
try:
# issue_instance has to be a dictionary passed, containing the description_html and other set of activity data.
mentions = []
# Convert string to dictionary
data = json.loads(issue_instance)
html = data.get("description_html")
soup = BeautifulSoup(html, 'html.parser')
mention_tags = soup.find_all(
'mention-component', attrs={'target': 'users'})
mentions = [mention_tag['id'] for mention_tag in mention_tags]
return list(set(mentions))
except Exception as e:
return []
@shared_task
def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activities_created):
def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activities_created, requested_data, current_instance):
issue_activities_created = (
json.loads(issue_activities_created) if issue_activities_created is not None else None
json.loads(
issue_activities_created) if issue_activities_created is not None else None
)
if type not in [
"cycle.activity.created",
@ -33,14 +124,35 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
]:
# Create Notifications
bulk_notifications = []
"""
Mention Tasks
1. Perform Diffing and Extract the mentions, that mention notification needs to be sent
2. From the latest set of mentions, extract the users which are not a subscribers & make them subscribers
"""
# Get new mentions from the newer instance
new_mentions = get_new_mentions(
requested_instance=requested_data, current_instance=current_instance)
removed_mention = get_removed_mentions(
requested_instance=requested_data, current_instance=current_instance)
# Get New Subscribers from the mentions of the newer instance
requested_mentions = extract_mentions(
issue_instance=requested_data)
mention_subscribers = extract_mentions_as_subscribers(
project_id=project_id, issue_id=issue_id, mentions=requested_mentions)
issue_subscribers = list(
IssueSubscriber.objects.filter(project_id=project_id, issue_id=issue_id)
.exclude(subscriber_id=actor_id)
IssueSubscriber.objects.filter(
project_id=project_id, issue_id=issue_id)
.exclude(subscriber_id__in=list(new_mentions + [actor_id]))
.values_list("subscriber", flat=True)
)
issue_assignees = list(
IssueAssignee.objects.filter(project_id=project_id, issue_id=issue_id)
IssueAssignee.objects.filter(
project_id=project_id, issue_id=issue_id)
.exclude(assignee_id=actor_id)
.values_list("assignee", flat=True)
)
@ -62,6 +174,9 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
for subscriber in list(set(issue_subscribers)):
for issue_activity in issue_activities_created:
issue_comment = issue_activity.get("issue_comment")
if issue_comment is not None:
issue_comment = IssueComment.objects.get(id=issue_comment, issue_id=issue_id, project_id=project_id, workspace_id=project.workspace_id)
bulk_notifications.append(
Notification(
workspace=project.workspace,
@ -89,7 +204,7 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
"new_value": str(issue_activity.get("new_value")),
"old_value": str(issue_activity.get("old_value")),
"issue_comment": str(
issue_comment.comment_stripped
if issue_activity.get("issue_comment") is not None
else ""
),
@ -98,5 +213,62 @@ def notifications(type, issue_id, project_id, actor_id, subscriber, issue_activi
)
)
# Add Mentioned as Issue Subscribers
IssueSubscriber.objects.bulk_create(
mention_subscribers, batch_size=100)
for mention_id in new_mentions:
if (mention_id != actor_id):
for issue_activity in issue_activities_created:
bulk_notifications.append(
Notification(
workspace=project.workspace,
sender="in_app:issue_activities:mention",
triggered_by_id=actor_id,
receiver_id=mention_id,
entity_identifier=issue_id,
entity_name="issue",
project=project,
message=f"You have been mentioned in the issue {issue.name}",
data={
"issue": {
"id": str(issue_id),
"name": str(issue.name),
"identifier": str(issue.project.identifier),
"sequence_id": issue.sequence_id,
"state_name": issue.state.name,
"state_group": issue.state.group,
},
"issue_activity": {
"id": str(issue_activity.get("id")),
"verb": str(issue_activity.get("verb")),
"field": str(issue_activity.get("field")),
"actor": str(issue_activity.get("actor_id")),
"new_value": str(issue_activity.get("new_value")),
"old_value": str(issue_activity.get("old_value")),
},
},
)
)
# Create New Mentions Here
aggregated_issue_mentions = []
for mention_id in new_mentions:
mentioned_user = User.objects.get(pk=mention_id)
aggregated_issue_mentions.append(
IssueMention(
mention=mentioned_user,
issue=issue,
project=project,
workspace=project.workspace
)
)
IssueMention.objects.bulk_create(
aggregated_issue_mentions, batch_size=100)
IssueMention.objects.filter(
issue=issue, mention__in=removed_mention).delete()
# Bulk create notifications
Notification.objects.bulk_create(bulk_notifications, batch_size=100)
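For orientation, a hedged sketch of how this task might be enqueued from the activity pipeline; the call site is outside this diff, so the type string and every placeholder value below are assumptions:

import json

notifications.delay(
    type="issue.activity.updated",  # assumed type string for an issue update
    issue_id="ISSUE_UUID",
    project_id="PROJECT_UUID",
    actor_id="ACTOR_UUID",
    subscriber=True,
    issue_activities_created=json.dumps(
        [{"id": "ACTIVITY_UUID", "verb": "updated", "field": "description_html"}]
    ),
    requested_data=json.dumps({"description_html": "<p>new text</p>"}),
    current_instance=json.dumps({"description_html": "<p>old text</p>"}),
)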

View File

@ -11,7 +11,7 @@ from slack_sdk import WebClient
from slack_sdk.errors import SlackApiError
# Module imports
from plane.db.models import Workspace, WorkspaceMemberInvite
@shared_task

View File

@ -29,4 +29,4 @@ app.conf.beat_schedule = {
# Load task modules from all registered Django app configs.
app.autodiscover_tasks()
app.conf.beat_scheduler = 'django_celery_beat.schedulers.DatabaseScheduler'

View File

@ -0,0 +1,41 @@
# Generated by Django 4.2.5 on 2023-10-18 12:04
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
import plane.db.models.issue
import uuid
class Migration(migrations.Migration):
dependencies = [
('db', '0045_issueactivity_epoch_workspacemember_issue_props_and_more'),
]
operations = [
migrations.CreateModel(
name="issue_mentions",
fields=[
('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')),
('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')),
('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)),
('mention', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_mention', to=settings.AUTH_USER_MODEL)),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='issuemention_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='issue_mention', to='db.issue')),
('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_issuemention', to='db.project')),
('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='issuemention_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_issuemention', to='db.workspace')),
],
options={
'verbose_name': 'IssueMention',
'verbose_name_plural': 'IssueMentions',
'db_table': 'issue_mentions',
'ordering': ('-created_at',),
},
),
migrations.AlterField(
model_name='issueproperty',
name='properties',
field=models.JSONField(default=plane.db.models.issue.get_default_properties),
),
]

View File

@ -27,12 +27,12 @@ from .issue import (
IssueActivity,
IssueProperty,
IssueComment,
IssueLabel,
IssueAssignee,
Label,
IssueBlocker,
IssueRelation,
IssueMention,
IssueLink,
IssueSequence,
IssueAttachment,
@ -78,4 +78,4 @@ from .analytic import AnalyticView
from .notification import Notification
from .exporter import ExporterHistory

View File

@ -53,4 +53,4 @@ class ExporterHistory(BaseModel):
def __str__(self):
"""Return name of the service"""
return f"{self.provider} <{self.workspace.name}>"

View File

@ -1,3 +1,3 @@
from .base import Integration, WorkspaceIntegration
from .github import GithubRepository, GithubRepositorySync, GithubIssueSync, GithubCommentSync
from .slack import SlackProjectSync

View File

@ -6,7 +6,6 @@ from django.db import models
# Module imports
from plane.db.models import ProjectBaseModel
from plane.db.mixins import AuditModel
class GithubRepository(ProjectBaseModel):

View File

@ -226,7 +226,26 @@ class IssueRelation(ProjectBaseModel):
ordering = ("-created_at",)
def __str__(self):
return f"{self.issue.name} {self.related_issue.name}"
class IssueMention(ProjectBaseModel):
issue = models.ForeignKey(
Issue, on_delete=models.CASCADE, related_name="issue_mention"
)
mention = models.ForeignKey(
settings.AUTH_USER_MODEL,
on_delete=models.CASCADE,
related_name="issue_mention",
)
class Meta:
unique_together = ["issue", "mention"]
verbose_name = "Issue Mention"
verbose_name_plural = "Issue Mentions"
db_table = "issue_mentions"
ordering = ("-created_at",)
def __str__(self):
return f"{self.issue.name} {self.mention.email}"
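A brief ORM sketch for the new model, with placeholder objects, showing the lookups the notification task and the mentions filter rely on; issue, user and project are assumed to exist in the calling scope:

# Record a mention (mirrors the bulk_create in the notifications task).
IssueMention.objects.bulk_create(
    [IssueMention(issue=issue, mention=user, project=project, workspace=project.workspace)],
    batch_size=100,
)
# Issues where a given user is mentioned (what issue_mention__mention__id__in filters on).
Issue.objects.filter(issue_mention__mention=user)
# Clean-up when a mention disappears from the description.
IssueMention.objects.filter(issue=issue, mention=user).delete()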
class IssueAssignee(ProjectBaseModel):

View File

@ -4,9 +4,6 @@ from uuid import uuid4
# Django imports
from django.db import models
from django.conf import settings
from django.template.defaultfilters import slugify
from django.db.models.signals import post_save
from django.dispatch import receiver
from django.core.validators import MinValueValidator, MaxValueValidator
# Module imports

View File

@ -1,4 +1,4 @@
# Generated by Django 4.2.5 on 2023-11-08 06:49
from django.conf import settings
from django.db import migrations, models
@ -26,6 +26,9 @@ class Migration(migrations.Migration):
('api_key', models.CharField(max_length=16)),
('version', models.CharField(max_length=10)),
('email', models.CharField(max_length=256)),
('last_checked_at', models.DateTimeField()),
('is_telemetry_enabled', models.BooleanField(default=True)),
('is_support_required', models.BooleanField(default=True)),
('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')),
('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')),
('user', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='instance_owner', to=settings.AUTH_USER_MODEL)),

View File

@ -1,20 +0,0 @@
# Generated by Django 4.2.3 on 2023-10-30 11:42
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('license', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='instance',
name='last_checked_at',
field=models.DateTimeField(default=datetime.datetime(2023, 10, 30, 11, 42, 52, 273809, tzinfo=datetime.timezone.utc)),
preserve_default=False,
),
]

View File

@ -1,23 +0,0 @@
# Generated by Django 4.2.3 on 2023-10-30 14:22
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('license', '0002_instance_last_checked_at'),
]
operations = [
migrations.AddField(
model_name='instance',
name='is_support_required',
field=models.BooleanField(default=True),
),
migrations.AddField(
model_name='instance',
name='is_telemetry_enabled',
field=models.BooleanField(default=True),
),
]

View File

@ -1,33 +0,0 @@
import jwt
import pytz
from django.conf import settings
from django.utils import timezone
from plane.db.models import User
class UserMiddleware(object):
def __init__(self, get_response):
self.get_response = get_response
def __call__(self, request):
try:
if request.headers.get("Authorization"):
authorization_header = request.headers.get("Authorization")
access_token = authorization_header.split(" ")[1]
decoded = jwt.decode(
access_token, settings.SECRET_KEY, algorithms=["HS256"]
)
id = decoded['user_id']
user = User.objects.get(id=id)
user.last_active = timezone.now()
user.token_updated_at = None
user.save()
timezone.activate(pytz.timezone(user.user_timezone))
except Exception as e:
print(e)
response = self.get_response(request)
return response

View File

@ -4,7 +4,6 @@ import ssl
import certifi
import dj_database_url
from urllib.parse import urlparse
import sentry_sdk
from sentry_sdk.integrations.django import DjangoIntegration

View File

@ -1 +1 @@
from .api import *

View File

@ -2,16 +2,13 @@
"""
# from django.contrib import admin
from django.urls import path, include, re_path
from django.views.generic import TemplateView
from django.conf import settings
# from django.conf.urls.static import static
urlpatterns = [
# path("admin/", admin.site.urls),
path("", TemplateView.as_view(template_name="index.html")),
path("api/", include("plane.api.urls")),
path("api/licenses/", include("plane.license.urls")),

View File

@ -12,19 +12,19 @@ from django.db.models.functions import Coalesce, ExtractMonth, ExtractYear, Conc
from plane.db.models import Issue
def annotate_with_monthly_dimension(queryset, field_name, attribute):
# Get the year and the months
year = ExtractYear(field_name)
month = ExtractMonth(field_name)
# Concat the year and month
dimension = Concat(year, Value("-"), month, output_field=CharField())
# Annotate the dimension
return queryset.annotate(**{attribute: dimension})
def extract_axis(queryset, x_axis):
# Format the dimension when the axis is in date
if x_axis in ["created_at", "start_date", "target_date", "completed_at"]:
queryset = annotate_with_monthly_dimension(queryset, x_axis, "dimension")
return queryset, "dimension"
else:
return queryset.annotate(dimension=F(x_axis)), "dimension"
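A short sketch of what the monthly annotation used by extract_axis produces, assuming a configured Django environment; the example values are illustrative:

from plane.db.models import Issue

# Each row gets a "<year>-<month>" string built from created_at.
queryset = annotate_with_monthly_dimension(Issue.objects.all(), "created_at", "dimension")
queryset.values_list("dimension", flat=True)  # e.g. ["2023-10", "2023-11", "2023-11"]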
@ -47,7 +47,7 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None):
#
if segment in ["created_at", "start_date", "target_date", "completed_at"]:
queryset = annotate_with_monthly_dimension(queryset, segment, "segmented")
segment = "segmented"
queryset = queryset.values(x_axis)
@ -81,7 +81,6 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
# Total Issues in Cycle or Module
total_issues = queryset.total_issues
if cycle_id:
# Get all dates between the two dates
date_range = [
@ -103,7 +102,7 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
.values("date", "total_completed")
.order_by("date")
)
if module_id:
# Get all dates between the two dates
date_range = [
@ -126,18 +125,15 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
.order_by("date")
)
for date in date_range:
cumulative_pending_issues = total_issues
total_completed = sum(
item["total_completed"]
for item in completed_issues_distribution
if item["date"] is not None and item["date"] <= date
)
cumulative_pending_issues -= total_completed
chart_data[str(date)] = cumulative_pending_issues
return chart_data
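A small worked example of the cumulative subtraction above, with invented numbers, showing how chart_data ends up as a pending-issue count per day:

import datetime

total_issues = 10
completed_issues_distribution = [
    {"date": datetime.date(2023, 11, 1), "total_completed": 2},
    {"date": datetime.date(2023, 11, 2), "total_completed": 3},
]
date_range = [datetime.date(2023, 11, 1) + datetime.timedelta(days=i) for i in range(3)]
chart_data = {}
for date in date_range:
    total_completed = sum(
        item["total_completed"]
        for item in completed_issues_distribution
        if item["date"] is not None and item["date"] <= date
    )
    chart_data[str(date)] = total_issues - total_completed
print(chart_data)  # {'2023-11-01': 8, '2023-11-02': 5, '2023-11-03': 5}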

View File

@ -127,7 +127,7 @@ def group_results(results_data, group_by, sub_group_by=False):
return main_responsive_dict
else:
response_dict = {}
if group_by == "priority":
response_dict = {

View File

@ -17,4 +17,4 @@ def import_submodules(context, root_module, path):
for k, v in six.iteritems(vars(module)):
if not k.startswith('_'):
context[k] = v
context[module_name] = module

View File

@ -0,0 +1,20 @@
import os
import requests
def slack_oauth(code):
SLACK_OAUTH_URL = os.environ.get("SLACK_OAUTH_URL", False)
SLACK_CLIENT_ID = os.environ.get("SLACK_CLIENT_ID", False)
SLACK_CLIENT_SECRET = os.environ.get("SLACK_CLIENT_SECRET", False)
# Oauth Slack
if SLACK_OAUTH_URL and SLACK_CLIENT_ID and SLACK_CLIENT_SECRET:
response = requests.get(
SLACK_OAUTH_URL,
params={
"code": code,
"client_id": SLACK_CLIENT_ID,
"client_secret": SLACK_CLIENT_SECRET,
},
)
return response.json()
return {}
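A minimal usage sketch for slack_oauth, assuming the three environment variables are set and SLACK_OAUTH_URL points at Slack's documented oauth.v2.access endpoint; the code value would normally arrive on the OAuth redirect:

import os

os.environ.setdefault("SLACK_OAUTH_URL", "https://slack.com/api/oauth.v2.access")
os.environ.setdefault("SLACK_CLIENT_ID", "your-client-id")
os.environ.setdefault("SLACK_CLIENT_SECRET", "your-client-secret")

payload = slack_oauth(code="temporary-auth-code")
access_token = payload.get("access_token")  # {} is returned when the env vars are missing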

View File

@ -4,4 +4,4 @@ def get_client_ip(request):
ip = x_forwarded_for.split(',')[0]
else:
ip = request.META.get('REMOTE_ADDR')
return ip
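A quick illustration of get_client_ip using Django's RequestFactory in a configured test environment; the addresses are placeholders:

from django.test import RequestFactory

factory = RequestFactory()
# Behind a proxy, the first entry of X-Forwarded-For wins.
request = factory.get("/", HTTP_X_FORWARDED_FOR="203.0.113.7, 10.0.0.2")
assert get_client_ip(request) == "203.0.113.7"
# Direct connection: falls back to REMOTE_ADDR (RequestFactory defaults to 127.0.0.1).
request = factory.get("/")
assert get_client_ip(request) == "127.0.0.1"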

View File

@ -150,6 +150,17 @@ def filter_assignees(params, filter, method):
filter["assignees__in"] = params.get("assignees")
return filter
def filter_mentions(params, filter, method):
if method == "GET":
mentions = [item for item in params.get("mentions").split(",") if item != 'null']
mentions = filter_valid_uuids(mentions)
if len(mentions) and "" not in mentions:
filter["issue_mention__mention__id__in"] = mentions
else:
if params.get("mentions", None) and len(params.get("mentions")) and params.get("mentions") != 'null':
filter["issue_mention__mention__id__in"] = params.get("mentions")
return filter
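A sketch of how the new mentions key flows through issue_filters on a GET request, assuming query_params behaves like the request's QueryDict; the UUID is a placeholder and filter_valid_uuids (defined earlier in this file) is assumed to drop malformed values:

from plane.db.models import Issue

query_params = {"mentions": "6f4d6bd0-8e57-4a63-9a17-0d3a3f8f9b2a,null"}
# Roughly: {"issue_mention__mention__id__in": ["6f4d6bd0-8e57-4a63-9a17-0d3a3f8f9b2a"]}
orm_filter = issue_filters(query_params, "GET")
issues = Issue.objects.filter(**orm_filter)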
def filter_created_by(params, filter, method):
if method == "GET":
@ -198,7 +209,7 @@ def filter_start_date(params, filter, method):
date_filter(filter=filter, date_term="start_date", queries=start_dates)
else:
if params.get("start_date", None) and len(params.get("start_date")):
filter["start_date"] = params.get("start_date")
return filter
@ -209,7 +220,7 @@ def filter_target_date(params, filter, method):
date_filter(filter=filter, date_term="target_date", queries=target_dates)
else:
if params.get("target_date", None) and len(params.get("target_date")):
filter["target_date"] = params.get("target_date")
return filter
@ -316,7 +327,7 @@ def filter_start_target_date_issues(params, filter, method):
def issue_filters(query_params, method):
filter = {}
ISSUE_FILTER = {
"state": filter_state,
@ -326,6 +337,7 @@ def issue_filters(query_params, method):
"parent": filter_parent,
"labels": filter_labels,
"assignees": filter_assignees,
"mentions": filter_mentions,
"created_by": filter_created_by,
"name": filter_name,
"created_at": filter_created_at,

View File

@ -1,3 +1,3 @@
import mistune
markdown = mistune.Markdown()

View File

@ -21,12 +21,7 @@ class Cursor:
)
def __repr__(self):
return f"{type(self).__name__}: value={self.value} offset={self.offset}, is_prev={int(self.is_prev)}"
def __bool__(self):
return bool(self.has_results)
@ -176,10 +171,6 @@ class BasePaginator:
**paginator_kwargs,
):
"""Paginate the request"""
assert (paginator and not paginator_kwargs) or (
paginator_cls and paginator_kwargs
)
per_page = self.get_per_page(request, default_per_page, max_per_page)
# Convert the cursor value to integer and float from string

View File

@ -33,4 +33,5 @@ django_celery_beat==2.5.0
psycopg-binary==3.1.10
psycopg-c==3.1.10
scout-apm==2.26.1
openpyxl==3.1.2
beautifulsoup4==4.12.2

View File

@ -10,6 +10,8 @@ x-app-env : &app-env
- SENTRY_DSN=${SENTRY_DSN:-""}
- GITHUB_CLIENT_SECRET=${GITHUB_CLIENT_SECRET:-""}
- DOCKERIZED=${DOCKERIZED:-1}
# Gunicorn Workers
- GUNICORN_WORKERS=${GUNICORN_WORKERS:-2}
#DB SETTINGS
- PGHOST=${PGHOST:-plane-db}
- PGDATABASE=${PGDATABASE:-plane}

View File

@ -59,3 +59,5 @@ MINIO_ROOT_PASSWORD="secret-key"
BUCKET_NAME=uploads
FILE_SIZE_LIMIT=5242880
# Gunicorn Workers
GUNICORN_WORKERS=2

View File

@ -27,7 +27,7 @@
"prettier": "latest",
"prettier-plugin-tailwindcss": "^0.5.4",
"tailwindcss": "^3.3.3",
"turbo": "^1.10.16"
},
"resolutions": {
"@types/react": "18.2.0"

View File

@ -2,6 +2,7 @@
"name": "@plane/editor-core",
"version": "0.0.1",
"description": "Core Editor that powers Plane",
"private": true,
"main": "./dist/index.mjs",
"module": "./dist/index.mjs",
"types": "./dist/index.d.mts",
@ -21,18 +22,18 @@
"check-types": "tsc --noEmit"
},
"peerDependencies": {
"next": "12.3.2",
"next-themes": "^0.2.1",
"react": "^18.2.0",
"react-dom": "18.2.0"
},
"dependencies": {
"@blueprintjs/popover2": "^2.0.10",
"@tiptap/core": "^2.1.7",
"@tiptap/extension-color": "^2.1.11",
"@tiptap/extension-image": "^2.1.7",
"@tiptap/extension-link": "^2.1.7",
"@tiptap/extension-mention": "^2.1.12",
"@tiptap/extension-table": "^2.1.6",
"@tiptap/extension-table-cell": "^2.1.6",
"@tiptap/extension-table-header": "^2.1.6",
@ -41,12 +42,15 @@
"@tiptap/extension-task-list": "^2.1.7",
"@tiptap/extension-text-style": "^2.1.11",
"@tiptap/extension-underline": "^2.1.7",
"@tiptap/prosemirror-tables": "^1.1.4",
"jsx-dom-cjs": "^8.0.3",
"@tiptap/pm": "^2.1.7",
"@tiptap/react": "^2.1.7",
"@tiptap/starter-kit": "^2.1.10",
"@tiptap/suggestion": "^2.0.4",
"@types/node": "18.15.3",
"@types/react": "^18.2.5",
"@types/react-dom": "18.0.11",
"@types/node": "18.15.3",
"class-variance-authority": "^0.7.0",
"clsx": "^1.2.1",
"eslint": "8.36.0",
@ -54,6 +58,7 @@
"eventsource-parser": "^0.1.0",
"lucide-react": "^0.244.0",
"react-markdown": "^8.0.7",
"react-moveable": "^0.54.2",
"tailwind-merge": "^1.14.0",
"tippy.js": "^6.3.7",
"tiptap-markdown": "^0.8.2",

View File

@ -2,8 +2,11 @@
// import "./styles/tailwind.css";
// import "./styles/editor.css";
export { isCellSelection } from "./ui/extensions/table/table/utilities/is-cell-selection";
// utils
export * from "./lib/utils";
export * from "./ui/extensions/table/table";
export { startImageUpload } from "./ui/plugins/upload-image";
// components

View File

@ -0,0 +1,10 @@
export type IMentionSuggestion = {
id: string;
type: string;
avatar: string;
title: string;
subtitle: string;
redirect_uri: string;
}
export type IMentionHighlight = string

View File

@ -7,7 +7,11 @@ interface EditorContainerProps {
children: ReactNode;
}
export const EditorContainer = ({
editor,
editorClassNames,
children,
}: EditorContainerProps) => (
<div
id="editor-container"
onClick={() => {

View File

@ -1,7 +1,6 @@
import { Editor, EditorContent } from "@tiptap/react";
import { ReactNode } from "react";
import { ImageResizer } from "../extensions/image/image-resize";
import { TableMenu } from "../menus/table-menu";
interface EditorContentProps {
editor: Editor | null;
@ -10,10 +9,8 @@ interface EditorContentProps {
}
export const EditorContentWrapper = ({ editor, editorContentCustomClassNames = '', children }: EditorContentProps) => (
<div className={`contentEditor ${editorContentCustomClassNames}`}>
{/* @ts-ignore */}
<EditorContent editor={editor} />
{editor?.isEditable && <TableMenu editor={editor} />}
{(editor?.isActive("image") && editor?.isEditable) && <ImageResizer editor={editor} />}
{children}
</div>

View File

@ -23,8 +23,8 @@ export const ImageResizer = ({ editor }: { editor: Editor }) => {
origin={false}
edge={false}
throttleDrag={0}
keepRatio
resizable
throttleResize={0}
onResize={({ target, width, height, delta }: any) => {
delta[0] && (target!.style.width = `${width}px`);
@ -33,7 +33,7 @@ export const ImageResizer = ({ editor }: { editor: Editor }) => {
onResizeEnd={() => {
updateMediaSize();
}}
scalable
renderDirections={["w", "e"]}
onScale={({ target, transform }: any) => {
target!.style.transform = transform;

View File

@ -8,18 +8,21 @@ import TaskList from "@tiptap/extension-task-list";
import { Markdown } from "tiptap-markdown";
import Gapcursor from "@tiptap/extension-gapcursor";
import TableHeader from "./table/table-header/table-header";
import Table from "./table/table";
import TableCell from "./table/table-cell/table-cell";
import TableRow from "./table/table-row/table-row";
import ImageExtension from "./image";
import { DeleteImage } from "../../types/delete-image";
import { isValidHttpUrl } from "../../lib/utils";
import { IMentionSuggestion } from "../../types/mention-suggestion";
import { Mentions } from "../mentions";
export const CoreEditorExtensions = (
mentionConfig: { mentionSuggestions: IMentionSuggestion[], mentionHighlights: string[] },
deleteFile: DeleteImage,
) => [
StarterKit.configure({
@ -92,6 +95,7 @@ export const CoreEditorExtensions = (
}),
Table,
TableHeader,
TableCell,
TableRow,
Mentions(mentionConfig.mentionSuggestions, mentionConfig.mentionHighlights, false),
];

View File

@ -1,9 +0,0 @@
import { Table as BaseTable } from "@tiptap/extension-table";
const Table = BaseTable.configure({
resizable: true,
cellMinWidth: 100,
allowTableNodeSelection: true,
});
export { Table };

View File

@ -1,32 +0,0 @@
import { TableCell } from "@tiptap/extension-table-cell";
export const CustomTableCell = TableCell.extend({
addAttributes() {
return {
...this.parent?.(),
isHeader: {
default: false,
parseHTML: (element) => {
isHeader: element.tagName === "TD";
},
renderHTML: (attributes) => {
tag: attributes.isHeader ? "th" : "td";
},
},
};
},
renderHTML({ HTMLAttributes }) {
if (HTMLAttributes.isHeader) {
return [
"th",
{
...HTMLAttributes,
class: `relative ${HTMLAttributes.class}`,
},
["span", { class: "absolute top-0 right-0" }],
0,
];
}
return ["td", HTMLAttributes, 0];
},
});

View File

@ -0,0 +1 @@
export { default } from "./table-cell"

View File

@ -0,0 +1,58 @@
import { mergeAttributes, Node } from "@tiptap/core"
export interface TableCellOptions {
HTMLAttributes: Record<string, any>
}
export default Node.create<TableCellOptions>({
name: "tableCell",
addOptions() {
return {
HTMLAttributes: {}
}
},
content: "paragraph+",
addAttributes() {
return {
colspan: {
default: 1
},
rowspan: {
default: 1
},
colwidth: {
default: null,
parseHTML: (element) => {
const colwidth = element.getAttribute("colwidth")
const value = colwidth ? [parseInt(colwidth, 10)] : null
return value
}
},
background: {
default: "none"
}
}
},
tableRole: "cell",
isolating: true,
parseHTML() {
return [{ tag: "td" }]
},
renderHTML({ node, HTMLAttributes }) {
return [
"td",
mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, {
style: `background-color: ${node.attrs.background}`
}),
0
]
}
})

View File

@ -1,7 +0,0 @@
import { TableHeader as BaseTableHeader } from "@tiptap/extension-table-header";
const TableHeader = BaseTableHeader.extend({
content: "paragraph",
});
export { TableHeader };

View File

@ -0,0 +1 @@
export { default } from "./table-header"

View File

@ -0,0 +1,57 @@
import { mergeAttributes, Node } from "@tiptap/core"
export interface TableHeaderOptions {
HTMLAttributes: Record<string, any>
}
export default Node.create<TableHeaderOptions>({
name: "tableHeader",
addOptions() {
return {
HTMLAttributes: {}
}
},
content: "paragraph+",
addAttributes() {
return {
colspan: {
default: 1
},
rowspan: {
default: 1
},
colwidth: {
default: null,
parseHTML: (element) => {
const colwidth = element.getAttribute("colwidth")
const value = colwidth ? [parseInt(colwidth, 10)] : null
return value
}
},
background: {
default: "rgb(var(--color-primary-100))"
}
}
},
tableRole: "header_cell",
isolating: true,
parseHTML() {
return [{ tag: "th" }]
},
renderHTML({ node, HTMLAttributes }) {
return [
"th",
mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, {
style: `background-color: ${node.attrs.background}`
}),
0
]
}
})

View File

@ -0,0 +1 @@
export { default } from "./table-row"

View File

@ -0,0 +1,31 @@
import { mergeAttributes, Node } from "@tiptap/core"
export interface TableRowOptions {
HTMLAttributes: Record<string, any>
}
export default Node.create<TableRowOptions>({
name: "tableRow",
addOptions() {
return {
HTMLAttributes: {}
}
},
content: "(tableCell | tableHeader)*",
tableRole: "row",
parseHTML() {
return [{ tag: "tr" }]
},
renderHTML({ HTMLAttributes }) {
return [
"tr",
mergeAttributes(this.options.HTMLAttributes, HTMLAttributes),
0
]
}
})

View File

@ -0,0 +1,55 @@
const icons = {
colorPicker: `<svg xmlns="http://www.w3.org/2000/svg" width="24" height="24" viewBox="0 0 24 24" style="transform: ;msFilter:;"><path fill="rgb(var(--color-text-300))" d="M20 14c-.092.064-2 2.083-2 3.5 0 1.494.949 2.448 2 2.5.906.044 2-.891 2-2.5 0-1.5-1.908-3.436-2-3.5zM9.586 20c.378.378.88.586 1.414.586s1.036-.208 1.414-.586l7-7-.707-.707L11 4.586 8.707 2.293 7.293 3.707 9.586 6 4 11.586c-.378.378-.586.88-.586 1.414s.208 1.036.586 1.414L9.586 20zM11 7.414 16.586 13H5.414L11 7.414z"></path></svg>`,
deleteColumn: `<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="24" height="24"><path fill="#e53e3e" d="M0 0H24V24H0z"/><path d="M12 3c.552 0 1 .448 1 1v8c.835-.628 1.874-1 3-1 2.761 0 5 2.239 5 5s-2.239 5-5 5c-1.032 0-1.99-.313-2.787-.848L13 20c0 .552-.448 1-1 1H6c-.552 0-1-.448-1-1V4c0-.552.448-1 1-1h6zm-1 2H7v14h4V5zm8 10h-6v2h6v-2z"/></svg>`,
deleteRow: `<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 24 24" width="24" height="24"><path fill="#e53e3e" d="M0 0H24V24H0z"/><path d="M20 5c.552 0 1 .448 1 1v6c0 .552-.448 1-1 1 .628.835 1 1.874 1 3 0 2.761-2.239 5-5 5s-5-2.239-5-5c0-1.126.372-2.165 1-3H4c-.552 0-1-.448-1-1V6c0-.552.448-1 1-1h16zm-7 10v2h6v-2h-6zm6-8H5v4h14V7z"/></svg>`,
insertLeftTableIcon: `<svg
xmlns="http://www.w3.org/2000/svg"
width={24}
height={24}
viewBox="0 -960 960 960"
>
<path
d="M224.617-140.001q-30.307 0-51.307-21-21-21-21-51.308v-535.382q0-30.308 21-51.308t51.307-21H360q30.307 0 51.307 21 21 21 21 51.308v535.382q0 30.308-21 51.308t-51.307 21H224.617Zm375.383 0q-30.307 0-51.307-21-21-21-21-51.308v-535.382q0-30.308 21-51.308t51.307-21h135.383q30.307 0 51.307 21 21 21 21 51.308v535.382q0 30.308-21 51.308t-51.307 21H600Zm147.691-607.69q0-4.616-3.846-8.463-3.846-3.846-8.462-3.846H600q-4.616 0-8.462 3.846-3.847 3.847-3.847 8.463v535.382q0 4.616 3.847 8.463Q595.384-200 600-200h135.383q4.616 0 8.462-3.846 3.846-3.847 3.846-8.463v-535.382ZM587.691-200h160-160Z"
fill="rgb(var(--color-text-300))"
/>
</svg>
`,
insertRightTableIcon: `<svg
xmlns="http://www.w3.org/2000/svg"
width={24}
height={24}
viewBox="0 -960 960 960"
>
<path
d="M600-140.001q-30.307 0-51.307-21-21-21-21-51.308v-535.382q0-30.308 21-51.308t51.307-21h135.383q30.307 0 51.307 21 21 21 21 51.308v535.382q0 30.308-21 51.308t-51.307 21H600Zm-375.383 0q-30.307 0-51.307-21-21-21-21-51.308v-535.382q0-30.308 21-51.308t51.307-21H360q30.307 0 51.307 21 21 21 21 51.308v535.382q0 30.308-21 51.308t-51.307 21H224.617Zm-12.308-607.69v535.382q0 4.616 3.846 8.463 3.846 3.846 8.462 3.846H360q4.616 0 8.462-3.846 3.847-3.847 3.847-8.463v-535.382q0-4.616-3.847-8.463Q364.616-760 360-760H224.617q-4.616 0-8.462 3.846-3.846 3.847-3.846 8.463Zm160 547.691h-160 160Z"
fill="rgb(var(--color-text-300))"
/>
</svg>
`,
insertTopTableIcon: `<svg
xmlns="http://www.w3.org/2000/svg"
width={24}
height={24}
viewBox="0 -960 960 960"
>
<path
d="M212.309-527.693q-30.308 0-51.308-21t-21-51.307v-135.383q0-30.307 21-51.307 21-21 51.308-21h535.382q30.308 0 51.308 21t21 51.307V-600q0 30.307-21 51.307-21 21-51.308 21H212.309Zm0 375.383q-30.308 0-51.308-21t-21-51.307V-360q0-30.307 21-51.307 21-21 51.308-21h535.382q30.308 0 51.308 21t21 51.307v135.383q0 30.307-21 51.307-21 21-51.308 21H212.309Zm0-59.999h535.382q4.616 0 8.463-3.846 3.846-3.846 3.846-8.462V-360q0-4.616-3.846-8.462-3.847-3.847-8.463-3.847H212.309q-4.616 0-8.463 3.847Q200-364.616 200-360v135.383q0 4.616 3.846 8.462 3.847 3.846 8.463 3.846Zm-12.309-160v160-160Z"
fill="rgb(var(--color-text-300))"
/>
</svg>
`,
insertBottomTableIcon:`<svg
xmlns="http://www.w3.org/2000/svg"
width={24}
height={24}
viewBox="0 -960 960 960"
>
<path
d="M212.309-152.31q-30.308 0-51.308-21t-21-51.307V-360q0-30.307 21-51.307 21-21 51.308-21h535.382q30.308 0 51.308 21t21 51.307v135.383q0 30.307-21 51.307-21 21-51.308 21H212.309Zm0-375.383q-30.308 0-51.308-21t-21-51.307v-135.383q0-30.307 21-51.307 21-21 51.308-21h535.382q30.308 0 51.308 21t21 51.307V-600q0 30.307-21 51.307-21 21-51.308 21H212.309Zm535.382-219.998H212.309q-4.616 0-8.463 3.846-3.846 3.846-3.846 8.462V-600q0 4.616 3.846 8.462 3.847 3.847 8.463 3.847h535.382q4.616 0 8.463-3.847Q760-595.384 760-600v-135.383q0-4.616-3.846-8.462-3.847-3.846-8.463-3.846ZM200-587.691v-160 160Z"
fill="rgb(var(--color-text-300))"
/>
</svg>
`,
};
export default icons;

View File

@ -0,0 +1 @@
export { default } from "./table"

View File

@ -0,0 +1,117 @@
import { Plugin, PluginKey, TextSelection } from "@tiptap/pm/state";
import { findParentNode } from "@tiptap/core";
import { DecorationSet, Decoration } from "@tiptap/pm/view";
const key = new PluginKey("tableControls");
export function tableControls() {
return new Plugin({
key,
state: {
init() {
return new TableControlsState();
},
apply(tr, prev) {
return prev.apply(tr);
},
},
props: {
handleDOMEvents: {
mousemove: (view, event) => {
const pluginState = key.getState(view.state);
if (
!(event.target as HTMLElement).closest(".tableWrapper") &&
pluginState.values.hoveredTable
) {
return view.dispatch(
view.state.tr.setMeta(key, {
setHoveredTable: null,
setHoveredCell: null,
}),
);
}
const pos = view.posAtCoords({
left: event.clientX,
top: event.clientY,
});
if (!pos) return;
const table = findParentNode((node) => node.type.name === "table")(
TextSelection.create(view.state.doc, pos.pos),
);
const cell = findParentNode(
(node) =>
node.type.name === "tableCell" ||
node.type.name === "tableHeader",
)(TextSelection.create(view.state.doc, pos.pos));
if (!table || !cell) return;
if (pluginState.values.hoveredCell?.pos !== cell.pos) {
return view.dispatch(
view.state.tr.setMeta(key, {
setHoveredTable: table,
setHoveredCell: cell,
}),
);
}
},
},
decorations: (state) => {
const pluginState = key.getState(state);
if (!pluginState) {
return null;
}
const { hoveredTable, hoveredCell } = pluginState.values;
const docSize = state.doc.content.size;
if (hoveredTable && hoveredCell && hoveredTable.pos < docSize && hoveredCell.pos < docSize) {
const decorations = [
Decoration.node(
hoveredTable.pos,
hoveredTable.pos + hoveredTable.node.nodeSize,
{},
{
hoveredTable,
hoveredCell,
},
),
];
return DecorationSet.create(state.doc, decorations);
}
return null;
},
},
});
}
class TableControlsState {
values;
constructor(props = {}) {
this.values = {
hoveredTable: null,
hoveredCell: null,
...props,
};
}
apply(tr: any) {
const actions = tr.getMeta(key);
if (actions?.setHoveredTable !== undefined) {
this.values.hoveredTable = actions.setHoveredTable;
}
if (actions?.setHoveredCell !== undefined) {
this.values.hoveredCell = actions.setHoveredCell;
}
return this;
}
}

View File

@ -0,0 +1,530 @@
import { h } from "jsx-dom-cjs";
import { Node as ProseMirrorNode } from "@tiptap/pm/model";
import { Decoration, NodeView } from "@tiptap/pm/view";
import tippy, { Instance, Props } from "tippy.js";
import { Editor } from "@tiptap/core";
import {
CellSelection,
TableMap,
updateColumnsOnResize,
} from "@tiptap/prosemirror-tables";
import icons from "./icons";
export function updateColumns(
node: ProseMirrorNode,
colgroup: HTMLElement,
table: HTMLElement,
cellMinWidth: number,
overrideCol?: number,
overrideValue?: any,
) {
let totalWidth = 0;
let fixedWidth = true;
let nextDOM = colgroup.firstChild as HTMLElement;
const row = node.firstChild;
if (!row) return;
for (let i = 0, col = 0; i < row.childCount; i += 1) {
const { colspan, colwidth } = row.child(i).attrs;
for (let j = 0; j < colspan; j += 1, col += 1) {
const hasWidth =
overrideCol === col ? overrideValue : colwidth && colwidth[j];
const cssWidth = hasWidth ? `${hasWidth}px` : "";
totalWidth += hasWidth || cellMinWidth;
if (!hasWidth) {
fixedWidth = false;
}
if (!nextDOM) {
colgroup.appendChild(document.createElement("col")).style.width =
cssWidth;
} else {
if (nextDOM.style.width !== cssWidth) {
nextDOM.style.width = cssWidth;
}
nextDOM = nextDOM.nextSibling as HTMLElement;
}
}
}
while (nextDOM) {
const after = nextDOM.nextSibling;
nextDOM.parentNode?.removeChild(nextDOM);
nextDOM = after as HTMLElement;
}
if (fixedWidth) {
table.style.width = `${totalWidth}px`;
table.style.minWidth = "";
} else {
table.style.width = "";
table.style.minWidth = `${totalWidth}px`;
}
}
const defaultTippyOptions: Partial<Props> = {
allowHTML: true,
arrow: false,
trigger: "click",
animation: "scale-subtle",
theme: "light-border no-padding",
interactive: true,
hideOnClick: true,
placement: "right",
};
function setCellsBackgroundColor(editor: Editor, backgroundColor) {
return editor
.chain()
.focus()
.updateAttributes("tableCell", {
background: backgroundColor,
})
.updateAttributes("tableHeader", {
background: backgroundColor,
})
.run();
}
const columnsToolboxItems = [
{
label: "Add Column Before",
icon: icons.insertLeftTableIcon,
action: ({ editor }: { editor: Editor }) =>
editor.chain().focus().addColumnBefore().run(),
},
{
label: "Add Column After",
icon: icons.insertRightTableIcon,
action: ({ editor }: { editor: Editor }) =>
editor.chain().focus().addColumnAfter().run(),
},
{
label: "Pick Column Color",
icon: icons.colorPicker,
action: ({
editor,
triggerButton,
controlsContainer,
}: {
editor: Editor;
triggerButton: HTMLElement;
controlsContainer;
}) => {
createColorPickerToolbox({
triggerButton,
tippyOptions: {
appendTo: controlsContainer,
},
onSelectColor: (color) => setCellsBackgroundColor(editor, color),
});
},
},
{
label: "Delete Column",
icon: icons.deleteColumn,
action: ({ editor }: { editor: Editor }) =>
editor.chain().focus().deleteColumn().run(),
},
];
const rowsToolboxItems = [
{
label: "Add Row Above",
icon: icons.insertTopTableIcon,
action: ({ editor }: { editor: Editor }) =>
editor.chain().focus().addRowBefore().run(),
},
{
label: "Add Row Below",
icon: icons.insertBottomTableIcon,
action: ({ editor }: { editor: Editor }) =>
editor.chain().focus().addRowAfter().run(),
},
{
label: "Pick Row Color",
icon: icons.colorPicker,
action: ({
editor,
triggerButton,
controlsContainer,
}: {
editor: Editor;
triggerButton: HTMLButtonElement;
controlsContainer:
| Element
| "parent"
| ((ref: Element) => Element)
| undefined;
}) => {
createColorPickerToolbox({
triggerButton,
tippyOptions: {
appendTo: controlsContainer,
},
onSelectColor: (color) => setCellsBackgroundColor(editor, color),
});
},
},
{
label: "Delete Row",
icon: icons.deleteRow,
action: ({ editor }: { editor: Editor }) =>
editor.chain().focus().deleteRow().run(),
},
];
function createToolbox({
triggerButton,
items,
tippyOptions,
onClickItem,
}: {
triggerButton: HTMLElement;
items: { icon: string; label: string }[];
tippyOptions: any;
onClickItem: any;
}): Instance<Props> {
const toolbox = tippy(triggerButton, {
content: h(
"div",
{ className: "tableToolbox" },
items.map((item) =>
h(
"div",
{
className: "toolboxItem",
onClick() {
onClickItem(item);
},
},
[
h("div", {
className: "iconContainer",
innerHTML: item.icon,
}),
h("div", { className: "label" }, item.label),
],
),
),
),
...tippyOptions,
});
return Array.isArray(toolbox) ? toolbox[0] : toolbox;
}
function createColorPickerToolbox({
triggerButton,
tippyOptions,
onSelectColor = () => {},
}: {
triggerButton: HTMLElement;
tippyOptions: Partial<Props>;
onSelectColor?: (color: string) => void;
}) {
const items = {
Default: "rgb(var(--color-primary-100))",
Orange: "#FFE5D1",
Grey: "#F1F1F1",
Yellow: "#FEF3C7",
Green: "#DCFCE7",
Red: "#FFDDDD",
Blue: "#D9E4FF",
Pink: "#FFE8FA",
Purple: "#E8DAFB",
};
const colorPicker = tippy(triggerButton, {
...defaultTippyOptions,
content: h(
"div",
{ className: "tableColorPickerToolbox" },
Object.entries(items).map(([key, value]) =>
h(
"div",
{
className: "toolboxItem",
onClick: () => {
onSelectColor(value);
colorPicker.hide();
},
},
[
h("div", {
className: "colorContainer",
style: {
backgroundColor: value,
},
}),
h(
"div",
{
className: "label",
},
key,
),
],
),
),
),
onHidden: (instance) => {
instance.destroy();
},
showOnCreate: true,
...tippyOptions,
});
return colorPicker;
}
export class TableView implements NodeView {
node: ProseMirrorNode;
cellMinWidth: number;
decorations: Decoration[];
editor: Editor;
getPos: () => number;
hoveredCell;
map: TableMap;
root: HTMLElement;
table: HTMLElement;
colgroup: HTMLElement;
tbody: HTMLElement;
rowsControl?: HTMLElement;
columnsControl?: HTMLElement;
columnsToolbox?: Instance<Props>;
rowsToolbox?: Instance<Props>;
controls?: HTMLElement;
get dom() {
return this.root;
}
get contentDOM() {
return this.tbody;
}
constructor(
node: ProseMirrorNode,
cellMinWidth: number,
decorations: Decoration[],
editor: Editor,
getPos: () => number,
) {
this.node = node;
this.cellMinWidth = cellMinWidth;
this.decorations = decorations;
this.editor = editor;
this.getPos = getPos;
this.hoveredCell = null;
this.map = TableMap.get(node);
if (editor.isEditable) {
this.rowsControl = h(
"div",
{ className: "rowsControl" },
h("button", {
onClick: () => this.selectRow(),
}),
);
this.columnsControl = h(
"div",
{ className: "columnsControl" },
h("button", {
onClick: () => this.selectColumn(),
}),
);
this.controls = h(
"div",
{ className: "tableControls", contentEditable: "false" },
this.rowsControl,
this.columnsControl,
);
this.columnsToolbox = createToolbox({
triggerButton: this.columnsControl.querySelector("button"),
items: columnsToolboxItems,
tippyOptions: {
...defaultTippyOptions,
appendTo: this.controls,
},
onClickItem: (item) => {
item.action({
editor: this.editor,
triggerButton: this.columnsControl?.firstElementChild,
controlsContainer: this.controls,
});
this.columnsToolbox?.hide();
},
});
this.rowsToolbox = createToolbox({
triggerButton: this.rowsControl.firstElementChild,
items: rowsToolboxItems,
tippyOptions: {
...defaultTippyOptions,
appendTo: this.controls,
},
onClickItem: (item) => {
item.action({
editor: this.editor,
triggerButton: this.rowsControl?.firstElementChild,
controlsContainer: this.controls,
});
this.rowsToolbox?.hide();
},
});
}
// Table
this.colgroup = h(
"colgroup",
null,
Array.from({ length: this.map.width }, () => 1).map(() => h("col")),
);
this.tbody = h("tbody");
this.table = h("table", null, this.colgroup, this.tbody);
this.root = h(
"div",
{
className: "tableWrapper controls--disabled",
},
this.controls,
this.table,
);
this.render();
}
update(node: ProseMirrorNode, decorations) {
if (node.type !== this.node.type) {
return false;
}
this.node = node;
this.decorations = decorations;
this.map = TableMap.get(this.node);
if (this.editor.isEditable) {
this.updateControls();
}
this.render();
return true;
}
render() {
if (this.colgroup.children.length !== this.map.width) {
const cols = Array.from({ length: this.map.width }, () => 1).map(() =>
h("col"),
);
this.colgroup.replaceChildren(...cols);
}
updateColumnsOnResize(
this.node,
this.colgroup,
this.table,
this.cellMinWidth,
);
}
ignoreMutation() {
return true;
}
updateControls() {
const { hoveredTable: table, hoveredCell: cell } = Object.values(
this.decorations,
).reduce(
(acc, curr) => {
if (curr.spec.hoveredCell !== undefined) {
acc["hoveredCell"] = curr.spec.hoveredCell;
}
if (curr.spec.hoveredTable !== undefined) {
acc["hoveredTable"] = curr.spec.hoveredTable;
}
return acc;
},
{} as Record<string, HTMLElement>,
) as any;
if (table === undefined || cell === undefined) {
return this.root.classList.add("controls--disabled");
}
this.root.classList.remove("controls--disabled");
this.hoveredCell = cell;
const cellDom = this.editor.view.nodeDOM(cell.pos) as HTMLElement;
const tableRect = this.table.getBoundingClientRect();
const cellRect = cellDom.getBoundingClientRect();
this.columnsControl.style.left = `${
cellRect.left - tableRect.left - this.table.parentElement!.scrollLeft
}px`;
this.columnsControl.style.width = `${cellRect.width}px`;
this.rowsControl.style.top = `${cellRect.top - tableRect.top}px`;
this.rowsControl.style.height = `${cellRect.height}px`;
}
selectColumn() {
if (!this.hoveredCell) return;
const colIndex = this.map.colCount(
this.hoveredCell.pos - (this.getPos() + 1),
);
const anchorCellPos = this.hoveredCell.pos;
const headCellPos =
this.map.map[colIndex + this.map.width * (this.map.height - 1)] +
(this.getPos() + 1);
const cellSelection = CellSelection.create(
this.editor.view.state.doc,
anchorCellPos,
headCellPos,
);
this.editor.view.dispatch(
// @ts-ignore
this.editor.state.tr.setSelection(cellSelection),
);
}
selectRow() {
if (!this.hoveredCell) return;
const anchorCellPos = this.hoveredCell.pos;
const anchorCellIndex = this.map.map.indexOf(
anchorCellPos - (this.getPos() + 1),
);
const headCellPos =
this.map.map[anchorCellIndex + (this.map.width - 1)] +
(this.getPos() + 1);
const cellSelection = CellSelection.create(
this.editor.state.doc,
anchorCellPos,
headCellPos,
);
this.editor.view.dispatch(
// @ts-ignore
this.editor.view.state.tr.setSelection(cellSelection),
);
}
}

View File

@ -0,0 +1,298 @@
import { TextSelection } from "@tiptap/pm/state"
import { callOrReturn, getExtensionField, mergeAttributes, Node, ParentConfig } from "@tiptap/core"
import {
addColumnAfter,
addColumnBefore,
addRowAfter,
addRowBefore,
CellSelection,
columnResizing,
deleteColumn,
deleteRow,
deleteTable,
fixTables,
goToNextCell,
mergeCells,
setCellAttr,
splitCell,
tableEditing,
toggleHeader,
toggleHeaderCell
} from "@tiptap/prosemirror-tables"
import { tableControls } from "./table-controls"
import { TableView } from "./table-view"
import { createTable } from "./utilities/create-table"
import { deleteTableWhenAllCellsSelected } from "./utilities/delete-table-when-all-cells-selected"
export interface TableOptions {
HTMLAttributes: Record<string, any>
resizable: boolean
handleWidth: number
cellMinWidth: number
lastColumnResizable: boolean
allowTableNodeSelection: boolean
}
declare module "@tiptap/core" {
interface Commands<ReturnType> {
table: {
insertTable: (options?: {
rows?: number
cols?: number
withHeaderRow?: boolean
}) => ReturnType
addColumnBefore: () => ReturnType
addColumnAfter: () => ReturnType
deleteColumn: () => ReturnType
addRowBefore: () => ReturnType
addRowAfter: () => ReturnType
deleteRow: () => ReturnType
deleteTable: () => ReturnType
mergeCells: () => ReturnType
splitCell: () => ReturnType
toggleHeaderColumn: () => ReturnType
toggleHeaderRow: () => ReturnType
toggleHeaderCell: () => ReturnType
mergeOrSplit: () => ReturnType
setCellAttribute: (name: string, value: any) => ReturnType
goToNextCell: () => ReturnType
goToPreviousCell: () => ReturnType
fixTables: () => ReturnType
setCellSelection: (position: {
anchorCell: number
headCell?: number
}) => ReturnType
}
}
interface NodeConfig<Options, Storage> {
tableRole?:
| string
| ((this: {
name: string
options: Options
storage: Storage
parent: ParentConfig<NodeConfig<Options>>["tableRole"]
}) => string)
}
}
export default Node.create({
name: "table",
addOptions() {
return {
HTMLAttributes: {},
resizable: true,
handleWidth: 5,
cellMinWidth: 100,
lastColumnResizable: true,
allowTableNodeSelection: true
}
},
content: "tableRow+",
tableRole: "table",
isolating: true,
group: "block",
allowGapCursor: false,
parseHTML() {
return [{ tag: "table" }]
},
renderHTML({ HTMLAttributes }) {
return [
"table",
mergeAttributes(this.options.HTMLAttributes, HTMLAttributes),
["tbody", 0]
]
},
addCommands() {
return {
insertTable:
({ rows = 3, cols = 3, withHeaderRow = true} = {}) =>
({ tr, dispatch, editor }) => {
const node = createTable(
editor.schema,
rows,
cols,
withHeaderRow
)
if (dispatch) {
const offset = tr.selection.anchor + 1
tr.replaceSelectionWith(node)
.scrollIntoView()
.setSelection(
TextSelection.near(tr.doc.resolve(offset))
)
}
return true
},
addColumnBefore:
() =>
({ state, dispatch }) => addColumnBefore(state, dispatch),
addColumnAfter:
() =>
({ state, dispatch }) => addColumnAfter(state, dispatch),
deleteColumn:
() =>
({ state, dispatch }) => deleteColumn(state, dispatch),
addRowBefore:
() =>
({ state, dispatch }) => addRowBefore(state, dispatch),
addRowAfter:
() =>
({ state, dispatch }) => addRowAfter(state, dispatch),
deleteRow:
() =>
({ state, dispatch }) => deleteRow(state, dispatch),
deleteTable:
() =>
({ state, dispatch }) => deleteTable(state, dispatch),
mergeCells:
() =>
({ state, dispatch }) => mergeCells(state, dispatch),
splitCell:
() =>
({ state, dispatch }) => splitCell(state, dispatch),
toggleHeaderColumn:
() =>
({ state, dispatch }) => toggleHeader("column")(state, dispatch),
toggleHeaderRow:
() =>
({ state, dispatch }) => toggleHeader("row")(state, dispatch),
toggleHeaderCell:
() =>
({ state, dispatch }) => toggleHeaderCell(state, dispatch),
mergeOrSplit:
() =>
({ state, dispatch }) => {
if (mergeCells(state, dispatch)) {
return true
}
return splitCell(state, dispatch)
},
setCellAttribute:
(name, value) =>
({ state, dispatch }) => setCellAttr(name, value)(state, dispatch),
goToNextCell:
() =>
({ state, dispatch }) => goToNextCell(1)(state, dispatch),
goToPreviousCell:
() =>
({ state, dispatch }) => goToNextCell(-1)(state, dispatch),
fixTables:
() =>
({ state, dispatch }) => {
if (dispatch) {
fixTables(state)
}
return true
},
setCellSelection:
(position) =>
({ tr, dispatch }) => {
if (dispatch) {
const selection = CellSelection.create(
tr.doc,
position.anchorCell,
position.headCell
)
// @ts-ignore
tr.setSelection(selection)
}
return true
}
}
},
addKeyboardShortcuts() {
return {
Tab: () => {
if (this.editor.commands.goToNextCell()) {
return true
}
if (!this.editor.can().addRowAfter()) {
return false
}
return this.editor.chain().addRowAfter().goToNextCell().run()
},
"Shift-Tab": () => this.editor.commands.goToPreviousCell(),
Backspace: deleteTableWhenAllCellsSelected,
"Mod-Backspace": deleteTableWhenAllCellsSelected,
Delete: deleteTableWhenAllCellsSelected,
"Mod-Delete": deleteTableWhenAllCellsSelected
}
},
addNodeView() {
return ({ editor, getPos, node, decorations }) => {
const { cellMinWidth } = this.options
return new TableView(
node,
cellMinWidth,
decorations,
editor,
getPos as () => number
)
}
},
addProseMirrorPlugins() {
const isResizable = this.options.resizable && this.editor.isEditable
const plugins = [
tableEditing({
allowTableNodeSelection: this.options.allowTableNodeSelection
}),
tableControls()
]
if (isResizable) {
plugins.unshift(
columnResizing({
handleWidth: this.options.handleWidth,
cellMinWidth: this.options.cellMinWidth,
// View: TableView,
// @ts-ignore
lastColumnResizable: this.options.lastColumnResizable
})
)
}
return plugins
},
extendNodeSchema(extension) {
const context = {
name: extension.name,
options: extension.options,
storage: extension.storage
}
return {
tableRole: callOrReturn(
getExtensionField(extension, "tableRole", context)
)
}
}
})

View File

@ -0,0 +1,12 @@
import { Fragment, Node as ProsemirrorNode, NodeType } from "prosemirror-model"
export function createCell(
cellType: NodeType,
cellContent?: Fragment | ProsemirrorNode | Array<ProsemirrorNode>
): ProsemirrorNode | null | undefined {
if (cellContent) {
return cellType.createChecked(null, cellContent)
}
return cellType.createAndFill()
}

View File

@ -0,0 +1,45 @@
import { Fragment, Node as ProsemirrorNode, Schema } from "@tiptap/pm/model"
import { createCell } from "./create-cell"
import { getTableNodeTypes } from "./get-table-node-types"
export function createTable(
schema: Schema,
rowsCount: number,
colsCount: number,
withHeaderRow: boolean,
cellContent?: Fragment | ProsemirrorNode | Array<ProsemirrorNode>
): ProsemirrorNode {
const types = getTableNodeTypes(schema)
const headerCells: ProsemirrorNode[] = []
const cells: ProsemirrorNode[] = []
for (let index = 0; index < colsCount; index += 1) {
const cell = createCell(types.cell, cellContent)
if (cell) {
cells.push(cell)
}
if (withHeaderRow) {
const headerCell = createCell(types.header_cell, cellContent)
if (headerCell) {
headerCells.push(headerCell)
}
}
}
const rows: ProsemirrorNode[] = []
for (let index = 0; index < rowsCount; index += 1) {
rows.push(
types.row.createChecked(
null,
withHeaderRow && index === 0 ? headerCells : cells
)
)
}
return types.table.createChecked(null, rows)
}

Some files were not shown because too many files have changed in this diff.