Merge pull request #4238 from makeplane/preview

release: v0.18-dev
sriram veeraghanta 2024-04-19 11:56:03 +05:30 committed by GitHub
commit f71e8a3a0f
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
587 changed files with 18022 additions and 10686 deletions

View File

@@ -55,12 +55,19 @@ body:
        - Safari
        - Other
  - type: dropdown
-    id: version
+    id: variant
    attributes:
-      label: Version
+      label: Variant
      options:
        - Cloud
        - Self-hosted
+        - Local
+    validations:
+      required: true
+  - type: input
+    id: version
+    attributes:
+      label: Version
+      placeholder: v0.17.0-dev
    validations:
      required: true

View File

@@ -3,7 +3,7 @@ name: "CodeQL"
 on:
   workflow_dispatch:
   push:
-    branches: ["develop", "preview", "master"]
+    branches: ["preview", "master"]
   pull_request:
     branches: ["develop", "preview", "master"]
   schedule:

View File

@@ -27,7 +27,7 @@ RUN yarn install
 COPY --from=builder /app/out/full/ .
 COPY turbo.json turbo.json
 COPY replace-env-vars.sh /usr/local/bin/
-USER root
 RUN chmod +x /usr/local/bin/replace-env-vars.sh
 RUN yarn turbo run build
@@ -89,21 +89,17 @@ RUN chmod -R 777 /code
 WORKDIR /app
-# Don't run production as root
-RUN addgroup --system --gid 1001 plane
-RUN adduser --system --uid 1001 captain
 COPY --from=installer /app/apps/app/next.config.js .
 COPY --from=installer /app/apps/app/package.json .
 COPY --from=installer /app/apps/space/next.config.js .
 COPY --from=installer /app/apps/space/package.json .
-COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./
-COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static
-COPY --from=installer --chown=captain:plane /app/apps/space/.next/standalone ./
-COPY --from=installer --chown=captain:plane /app/apps/space/.next ./apps/space/.next
+COPY --from=installer /app/apps/app/.next/standalone ./
+COPY --from=installer /app/apps/app/.next/static ./apps/app/.next/static
+COPY --from=installer /app/apps/space/.next/standalone ./
+COPY --from=installer /app/apps/space/.next ./apps/space/.next
 ENV NEXT_TELEMETRY_DISABLED 1
@@ -118,7 +114,6 @@ ARG NEXT_PUBLIC_API_BASE_URL=http://localhost:8000
 ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \
     BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL
-USER root
 COPY replace-env-vars.sh /usr/local/bin/
 COPY start.sh /usr/local/bin/
 RUN chmod +x /usr/local/bin/replace-env-vars.sh

SECURITY.md (new file, 44 lines added)
View File

@ -0,0 +1,44 @@
# Security Policy
This document outlines security procedures and vulnerabilities reporting for the Plane project.
At Plane, safeguarding the security of our systems is a top priority. Despite our efforts, vulnerabilities may still exist. We greatly appreciate your assistance in identifying and reporting any such vulnerabilities so we can maintain the integrity of our systems and protect our clients.
To report a security vulnerability, please email us directly at security@plane.so with a detailed description of the vulnerability and steps to reproduce it. Please refrain from disclosing the vulnerability publicly until we have had an opportunity to review and address it.
## Out of Scope Vulnerabilities
We appreciate your help in identifying vulnerabilities. However, please note that the following types of vulnerabilities are considered out of scope:
- Attacks requiring MITM or physical access to a user's device.
- Content spoofing and text injection issues without demonstrating an attack vector or ability to modify HTML/CSS.
- Email spoofing.
- Missing DNSSEC, CAA, or CSP headers.
- Missing Secure or HttpOnly flags on non-sensitive cookies.
## Reporting Process
If you discover a vulnerability, please adhere to the following reporting process:
1. Email your findings to security@plane.so.
2. Refrain from running automated scanners on our infrastructure or dashboard without prior consent. Contact us to set up a sandbox environment if necessary.
3. Do not exploit the vulnerability for malicious purposes, such as downloading excessive data or altering user data.
4. Maintain confidentiality and refrain from disclosing the vulnerability until it has been resolved.
5. Avoid using physical security attacks, social engineering, distributed denial of service, spam, or third-party applications.
When reporting a vulnerability, please provide sufficient information to allow us to reproduce and address the issue promptly. Include the IP address or URL of the affected system, along with a detailed description of the vulnerability.
## Our Commitment
We are committed to promptly addressing reported vulnerabilities and maintaining open communication throughout the resolution process. Here's what you can expect from us:
- **Response Time:** We will acknowledge receipt of your report within three business days and provide an expected resolution date.
- **Legal Protection:** We will not pursue legal action against you for reporting vulnerabilities, provided you adhere to the reporting guidelines.
- **Confidentiality:** Your report will be treated with strict confidentiality. We will not disclose your personal information to third parties without your consent.
- **Progress Updates:** We will keep you informed of our progress in resolving the reported vulnerability.
- **Recognition:** With your permission, we will publicly acknowledge you as the discoverer of the vulnerability.
- **Timely Resolution:** We strive to resolve all reported vulnerabilities promptly and will actively participate in the publication process once the issue is resolved.
We appreciate your cooperation in helping us maintain the security of our systems and protecting our clients. Thank you for your contributions to our security efforts.
reference: https://supabase.com/.well-known/security.txt

View File

@@ -32,28 +32,18 @@ RUN apk add --no-cache --virtual .build-deps \
     apk del .build-deps
-RUN addgroup -S plane && \
-    adduser -S captain -G plane
-RUN chown captain.plane /code
-USER captain
 # Add in Django deps and generate Django's static files
 COPY manage.py manage.py
 COPY plane plane/
 COPY templates templates/
 COPY package.json package.json
-USER root
 RUN apk --no-cache add "bash~=5.2"
 COPY ./bin ./bin/
 RUN mkdir -p /code/plane/logs
 RUN chmod +x ./bin/takeoff ./bin/worker ./bin/beat
 RUN chmod -R 777 /code
-RUN chown -R captain:plane /code
-USER captain
 # Expose container port and run entry point script
 EXPOSE 8000

View File

@@ -30,17 +30,13 @@ ADD requirements ./requirements
 # Install the local development settings
 RUN pip install -r requirements/local.txt --compile --no-cache-dir
-RUN addgroup -S plane && \
-    adduser -S captain -G plane
 COPY . .
 RUN mkdir -p /code/plane/logs
-RUN chown -R captain.plane /code
 RUN chmod -R +x /code/bin
 RUN chmod -R 777 /code
-USER captain
 # Expose container port and run entry point script
 EXPOSE 8000

View File

@@ -66,11 +66,11 @@ class BaseSerializer(serializers.ModelSerializer):
             if expand in self.fields:
                 # Import all the expandable serializers
                 from . import (
-                    WorkspaceLiteSerializer,
-                    ProjectLiteSerializer,
-                    UserLiteSerializer,
-                    StateLiteSerializer,
                     IssueSerializer,
+                    ProjectLiteSerializer,
+                    StateLiteSerializer,
+                    UserLiteSerializer,
+                    WorkspaceLiteSerializer,
                 )
                 # Expansion mapper

View File

@@ -79,7 +79,7 @@ class IssueSerializer(BaseSerializer):
             parsed_str = html.tostring(parsed, encoding="unicode")
             data["description_html"] = parsed_str
-        except Exception as e:
+        except Exception:
             raise serializers.ValidationError("Invalid HTML passed")
         # Validate assignees are from project
@@ -366,7 +366,7 @@ class IssueCommentSerializer(BaseSerializer):
            parsed_str = html.tostring(parsed, encoding="unicode")
            data["comment_html"] = parsed_str
-        except Exception as e:
+        except Exception:
            raise serializers.ValidationError("Invalid HTML passed")
        return data

View File

@@ -7,6 +7,7 @@ from plane.db.models import (
     ProjectIdentifier,
     WorkspaceMember,
 )
 from .base import BaseSerializer

View File

@@ -1,5 +1,6 @@
 # Module imports
 from plane.db.models import User
 from .base import BaseSerializer
@@ -10,7 +11,9 @@ class UserLiteSerializer(BaseSerializer):
             "id",
             "first_name",
             "last_name",
-            "email",
             "avatar",
             "display_name",
+            "email",
         ]
         read_only_fields = fields

View File

@@ -34,7 +34,7 @@ urlpatterns = [
         name="transfer-issues",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:pk>/archive/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/cycles/<uuid:cycle_id>/archive/",
         CycleArchiveUnarchiveAPIEndpoint.as_view(),
         name="cycle-archive-unarchive",
     ),

View File

@@ -12,7 +12,7 @@ urlpatterns = [
         name="project",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/",
+        "workspaces/<str:slug>/projects/<uuid:pk>/",
         ProjectAPIEndpoint.as_view(),
         name="project",
     ),

View File

@@ -7,6 +7,7 @@ import zoneinfo
 from django.conf import settings
 from django.core.exceptions import ObjectDoesNotExist, ValidationError
 from django.db import IntegrityError
+from django.urls import resolve
 from django.utils import timezone
 from rest_framework import status
 from rest_framework.permissions import IsAuthenticated
@@ -165,7 +166,12 @@ class BaseAPIView(TimezoneMixin, APIView, BasePaginator):
     @property
     def project_id(self):
-        return self.kwargs.get("project_id", None)
+        project_id = self.kwargs.get("project_id", None)
+        if project_id:
+            return project_id
+        if resolve(self.request.path_info).url_name == "project":
+            return self.kwargs.get("pk", None)
     @property
     def fields(self):

View File

@@ -2,29 +2,31 @@
 import json
 # Django imports
-from django.db.models import Q, Count, Sum, F, OuterRef, Func
-from django.utils import timezone
 from django.core import serializers
+from django.db.models import Count, F, Func, OuterRef, Q, Sum
+from django.utils import timezone
 # Third party imports
-from rest_framework.response import Response
 from rest_framework import status
+from rest_framework.response import Response
 # Module imports
-from .base import BaseAPIView, WebhookMixin
-from plane.db.models import (
-    Cycle,
-    Issue,
-    CycleIssue,
-    IssueLink,
-    IssueAttachment,
-)
+from plane.api.serializers import (
+    CycleIssueSerializer,
+    CycleSerializer,
+)
 from plane.app.permissions import ProjectEntityPermission
-from plane.api.serializers import (
-    CycleSerializer,
-    CycleIssueSerializer,
-)
 from plane.bgtasks.issue_activites_task import issue_activity
+from plane.db.models import (
+    Cycle,
+    CycleIssue,
+    Issue,
+    IssueAttachment,
+    IssueLink,
+)
+from plane.utils.analytics_plot import burndown_plot
+from .base import BaseAPIView, WebhookMixin
 class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
@@ -152,9 +154,7 @@ class CycleAPIEndpoint(WebhookMixin, BaseAPIView):
                 data,
                 status=status.HTTP_200_OK,
             )
-        queryset = (
-            self.get_queryset().filter(archived_at__isnull=True)
-        )
+        queryset = self.get_queryset().filter(archived_at__isnull=True)
         cycle_view = request.GET.get("cycle_view", "all")
         # Current Cycle
@@ -493,17 +493,22 @@ class CycleArchiveUnarchiveAPIEndpoint(BaseAPIView):
             ).data,
         )
-    def post(self, request, slug, project_id, pk):
+    def post(self, request, slug, project_id, cycle_id):
         cycle = Cycle.objects.get(
-            pk=pk, project_id=project_id, workspace__slug=slug
+            pk=cycle_id, project_id=project_id, workspace__slug=slug
         )
+        if cycle.end_date >= timezone.now().date():
+            return Response(
+                {"error": "Only completed cycles can be archived"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
         cycle.archived_at = timezone.now()
         cycle.save()
         return Response(status=status.HTTP_204_NO_CONTENT)
-    def delete(self, request, slug, project_id, pk):
+    def delete(self, request, slug, project_id, cycle_id):
         cycle = Cycle.objects.get(
-            pk=pk, project_id=project_id, workspace__slug=slug
+            pk=cycle_id, project_id=project_id, workspace__slug=slug
         )
         cycle.archived_at = None
         cycle.save()
@@ -551,7 +556,21 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
             .distinct()
         )
-    def get(self, request, slug, project_id, cycle_id):
+    def get(self, request, slug, project_id, cycle_id, issue_id=None):
+        # Get
+        if issue_id:
+            cycle_issue = CycleIssue.objects.get(
+                workspace__slug=slug,
+                project_id=project_id,
+                cycle_id=cycle_id,
+                issue_id=issue_id,
+            )
+            serializer = CycleIssueSerializer(
+                cycle_issue, fields=self.fields, expand=self.expand
+            )
+            return Response(serializer.data, status=status.HTTP_200_OK)
+        # List
         order_by = request.GET.get("order_by", "created_at")
         issues = (
             Issue.issue_objects.filter(issue_cycle__cycle_id=cycle_id)
@@ -727,7 +746,7 @@ class CycleIssueAPIEndpoint(WebhookMixin, BaseAPIView):
 class TransferCycleIssueAPIEndpoint(BaseAPIView):
     """
-    This viewset provides `create` actions for transfering the issues into a particular cycle.
+    This viewset provides `create` actions for transferring the issues into a particular cycle.
     """
@ -748,6 +767,209 @@ class TransferCycleIssueAPIEndpoint(BaseAPIView):
workspace__slug=slug, project_id=project_id, pk=new_cycle_id workspace__slug=slug, project_id=project_id, pk=new_cycle_id
) )
old_cycle = (
Cycle.objects.filter(
workspace__slug=slug, project_id=project_id, pk=cycle_id
)
.annotate(
total_issues=Count(
"issue_cycle",
filter=Q(
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
completed_issues=Count(
"issue_cycle__issue__state__group",
filter=Q(
issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
cancelled_issues=Count(
"issue_cycle__issue__state__group",
filter=Q(
issue_cycle__issue__state__group="cancelled",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
started_issues=Count(
"issue_cycle__issue__state__group",
filter=Q(
issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
unstarted_issues=Count(
"issue_cycle__issue__state__group",
filter=Q(
issue_cycle__issue__state__group="unstarted",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate(
backlog_issues=Count(
"issue_cycle__issue__state__group",
filter=Q(
issue_cycle__issue__state__group="backlog",
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
)
# Pass the new_cycle queryset to burndown_plot
completion_chart = burndown_plot(
queryset=old_cycle.first(),
slug=slug,
project_id=project_id,
cycle_id=cycle_id,
)
# Get the assignee distribution
assignee_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=cycle_id,
workspace__slug=slug,
project_id=project_id,
)
.annotate(display_name=F("assignees__display_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(avatar=F("assignees__avatar"))
.values("display_name", "assignee_id", "avatar")
.annotate(
total_issues=Count(
"id",
filter=Q(archived_at__isnull=True, is_draft=False),
),
)
.annotate(
completed_issues=Count(
"id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_issues=Count(
"id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("display_name")
)
# assignee distribution serialized
assignee_distribution_data = [
{
"display_name": item["display_name"],
"assignee_id": (
str(item["assignee_id"]) if item["assignee_id"] else None
),
"avatar": item["avatar"],
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
}
for item in assignee_distribution
]
# Get the label distribution
label_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=cycle_id,
workspace__slug=slug,
project_id=project_id,
)
.annotate(label_name=F("labels__name"))
.annotate(color=F("labels__color"))
.annotate(label_id=F("labels__id"))
.values("label_name", "color", "label_id")
.annotate(
total_issues=Count(
"id",
filter=Q(archived_at__isnull=True, is_draft=False),
)
)
.annotate(
completed_issues=Count(
"id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_issues=Count(
"id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("label_name")
)
# Label distribution serialization
label_distribution_data = [
{
"label_name": item["label_name"],
"color": item["color"],
"label_id": (
str(item["label_id"]) if item["label_id"] else None
),
"total_issues": item["total_issues"],
"completed_issues": item["completed_issues"],
"pending_issues": item["pending_issues"],
}
for item in label_distribution
]
current_cycle = Cycle.objects.filter(
workspace__slug=slug, project_id=project_id, pk=cycle_id
).first()
if current_cycle:
current_cycle.progress_snapshot = {
"total_issues": old_cycle.first().total_issues,
"completed_issues": old_cycle.first().completed_issues,
"cancelled_issues": old_cycle.first().cancelled_issues,
"started_issues": old_cycle.first().started_issues,
"unstarted_issues": old_cycle.first().unstarted_issues,
"backlog_issues": old_cycle.first().backlog_issues,
"distribution": {
"labels": label_distribution_data,
"assignees": assignee_distribution_data,
"completion_chart": completion_chart,
},
}
# Save the snapshot of the current cycle
current_cycle.save(update_fields=["progress_snapshot"])
if ( if (
new_cycle.end_date is not None new_cycle.end_date is not None
and new_cycle.end_date < timezone.now().date() and new_cycle.end_date < timezone.now().date()

View File

@ -2,27 +2,28 @@
import json import json
# Django improts # Django improts
from django.utils import timezone
from django.db.models import Q
from django.core.serializers.json import DjangoJSONEncoder from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import Q
from django.utils import timezone
# Third party imports # Third party imports
from rest_framework import status from rest_framework import status
from rest_framework.response import Response from rest_framework.response import Response
# Module imports # Module imports
from .base import BaseAPIView
from plane.app.permissions import ProjectLitePermission
from plane.api.serializers import InboxIssueSerializer, IssueSerializer from plane.api.serializers import InboxIssueSerializer, IssueSerializer
from plane.app.permissions import ProjectLitePermission
from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import ( from plane.db.models import (
Inbox,
InboxIssue, InboxIssue,
Issue, Issue,
State,
ProjectMember,
Project, Project,
Inbox, ProjectMember,
State,
) )
from plane.bgtasks.issue_activites_task import issue_activity
from .base import BaseAPIView
class InboxIssueAPIEndpoint(BaseAPIView): class InboxIssueAPIEndpoint(BaseAPIView):
@@ -134,10 +135,11 @@ class InboxIssueAPIEndpoint(BaseAPIView):
         # Create or get state
         state, _ = State.objects.get_or_create(
             name="Triage",
-            group="backlog",
+            group="triage",
             description="Default state for managing all Inbox Issues",
             project_id=project_id,
             color="#ff7700",
+            is_triage=True,
         )
         # create an issue
@@ -270,6 +272,9 @@ class InboxIssueAPIEndpoint(BaseAPIView):
         serializer = InboxIssueSerializer(
             inbox_issue, data=request.data, partial=True
         )
+        current_instance = json.dumps(
+            InboxIssueSerializer(inbox_issue).data, cls=DjangoJSONEncoder
+        )
         if serializer.is_valid():
             serializer.save()
@@ -298,7 +303,7 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                 )
                 # Update the issue state only if it is in triage state
-                if issue.state.name == "Triage":
+                if issue.state.is_triage:
                     # Move to default state
                     state = State.objects.filter(
                         workspace__slug=slug,
@@ -309,6 +314,21 @@ class InboxIssueAPIEndpoint(BaseAPIView):
                     issue.state = state
                     issue.save()
# create an activity for status change
issue_activity.delay(
type="inbox.activity.created",
requested_data=json.dumps(
request.data, cls=DjangoJSONEncoder
),
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
notification=False,
origin=request.META.get("HTTP_ORIGIN"),
)
            return Response(serializer.data, status=status.HTTP_200_OK)
        return Response(
            serializer.errors, status=status.HTTP_400_BAD_REQUEST

View File

@@ -308,8 +308,6 @@ class IssueAPIEndpoint(WebhookMixin, BaseAPIView):
             actor_id=str(request.user.id),
             issue_id=str(pk),
             project_id=str(project_id),
-            external_id__isnull=False,
-            external_source__isnull=False,
             current_instance=current_instance,
             epoch=int(timezone.now().timestamp()),
         )

View File

@@ -2,32 +2,33 @@
 import json
 # Django imports
-from django.db.models import Count, Prefetch, Q, F, Func, OuterRef
-from django.utils import timezone
 from django.core import serializers
+from django.db.models import Count, F, Func, OuterRef, Prefetch, Q
+from django.utils import timezone
 # Third party imports
 from rest_framework import status
 from rest_framework.response import Response
 # Module imports
-from .base import BaseAPIView, WebhookMixin
+from plane.api.serializers import (
+    IssueSerializer,
+    ModuleIssueSerializer,
+    ModuleSerializer,
+)
 from plane.app.permissions import ProjectEntityPermission
+from plane.bgtasks.issue_activites_task import issue_activity
 from plane.db.models import (
-    Project,
-    Module,
-    ModuleLink,
     Issue,
-    ModuleIssue,
     IssueAttachment,
     IssueLink,
+    Module,
+    ModuleIssue,
+    ModuleLink,
+    Project,
 )
-from plane.api.serializers import (
-    ModuleSerializer,
-    ModuleIssueSerializer,
-    IssueSerializer,
-)
-from plane.bgtasks.issue_activites_task import issue_activity
+from .base import BaseAPIView, WebhookMixin
 class ModuleAPIEndpoint(WebhookMixin, BaseAPIView):
@@ -553,7 +554,7 @@ class ModuleArchiveUnarchiveAPIEndpoint(BaseAPIView):
             .order_by(self.kwargs.get("order_by", "-created_at"))
         )
-    def get(self, request, slug, project_id):
+    def get(self, request, slug, project_id, pk):
         return self.paginate(
             request=request,
             queryset=(self.get_queryset()),
@@ -569,6 +570,13 @@
         module = Module.objects.get(
             pk=pk, project_id=project_id, workspace__slug=slug
         )
+        if module.status not in ["completed", "cancelled"]:
+            return Response(
+                {
+                    "error": "Only completed or cancelled modules can be archived"
+                },
+                status=status.HTTP_400_BAD_REQUEST,
+            )
         module.archived_at = timezone.now()
         module.save()
         return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -1,27 +1,29 @@
 # Django imports
-from django.utils import timezone
 from django.db import IntegrityError
-from django.db.models import Exists, OuterRef, Q, F, Func, Subquery, Prefetch
+from django.db.models import Exists, F, Func, OuterRef, Prefetch, Q, Subquery
+from django.utils import timezone
 # Third party imports
 from rest_framework import status
 from rest_framework.response import Response
 from rest_framework.serializers import ValidationError
+from plane.api.serializers import ProjectSerializer
+from plane.app.permissions import ProjectBasePermission
 # Module imports
 from plane.db.models import (
-    Workspace,
-    Project,
-    ProjectMember,
-    ProjectDeployBoard,
-    State,
     Cycle,
-    Module,
-    IssueProperty,
     Inbox,
+    IssueProperty,
+    Module,
+    Project,
+    ProjectDeployBoard,
+    ProjectMember,
+    State,
+    Workspace,
 )
-from plane.app.permissions import ProjectBasePermission
-from plane.api.serializers import ProjectSerializer
 from .base import BaseAPIView, WebhookMixin
@@ -103,8 +105,8 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
             .distinct()
         )
-    def get(self, request, slug, project_id=None):
-        if project_id is None:
+    def get(self, request, slug, pk=None):
+        if pk is None:
             sort_order_query = ProjectMember.objects.filter(
                 member=request.user,
                 project_id=OuterRef("pk"),
@@ -135,7 +137,7 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
                     expand=self.expand,
                 ).data,
             )
-        project = self.get_queryset().get(workspace__slug=slug, pk=project_id)
+        project = self.get_queryset().get(workspace__slug=slug, pk=pk)
         serializer = ProjectSerializer(
             project,
             fields=self.fields,
@@ -259,10 +261,10 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
                 status=status.HTTP_410_GONE,
             )
-    def patch(self, request, slug, project_id=None):
+    def patch(self, request, slug, pk):
         try:
             workspace = Workspace.objects.get(slug=slug)
-            project = Project.objects.get(pk=project_id)
+            project = Project.objects.get(pk=pk)
             if project.archived_at:
                 return Response(
@@ -289,10 +291,11 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
             # Create the triage state in Backlog group
             State.objects.get_or_create(
                 name="Triage",
-                group="backlog",
+                group="triage",
                 description="Default state for managing all Inbox Issues",
-                project_id=project_id,
+                project_id=pk,
                 color="#ff7700",
+                is_triage=True,
             )
             project = (
@@ -322,8 +325,8 @@ class ProjectAPIEndpoint(WebhookMixin, BaseAPIView):
                 status=status.HTTP_410_GONE,
             )
-    def delete(self, request, slug, project_id):
-        project = Project.objects.get(pk=project_id, workspace__slug=slug)
+    def delete(self, request, slug, pk):
+        project = Project.objects.get(pk=pk, workspace__slug=slug)
         project.delete()
         return Response(status=status.HTTP_204_NO_CONTENT)

View File

@@ -1,16 +1,16 @@
 # Django imports
 from django.db import IntegrityError
-from django.db.models import Q
 # Third party imports
-from rest_framework.response import Response
 from rest_framework import status
+from rest_framework.response import Response
+from plane.api.serializers import StateSerializer
+from plane.app.permissions import ProjectEntityPermission
+from plane.db.models import Issue, State
 # Module imports
 from .base import BaseAPIView
-from plane.api.serializers import StateSerializer
-from plane.app.permissions import ProjectEntityPermission
-from plane.db.models import State, Issue
 class StateAPIEndpoint(BaseAPIView):
@@ -28,8 +28,8 @@ class StateAPIEndpoint(BaseAPIView):
                 project__project_projectmember__member=self.request.user,
                 project__project_projectmember__is_active=True,
             )
+            .filter(is_triage=False)
            .filter(project__archived_at__isnull=True)
-            .filter(~Q(name="Triage"))
            .select_related("project")
            .select_related("workspace")
            .distinct()
@@ -86,7 +86,11 @@ class StateAPIEndpoint(BaseAPIView):
     def get(self, request, slug, project_id, state_id=None):
         if state_id:
-            serializer = StateSerializer(self.get_queryset().get(pk=state_id))
+            serializer = StateSerializer(
+                self.get_queryset().get(pk=state_id),
+                fields=self.fields,
+                expand=self.expand,
+            )
             return Response(serializer.data, status=status.HTTP_200_OK)
         return self.paginate(
             request=request,
@@ -101,7 +105,7 @@
     def delete(self, request, slug, project_id, state_id):
         state = State.objects.get(
-            ~Q(name="Triage"),
+            is_triage=False,
             pk=state_id,
             project_id=project_id,
             workspace__slug=slug,

View File

@@ -1,8 +1,8 @@
 # Third Party imports
-from rest_framework.permissions import BasePermission, SAFE_METHODS
+from rest_framework.permissions import SAFE_METHODS, BasePermission
 # Module import
-from plane.db.models import WorkspaceMember, ProjectMember
+from plane.db.models import ProjectMember, WorkspaceMember
 # Permission Mappings
 Admin = 20

View File

@@ -59,6 +59,7 @@ from .issue import (
     IssueFlatSerializer,
     IssueStateSerializer,
     IssueLinkSerializer,
+    IssueInboxSerializer,
     IssueLiteSerializer,
     IssueAttachmentSerializer,
     IssueSubscriberSerializer,
@@ -92,6 +93,7 @@ from .page import (
     PageSerializer,
     PageLogSerializer,
     SubPageSerializer,
+    PageDetailSerializer,
     PageFavoriteSerializer,
 )
@@ -107,6 +109,7 @@ from .inbox import (
     InboxIssueSerializer,
     IssueStateInboxSerializer,
     InboxIssueLiteSerializer,
+    InboxIssueDetailSerializer,
 )
 from .analytic import AnalyticViewSerializer

View File

@@ -3,7 +3,11 @@ from rest_framework import serializers
 # Module imports
 from .base import BaseSerializer
-from .issue import IssueFlatSerializer, LabelLiteSerializer
+from .issue import (
+    IssueInboxSerializer,
+    LabelLiteSerializer,
+    IssueDetailSerializer,
+)
 from .project import ProjectLiteSerializer
 from .state import StateLiteSerializer
 from .user import UserLiteSerializer
@@ -24,17 +28,62 @@ class InboxSerializer(BaseSerializer):
 class InboxIssueSerializer(BaseSerializer):
-    issue_detail = IssueFlatSerializer(source="issue", read_only=True)
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
+    issue = IssueInboxSerializer(read_only=True)
     class Meta:
         model = InboxIssue
-        fields = "__all__"
+        fields = [
+            "id",
+            "status",
+            "duplicate_to",
+            "snoozed_till",
+            "source",
+            "issue",
+            "created_by",
+        ]
         read_only_fields = [
             "project",
             "workspace",
         ]
def to_representation(self, instance):
# Pass the annotated fields to the Issue instance if they exist
if hasattr(instance, "label_ids"):
instance.issue.label_ids = instance.label_ids
return super().to_representation(instance)
class InboxIssueDetailSerializer(BaseSerializer):
issue = IssueDetailSerializer(read_only=True)
duplicate_issue_detail = IssueInboxSerializer(
read_only=True, source="duplicate_to"
)
class Meta:
model = InboxIssue
fields = [
"id",
"status",
"duplicate_to",
"snoozed_till",
"duplicate_issue_detail",
"source",
"issue",
]
read_only_fields = [
"project",
"workspace",
]
def to_representation(self, instance):
# Pass the annotated fields to the Issue instance if they exist
if hasattr(instance, "assignee_ids"):
instance.issue.assignee_ids = instance.assignee_ids
if hasattr(instance, "label_ids"):
instance.issue.label_ids = instance.label_ids
return super().to_representation(instance)
class InboxIssueLiteSerializer(BaseSerializer):
    class Meta:

View File

@@ -620,6 +620,26 @@ class IssueStateSerializer(DynamicBaseSerializer):
        fields = "__all__"
class IssueInboxSerializer(DynamicBaseSerializer):
label_ids = serializers.ListField(
child=serializers.UUIDField(),
required=False,
)
class Meta:
model = Issue
fields = [
"id",
"name",
"priority",
"sequence_id",
"project_id",
"created_at",
"label_ids",
]
read_only_fields = fields
 class IssueSerializer(DynamicBaseSerializer):
     # ids
     cycle_id = serializers.PrimaryKeyRelatedField(read_only=True)
@@ -688,7 +708,7 @@ class IssueLiteSerializer(DynamicBaseSerializer):
 class IssueDetailSerializer(IssueSerializer):
     description_html = serializers.CharField()
-    is_subscribed = serializers.BooleanField()
+    is_subscribed = serializers.BooleanField(read_only=True)
     class Meta(IssueSerializer.Meta):
         fields = IssueSerializer.Meta.fields + [

View File

@@ -3,9 +3,6 @@ from rest_framework import serializers
 # Module imports
 from .base import BaseSerializer
-from .issue import LabelLiteSerializer
-from .workspace import WorkspaceLiteSerializer
-from .project import ProjectLiteSerializer
 from plane.db.models import (
     Page,
     PageLog,
@@ -17,22 +14,33 @@ from plane.db.models import (
 class PageSerializer(BaseSerializer):
     is_favorite = serializers.BooleanField(read_only=True)
-    label_details = LabelLiteSerializer(
-        read_only=True, source="labels", many=True
-    )
     labels = serializers.ListField(
         child=serializers.PrimaryKeyRelatedField(queryset=Label.objects.all()),
         write_only=True,
         required=False,
     )
-    project_detail = ProjectLiteSerializer(source="project", read_only=True)
-    workspace_detail = WorkspaceLiteSerializer(
-        source="workspace", read_only=True
-    )
     class Meta:
         model = Page
-        fields = "__all__"
+        fields = [
"id",
"name",
"owned_by",
"access",
"color",
"labels",
"parent",
"is_favorite",
"is_locked",
"archived_at",
"workspace",
"project",
"created_at",
"updated_at",
"created_by",
"updated_by",
"view_props",
]
        read_only_fields = [
            "workspace",
            "project",
@@ -48,8 +56,12 @@ class PageSerializer(BaseSerializer):
         labels = validated_data.pop("labels", None)
         project_id = self.context["project_id"]
         owned_by_id = self.context["owned_by_id"]
+        description_html = self.context["description_html"]
         page = Page.objects.create(
-            **validated_data, project_id=project_id, owned_by_id=owned_by_id
+            **validated_data,
+            description_html=description_html,
+            project_id=project_id,
+            owned_by_id=owned_by_id,
         )
         if labels is not None:
@@ -91,6 +103,13 @@
        return super().update(instance, validated_data)
class PageDetailSerializer(PageSerializer):
description_html = serializers.CharField()
class Meta(PageSerializer.Meta):
fields = PageSerializer.Meta.fields + ["description_html"]
class SubPageSerializer(BaseSerializer):
    entity_details = serializers.SerializerMethodField()

View File

@@ -101,4 +101,9 @@ urlpatterns = [
         CycleArchiveUnarchiveEndpoint.as_view(),
         name="cycle-archive-unarchive",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-cycles/<uuid:pk>/",
+        CycleArchiveUnarchiveEndpoint.as_view(),
+        name="cycle-archive-unarchive",
+    ),
 ]

View File

@@ -30,7 +30,7 @@ urlpatterns = [
         name="inbox",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/",
         InboxIssueViewSet.as_view(
             {
                 "get": "list",
@@ -40,7 +40,7 @@ urlpatterns = [
         name="inbox-issue",
     ),
     path(
-        "workspaces/<str:slug>/projects/<uuid:project_id>/inboxes/<uuid:inbox_id>/inbox-issues/<uuid:issue_id>/",
+        "workspaces/<str:slug>/projects/<uuid:project_id>/inbox-issues/<uuid:issue_id>/",
         InboxIssueViewSet.as_view(
             {
                 "get": "retrieve",

View File

@@ -121,4 +121,9 @@ urlpatterns = [
         ModuleArchiveUnarchiveEndpoint.as_view(),
         name="module-archive-unarchive",
     ),
+    path(
+        "workspaces/<str:slug>/projects/<uuid:project_id>/archived-modules/<uuid:pk>/",
+        ModuleArchiveUnarchiveEndpoint.as_view(),
+        name="module-archive-unarchive",
+    ),
 ]

View File

@ -31,102 +31,51 @@ urlpatterns = [
), ),
name="project-pages", name="project-pages",
), ),
# favorite pages
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/", "workspaces/<str:slug>/projects/<uuid:project_id>/favorite-pages/<uuid:pk>/",
PageFavoriteViewSet.as_view( PageFavoriteViewSet.as_view(
{ {
"get": "list",
"post": "create", "post": "create",
}
),
name="user-favorite-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/user-favorite-pages/<uuid:page_id>/",
PageFavoriteViewSet.as_view(
{
"delete": "destroy", "delete": "destroy",
} }
), ),
name="user-favorite-pages", name="user-favorite-pages",
), ),
# archived pages
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/", "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/archive/",
PageViewSet.as_view(
{
"get": "list",
"post": "create",
}
),
name="project-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/",
PageViewSet.as_view(
{
"get": "retrieve",
"patch": "partial_update",
"delete": "destroy",
}
),
name="project-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/archive/",
PageViewSet.as_view( PageViewSet.as_view(
{ {
"post": "archive", "post": "archive",
"delete": "unarchive",
} }
), ),
name="project-page-archive", name="project-page-archive-unarchive",
), ),
# lock and unlock
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/unarchive/", "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/lock/",
PageViewSet.as_view(
{
"post": "unarchive",
}
),
name="project-page-unarchive",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/archived-pages/",
PageViewSet.as_view(
{
"get": "archive_list",
}
),
name="project-pages",
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/lock/",
PageViewSet.as_view( PageViewSet.as_view(
{ {
"post": "lock", "post": "lock",
"delete": "unlock",
} }
), ),
name="project-pages", name="project-pages-lock-unlock",
), ),
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/unlock/", "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/transactions/",
PageViewSet.as_view(
{
"post": "unlock",
}
),
),
path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/transactions/",
PageLogEndpoint.as_view(), PageLogEndpoint.as_view(),
name="page-transactions", name="page-transactions",
), ),
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/transactions/<uuid:transaction>/", "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/transactions/<uuid:transaction>/",
PageLogEndpoint.as_view(), PageLogEndpoint.as_view(),
name="page-transactions", name="page-transactions",
), ),
path( path(
"workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:page_id>/sub-pages/", "workspaces/<str:slug>/projects/<uuid:project_id>/pages/<uuid:pk>/sub-pages/",
SubPagesEndpoint.as_view(), SubPagesEndpoint.as_view(),
name="sub-page", name="sub-page",
), ),

View File

@@ -21,9 +21,9 @@ from django.db.models import (
 )
 from django.db.models.functions import Coalesce
 from django.utils import timezone
+from rest_framework import status
 # Third party imports
-from rest_framework import status
 from rest_framework.response import Response
 from plane.app.permissions import (
@@ -540,6 +540,13 @@ class CycleViewSet(WebhookMixin, BaseViewSet):
             .first()
         )
         queryset = queryset.first()
+        if data is None:
+            return Response(
+                {"error": "Cycle does not exist"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
         # Assignee Distribution
         assignee_distribution = (
             Issue.objects.filter(
@ -719,9 +726,20 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
) )
) )
.annotate(is_favorite=Exists(favorite_subquery)) .annotate(is_favorite=Exists(favorite_subquery))
.annotate(
total_issues=Count(
"issue_cycle__issue__id",
distinct=True,
filter=Q(
issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.annotate( .annotate(
completed_issues=Count( completed_issues=Count(
"issue_cycle__issue__state__group", "issue_cycle__issue__id",
distinct=True,
filter=Q( filter=Q(
issue_cycle__issue__state__group="completed", issue_cycle__issue__state__group="completed",
issue_cycle__issue__archived_at__isnull=True, issue_cycle__issue__archived_at__isnull=True,
@ -731,7 +749,8 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
) )
.annotate( .annotate(
cancelled_issues=Count( cancelled_issues=Count(
"issue_cycle__issue__state__group", "issue_cycle__issue__id",
distinct=True,
filter=Q( filter=Q(
issue_cycle__issue__state__group="cancelled", issue_cycle__issue__state__group="cancelled",
issue_cycle__issue__archived_at__isnull=True, issue_cycle__issue__archived_at__isnull=True,
@ -741,7 +760,8 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
) )
.annotate( .annotate(
started_issues=Count( started_issues=Count(
"issue_cycle__issue__state__group", "issue_cycle__issue__id",
distinct=True,
filter=Q( filter=Q(
issue_cycle__issue__state__group="started", issue_cycle__issue__state__group="started",
issue_cycle__issue__archived_at__isnull=True, issue_cycle__issue__archived_at__isnull=True,
@ -751,7 +771,8 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
) )
.annotate( .annotate(
unstarted_issues=Count( unstarted_issues=Count(
"issue_cycle__issue__state__group", "issue_cycle__issue__id",
distinct=True,
filter=Q( filter=Q(
issue_cycle__issue__state__group="unstarted", issue_cycle__issue__state__group="unstarted",
issue_cycle__issue__archived_at__isnull=True, issue_cycle__issue__archived_at__isnull=True,
@ -761,7 +782,8 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
) )
.annotate( .annotate(
backlog_issues=Count( backlog_issues=Count(
"issue_cycle__issue__state__group", "issue_cycle__issue__id",
distinct=True,
filter=Q( filter=Q(
issue_cycle__issue__state__group="backlog", issue_cycle__issue__state__group="backlog",
issue_cycle__issue__archived_at__isnull=True, issue_cycle__issue__archived_at__isnull=True,
@ -795,9 +817,6 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
distinct=True, distinct=True,
filter=~Q( filter=~Q(
issue_cycle__issue__assignees__id__isnull=True issue_cycle__issue__assignees__id__isnull=True
)
& Q(
issue_cycle__issue__assignees__member_project__is_active=True
), ),
), ),
Value([], output_field=ArrayField(UUIDField())), Value([], output_field=ArrayField(UUIDField())),
@ -807,53 +826,224 @@ class CycleArchiveUnarchiveEndpoint(BaseAPIView):
.distinct() .distinct()
) )
def get(self, request, slug, project_id): def get(self, request, slug, project_id, pk=None):
queryset = ( if pk is None:
self.get_queryset() queryset = (
.annotate( self.get_queryset()
total_issues=Count( .annotate(
"issue_cycle", total_issues=Count(
filter=Q( "issue_cycle",
issue_cycle__issue__archived_at__isnull=True, filter=Q(
issue_cycle__issue__is_draft=False, issue_cycle__issue__archived_at__isnull=True,
issue_cycle__issue__is_draft=False,
),
)
)
.values(
# necessary fields
"id",
"workspace_id",
"project_id",
# model fields
"name",
"description",
"start_date",
"end_date",
"owned_by_id",
"view_props",
"sort_order",
"external_source",
"external_id",
"progress_snapshot",
# meta fields
"total_issues",
"is_favorite",
"cancelled_issues",
"completed_issues",
"started_issues",
"unstarted_issues",
"backlog_issues",
"assignee_ids",
"status",
"archived_at",
)
).order_by("-is_favorite", "-created_at")
return Response(queryset, status=status.HTTP_200_OK)
else:
queryset = (
self.get_queryset()
.filter(archived_at__isnull=False)
.filter(pk=pk)
)
data = (
self.get_queryset()
.filter(pk=pk)
.annotate(
sub_issues=Issue.issue_objects.filter(
project_id=self.kwargs.get("project_id"),
parent__isnull=False,
issue_cycle__cycle_id=pk,
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
.values(
# necessary fields
"id",
"workspace_id",
"project_id",
# model fields
"name",
"description",
"start_date",
"end_date",
"owned_by_id",
"view_props",
"sort_order",
"external_source",
"external_id",
"progress_snapshot",
"sub_issues",
# meta fields
"is_favorite",
"total_issues",
"cancelled_issues",
"completed_issues",
"started_issues",
"unstarted_issues",
"backlog_issues",
"assignee_ids",
"status",
)
.first()
)
queryset = queryset.first()
if data is None:
return Response(
{"error": "Cycle does not exist"},
status=status.HTTP_400_BAD_REQUEST,
)
# Assignee Distribution
assignee_distribution = (
Issue.objects.filter(
issue_cycle__cycle_id=pk,
workspace__slug=slug,
project_id=project_id,
)
.annotate(first_name=F("assignees__first_name"))
.annotate(last_name=F("assignees__last_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(avatar=F("assignees__avatar"))
.annotate(display_name=F("assignees__display_name"))
.values(
"first_name",
"last_name",
"assignee_id",
"avatar",
"display_name",
)
.annotate(
total_issues=Count(
"id",
filter=Q(archived_at__isnull=True, is_draft=False),
), ),
) )
.annotate(
completed_issues=Count(
"id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_issues=Count(
"id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("first_name", "last_name")
) )
.values(
# necessary fields # Label Distribution
"id", label_distribution = (
"workspace_id", Issue.objects.filter(
"project_id", issue_cycle__cycle_id=pk,
# model fields workspace__slug=slug,
"name", project_id=project_id,
"description", )
"start_date", .annotate(label_name=F("labels__name"))
"end_date", .annotate(color=F("labels__color"))
"owned_by_id", .annotate(label_id=F("labels__id"))
"view_props", .values("label_name", "color", "label_id")
"sort_order", .annotate(
"external_source", total_issues=Count(
"external_id", "id",
"progress_snapshot", filter=Q(archived_at__isnull=True, is_draft=False),
# meta fields ),
"total_issues", )
"is_favorite", .annotate(
"cancelled_issues", completed_issues=Count(
"completed_issues", "id",
"started_issues", filter=Q(
"unstarted_issues", completed_at__isnull=False,
"backlog_issues", archived_at__isnull=True,
"assignee_ids", is_draft=False,
"status", ),
"archived_at", )
)
.annotate(
pending_issues=Count(
"id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("label_name")
)
data["distribution"] = {
"assignees": assignee_distribution,
"labels": label_distribution,
"completion_chart": {},
}
if queryset.start_date and queryset.end_date:
data["distribution"]["completion_chart"] = burndown_plot(
queryset=queryset,
slug=slug,
project_id=project_id,
cycle_id=pk,
)
return Response(
data,
status=status.HTTP_200_OK,
) )
).order_by("-is_favorite", "-created_at")
return Response(queryset, status=status.HTTP_200_OK)
     def post(self, request, slug, project_id, cycle_id):
         cycle = Cycle.objects.get(
             pk=cycle_id, project_id=project_id, workspace__slug=slug
         )
+        if cycle.end_date >= timezone.now().date():
+            return Response(
+                {"error": "Only completed cycles can be archived"},
+                status=status.HTTP_400_BAD_REQUEST,
+            )
         cycle.archived_at = timezone.now()
         cycle.save()
         return Response(

View File

@@ -3,7 +3,7 @@ import json
 # Django import
 from django.utils import timezone
-from django.db.models import Q, Count, OuterRef, Func, F, Prefetch, Exists
+from django.db.models import Q, Count, OuterRef, Func, F, Prefetch
 from django.core.serializers.json import DjangoJSONEncoder
 from django.contrib.postgres.aggregates import ArrayAgg
 from django.contrib.postgres.fields import ArrayField
@@ -24,16 +24,15 @@ from plane.db.models import (
     State,
     IssueLink,
     IssueAttachment,
+    Project,
     ProjectMember,
-    IssueReaction,
-    IssueSubscriber,
 )
 from plane.app.serializers import (
     IssueCreateSerializer,
     IssueSerializer,
     InboxSerializer,
     InboxIssueSerializer,
-    IssueDetailSerializer,
+    InboxIssueDetailSerializer,
 )
 from plane.utils.issue_filters import issue_filters
 from plane.bgtasks.issue_activites_task import issue_activity
@@ -64,13 +63,20 @@ class InboxViewSet(BaseViewSet):
             .select_related("workspace", "project")
         )
+    def list(self, request, slug, project_id):
+        inbox = self.get_queryset().first()
+        return Response(
+            InboxSerializer(inbox).data,
+            status=status.HTTP_200_OK,
+        )
     def perform_create(self, serializer):
         serializer.save(project_id=self.kwargs.get("project_id"))
     def destroy(self, request, slug, project_id, pk):
-        inbox = Inbox.objects.get(
+        inbox = Inbox.objects.filter(
             workspace__slug=slug, project_id=project_id, pk=pk
-        )
+        ).first()
         # Handle default inbox delete
         if inbox.is_default:
             return Response(
@@ -98,7 +104,6 @@ class InboxIssueViewSet(BaseViewSet):
             Issue.objects.filter(
                 project_id=self.kwargs.get("project_id"),
                 workspace__slug=self.kwargs.get("slug"),
-                issue_inbox__inbox_id=self.kwargs.get("inbox_id"),
             )
             .select_related("workspace", "project", "state", "parent")
             .prefetch_related("assignees", "labels", "issue_module__module")
@ -162,51 +167,49 @@ class InboxIssueViewSet(BaseViewSet):
) )
).distinct() ).distinct()
def list(self, request, slug, project_id, inbox_id): def list(self, request, slug, project_id):
filters = issue_filters(request.query_params, "GET") inbox_id = Inbox.objects.filter(
issue_queryset = ( workspace__slug=slug, project_id=project_id
self.get_queryset() ).first()
.filter(**filters) filters = issue_filters(request.GET, "GET", "issue__")
.order_by("issue_inbox__snoozed_till", "issue_inbox__status") inbox_issue = (
) InboxIssue.objects.filter(
if self.expand: inbox_id=inbox_id.id, project_id=project_id, **filters
issues = IssueSerializer(
issue_queryset, expand=self.expand, many=True
).data
else:
issues = issue_queryset.values(
"id",
"name",
"state_id",
"sort_order",
"completed_at",
"estimate_point",
"priority",
"start_date",
"target_date",
"sequence_id",
"project_id",
"parent_id",
"cycle_id",
"module_ids",
"label_ids",
"assignee_ids",
"sub_issues_count",
"created_at",
"updated_at",
"created_by",
"updated_by",
"attachment_count",
"link_count",
"is_draft",
"archived_at",
) )
return Response( .select_related("issue")
issues, .prefetch_related(
status=status.HTTP_200_OK, "issue__labels",
)
.annotate(
label_ids=Coalesce(
ArrayAgg(
"issue__labels__id",
distinct=True,
filter=~Q(issue__labels__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
)
)
).order_by(request.GET.get("order_by", "-issue__created_at"))
# inbox status filter
inbox_status = [
item
for item in request.GET.get("status", "-2").split(",")
if item != "null"
]
if inbox_status:
inbox_issue = inbox_issue.filter(status__in=inbox_status)
return self.paginate(
request=request,
queryset=(inbox_issue),
on_results=lambda inbox_issues: InboxIssueSerializer(
inbox_issues,
many=True,
).data,
) )
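The rewritten list() above swaps the Issue queryset for an InboxIssue queryset and folds related label ids into a single array column. A standalone sketch of that aggregation pattern, using the models named in this diff (the helper name is illustrative):

from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField
from django.db.models import Q, UUIDField, Value
from django.db.models.functions import Coalesce

from plane.db.models import InboxIssue


def inbox_issues_with_label_ids(inbox_id, project_id):
    # Coalesce turns the NULL produced by an empty ArrayAgg into [] so the
    # serializer always sees a list.
    return (
        InboxIssue.objects.filter(inbox_id=inbox_id, project_id=project_id)
        .select_related("issue")
        .prefetch_related("issue__labels")
        .annotate(
            label_ids=Coalesce(
                ArrayAgg(
                    "issue__labels__id",
                    distinct=True,
                    filter=~Q(issue__labels__id__isnull=True),
                ),
                Value([], output_field=ArrayField(UUIDField())),
            )
        )
    )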
def create(self, request, slug, project_id, inbox_id): def create(self, request, slug, project_id):
if not request.data.get("issue", {}).get("name", False): if not request.data.get("issue", {}).get("name", False):
return Response( return Response(
{"error": "Name is required"}, {"error": "Name is required"},
@ -229,49 +232,88 @@ class InboxIssueViewSet(BaseViewSet):
# Create or get state # Create or get state
state, _ = State.objects.get_or_create( state, _ = State.objects.get_or_create(
name="Triage", name="Triage",
group="backlog", group="triage",
description="Default state for managing all Inbox Issues", description="Default state for managing all Inbox Issues",
project_id=project_id, project_id=project_id,
color="#ff7700", color="#ff7700",
is_triage=True,
) )
# create an issue # create an issue
issue = Issue.objects.create( project = Project.objects.get(pk=project_id)
name=request.data.get("issue", {}).get("name"), serializer = IssueCreateSerializer(
description=request.data.get("issue", {}).get("description", {}), data=request.data.get("issue"),
description_html=request.data.get("issue", {}).get( context={
"description_html", "<p></p>" "project_id": project_id,
), "workspace_id": project.workspace_id,
priority=request.data.get("issue", {}).get("priority", "low"), "default_assignee_id": project.default_assignee_id,
project_id=project_id, },
state=state,
) )
if serializer.is_valid():
serializer.save()
# Create an Issue Activity
issue_activity.delay(
type="issue.activity.created",
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
actor_id=str(request.user.id),
issue_id=str(serializer.data["id"]),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
inbox_id = Inbox.objects.filter(
workspace__slug=slug, project_id=project_id
).first()
# create an inbox issue
inbox_issue = InboxIssue.objects.create(
inbox_id=inbox_id.id,
project_id=project_id,
issue_id=serializer.data["id"],
source=request.data.get("source", "in-app"),
)
inbox_issue = (
InboxIssue.objects.select_related("issue")
.prefetch_related(
"issue__labels",
"issue__assignees",
)
.annotate(
label_ids=Coalesce(
ArrayAgg(
"issue__labels__id",
distinct=True,
filter=~Q(issue__labels__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
assignee_ids=Coalesce(
ArrayAgg(
"issue__assignees__id",
distinct=True,
filter=~Q(issue__assignees__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
)
.get(
inbox_id=inbox_id.id,
issue_id=serializer.data["id"],
project_id=project_id,
)
)
serializer = InboxIssueDetailSerializer(inbox_issue)
return Response(serializer.data, status=status.HTTP_200_OK)
else:
return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST
)
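Taken together, the new create() replaces the hand-rolled Issue.objects.create() with IssueCreateSerializer, queues an activity, and then links the saved issue to the project's default inbox. A condensed sketch of that flow under the names used in this diff (error handling trimmed, the helper itself is illustrative):

import json

from django.core.serializers.json import DjangoJSONEncoder
from django.utils import timezone

from plane.app.serializers import IssueCreateSerializer
from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import Inbox, InboxIssue, Project


def intake_issue(request, slug, project_id, payload):
    # Validate and save the issue through the serializer.
    project = Project.objects.get(pk=project_id)
    serializer = IssueCreateSerializer(
        data=payload,
        context={
            "project_id": project_id,
            "workspace_id": project.workspace_id,
            "default_assignee_id": project.default_assignee_id,
        },
    )
    serializer.is_valid(raise_exception=True)
    serializer.save()

    # Record the creation through the shared activity task.
    issue_activity.delay(
        type="issue.activity.created",
        requested_data=json.dumps({"issue": payload}, cls=DjangoJSONEncoder),
        actor_id=str(request.user.id),
        issue_id=str(serializer.data["id"]),
        project_id=str(project_id),
        current_instance=None,
        epoch=int(timezone.now().timestamp()),
        notification=True,
        origin=request.META.get("HTTP_ORIGIN"),
    )

    # Attach the saved issue to the project's default inbox.
    inbox = Inbox.objects.filter(
        workspace__slug=slug, project_id=project_id
    ).first()
    return InboxIssue.objects.create(
        inbox_id=inbox.id,
        project_id=project_id,
        issue_id=serializer.data["id"],
        source=payload.get("source", "in-app"),
    )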
# Create an Issue Activity def partial_update(self, request, slug, project_id, issue_id):
issue_activity.delay( inbox_id = Inbox.objects.filter(
type="issue.activity.created", workspace__slug=slug, project_id=project_id
requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), ).first()
actor_id=str(request.user.id),
issue_id=str(issue.id),
project_id=str(project_id),
current_instance=None,
epoch=int(timezone.now().timestamp()),
notification=True,
origin=request.META.get("HTTP_ORIGIN"),
)
# create an inbox issue
InboxIssue.objects.create(
inbox_id=inbox_id,
project_id=project_id,
issue=issue,
source=request.data.get("source", "in-app"),
)
issue = self.get_queryset().filter(pk=issue.id).first()
serializer = IssueSerializer(issue, expand=self.expand)
return Response(serializer.data, status=status.HTTP_200_OK)
def partial_update(self, request, slug, project_id, inbox_id, issue_id):
inbox_issue = InboxIssue.objects.get( inbox_issue = InboxIssue.objects.get(
issue_id=issue_id, issue_id=issue_id,
workspace__slug=slug, workspace__slug=slug,
@ -296,9 +338,12 @@ class InboxIssueViewSet(BaseViewSet):
# Get issue data # Get issue data
issue_data = request.data.pop("issue", False) issue_data = request.data.pop("issue", False)
if bool(issue_data): if bool(issue_data):
issue = self.get_queryset().filter(pk=inbox_issue.issue_id).first() issue = Issue.objects.get(
pk=inbox_issue.issue_id,
workspace__slug=slug,
project_id=project_id,
)
# Only allow guests and viewers to edit name and description # Only allow guests and viewers to edit name and description
if project_member.role <= 10: if project_member.role <= 10:
# viewers and guests since only viewers and guests # viewers and guests since only viewers and guests
@ -346,7 +391,9 @@ class InboxIssueViewSet(BaseViewSet):
serializer = InboxIssueSerializer( serializer = InboxIssueSerializer(
inbox_issue, data=request.data, partial=True inbox_issue, data=request.data, partial=True
) )
current_instance = json.dumps(
InboxIssueSerializer(inbox_issue).data, cls=DjangoJSONEncoder
)
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
# Update the issue state if the issue is rejected or marked as duplicate # Update the issue state if the issue is rejected or marked as duplicate
@ -374,7 +421,7 @@ class InboxIssueViewSet(BaseViewSet):
) )
# Update the issue state only if it is in triage state # Update the issue state only if it is in triage state
if issue.state.name == "Triage": if issue.state.is_triage:
# Move to default state # Move to default state
state = State.objects.filter( state = State.objects.filter(
workspace__slug=slug, workspace__slug=slug,
@ -384,60 +431,108 @@ class InboxIssueViewSet(BaseViewSet):
if state is not None: if state is not None:
issue.state = state issue.state = state
issue.save() issue.save()
return Response(status=status.HTTP_204_NO_CONTENT) # create an activity for status change
issue_activity.delay(
type="inbox.activity.created",
requested_data=json.dumps(
request.data, cls=DjangoJSONEncoder
),
actor_id=str(request.user.id),
issue_id=str(issue_id),
project_id=str(project_id),
current_instance=current_instance,
epoch=int(timezone.now().timestamp()),
notification=False,
origin=request.META.get("HTTP_ORIGIN"),
)
inbox_issue = (
InboxIssue.objects.filter(
inbox_id=inbox_id.id,
issue_id=serializer.data["id"],
project_id=project_id,
)
.select_related("issue")
.prefetch_related(
"issue__labels",
"issue__assignees",
)
.annotate(
label_ids=Coalesce(
ArrayAgg(
"issue__labels__id",
distinct=True,
filter=~Q(issue__labels__id__isnull=True),
),
Value(
[],
output_field=ArrayField(UUIDField()),
),
),
assignee_ids=Coalesce(
ArrayAgg(
"issue__assignees__id",
distinct=True,
filter=~Q(issue__assignees__id__isnull=True),
),
Value(
[],
output_field=ArrayField(UUIDField()),
),
),
).first()
)
serializer = InboxIssueDetailSerializer(inbox_issue).data
return Response(serializer, status=status.HTTP_200_OK)
return Response( return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST serializer.errors, status=status.HTTP_400_BAD_REQUEST
) )
else: else:
issue = self.get_queryset().filter(pk=issue_id).first() serializer = InboxIssueDetailSerializer(inbox_issue).data
serializer = IssueSerializer(issue, expand=self.expand) return Response(serializer, status=status.HTTP_200_OK)
return Response(serializer.data, status=status.HTTP_200_OK)
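partial_update now snapshots the inbox issue before saving and records the status change through the activity pipeline instead of returning an empty 204. A sketch of just that journalling step, with names taken from this diff:

import json

from django.core.serializers.json import DjangoJSONEncoder
from django.utils import timezone

from plane.app.serializers import InboxIssueSerializer
from plane.bgtasks.issue_activites_task import issue_activity


def record_inbox_status_change(request, inbox_issue, project_id):
    # Snapshot the state before the serializer saves the new status.
    current_instance = json.dumps(
        InboxIssueSerializer(inbox_issue).data, cls=DjangoJSONEncoder
    )
    issue_activity.delay(
        type="inbox.activity.created",
        requested_data=json.dumps(request.data, cls=DjangoJSONEncoder),
        actor_id=str(request.user.id),
        issue_id=str(inbox_issue.issue_id),
        project_id=str(project_id),
        current_instance=current_instance,
        epoch=int(timezone.now().timestamp()),
        notification=False,
        origin=request.META.get("HTTP_ORIGIN"),
    )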
def retrieve(self, request, slug, project_id, inbox_id, issue_id): def retrieve(self, request, slug, project_id, issue_id):
issue = ( inbox_id = Inbox.objects.filter(
self.get_queryset() workspace__slug=slug, project_id=project_id
.filter(pk=issue_id) ).first()
inbox_issue = (
InboxIssue.objects.select_related("issue")
.prefetch_related( .prefetch_related(
Prefetch( "issue__labels",
"issue_reactions", "issue__assignees",
queryset=IssueReaction.objects.select_related(
"issue", "actor"
),
)
)
.prefetch_related(
Prefetch(
"issue_attachment",
queryset=IssueAttachment.objects.select_related("issue"),
)
)
.prefetch_related(
Prefetch(
"issue_link",
queryset=IssueLink.objects.select_related("created_by"),
)
) )
.annotate( .annotate(
is_subscribed=Exists( label_ids=Coalesce(
IssueSubscriber.objects.filter( ArrayAgg(
workspace__slug=slug, "issue__labels__id",
project_id=project_id, distinct=True,
issue_id=OuterRef("pk"), filter=~Q(issue__labels__id__isnull=True),
subscriber=request.user, ),
) Value([], output_field=ArrayField(UUIDField())),
) ),
assignee_ids=Coalesce(
ArrayAgg(
"issue__assignees__id",
distinct=True,
filter=~Q(issue__assignees__id__isnull=True),
),
Value([], output_field=ArrayField(UUIDField())),
),
) )
.get(
inbox_id=inbox_id.id, issue_id=issue_id, project_id=project_id
)
)
issue = InboxIssueDetailSerializer(inbox_issue).data
return Response(
issue,
status=status.HTTP_200_OK,
)
def destroy(self, request, slug, project_id, issue_id):
inbox_id = Inbox.objects.filter(
workspace__slug=slug, project_id=project_id
).first() ).first()
if issue is None:
return Response(
{"error": "Requested object was not found"},
status=status.HTTP_404_NOT_FOUND,
)
serializer = IssueDetailSerializer(issue)
return Response(serializer.data, status=status.HTTP_200_OK)
def destroy(self, request, slug, project_id, inbox_id, issue_id):
inbox_issue = InboxIssue.objects.get( inbox_issue = InboxIssue.objects.get(
issue_id=issue_id, issue_id=issue_id,
workspace__slug=slug, workspace__slug=slug,

View File

@ -1,57 +1,59 @@
# Python imports # Python imports
import json import json
# Django imports
from django.utils import timezone
from django.db.models import (
Prefetch,
OuterRef,
Func,
F,
Q,
Case,
Value,
CharField,
When,
Exists,
Max,
)
from django.core.serializers.json import DjangoJSONEncoder
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from django.contrib.postgres.aggregates import ArrayAgg from django.contrib.postgres.aggregates import ArrayAgg
from django.contrib.postgres.fields import ArrayField from django.contrib.postgres.fields import ArrayField
from django.db.models import UUIDField from django.core.serializers.json import DjangoJSONEncoder
from django.db.models import (
Case,
CharField,
Exists,
F,
Func,
Max,
OuterRef,
Prefetch,
Q,
UUIDField,
Value,
When,
)
from django.db.models.functions import Coalesce from django.db.models.functions import Coalesce
# Django imports
from django.utils import timezone
from django.utils.decorators import method_decorator
from django.views.decorators.gzip import gzip_page
from rest_framework import status
# Third Party imports # Third Party imports
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework import status
# Module imports
from .. import BaseViewSet, BaseAPIView, WebhookMixin
from plane.app.serializers import (
IssuePropertySerializer,
IssueSerializer,
IssueCreateSerializer,
IssueDetailSerializer,
)
from plane.app.permissions import ( from plane.app.permissions import (
ProjectEntityPermission, ProjectEntityPermission,
ProjectLitePermission, ProjectLitePermission,
) )
from plane.db.models import ( from plane.app.serializers import (
Project, IssueCreateSerializer,
Issue, IssueDetailSerializer,
IssueProperty, IssuePropertySerializer,
IssueLink, IssueSerializer,
IssueAttachment,
IssueSubscriber,
IssueReaction,
) )
from plane.bgtasks.issue_activites_task import issue_activity from plane.bgtasks.issue_activites_task import issue_activity
from plane.db.models import (
Issue,
IssueAttachment,
IssueLink,
IssueProperty,
IssueReaction,
IssueSubscriber,
Project,
)
from plane.utils.issue_filters import issue_filters from plane.utils.issue_filters import issue_filters
# Module imports
from .. import BaseAPIView, BaseViewSet, WebhookMixin
class IssueListEndpoint(BaseAPIView): class IssueListEndpoint(BaseAPIView):

View File

@ -120,7 +120,6 @@ class IssueDraftViewSet(BaseViewSet):
@method_decorator(gzip_page) @method_decorator(gzip_page)
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
filters = issue_filters(request.query_params, "GET") filters = issue_filters(request.query_params, "GET")
# Custom ordering for priority and state # Custom ordering for priority and state
priority_order = ["urgent", "high", "medium", "low", "none"] priority_order = ["urgent", "high", "medium", "low", "none"]
state_order = [ state_order = [

View File

@ -528,13 +528,64 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
project_id=self.kwargs.get("project_id"), project_id=self.kwargs.get("project_id"),
workspace__slug=self.kwargs.get("slug"), workspace__slug=self.kwargs.get("slug"),
) )
cancelled_issues = (
Issue.issue_objects.filter(
state__group="cancelled",
issue_module__module_id=OuterRef("pk"),
)
.values("issue_module__module_id")
.annotate(cnt=Count("pk"))
.values("cnt")
)
completed_issues = (
Issue.issue_objects.filter(
state__group="completed",
issue_module__module_id=OuterRef("pk"),
)
.values("issue_module__module_id")
.annotate(cnt=Count("pk"))
.values("cnt")
)
started_issues = (
Issue.issue_objects.filter(
state__group="started",
issue_module__module_id=OuterRef("pk"),
)
.values("issue_module__module_id")
.annotate(cnt=Count("pk"))
.values("cnt")
)
unstarted_issues = (
Issue.issue_objects.filter(
state__group="unstarted",
issue_module__module_id=OuterRef("pk"),
)
.values("issue_module__module_id")
.annotate(cnt=Count("pk"))
.values("cnt")
)
backlog_issues = (
Issue.issue_objects.filter(
state__group="backlog",
issue_module__module_id=OuterRef("pk"),
)
.values("issue_module__module_id")
.annotate(cnt=Count("pk"))
.values("cnt")
)
total_issues = (
Issue.issue_objects.filter(
issue_module__module_id=OuterRef("pk"),
)
.values("issue_module__module_id")
.annotate(cnt=Count("pk"))
.values("cnt")
)
return ( return (
Module.objects.filter(workspace__slug=self.kwargs.get("slug")) Module.objects.filter(workspace__slug=self.kwargs.get("slug"))
.filter(archived_at__isnull=False) .filter(archived_at__isnull=False)
.annotate(is_favorite=Exists(favorite_subquery)) .annotate(is_favorite=Exists(favorite_subquery))
.select_related("project") .select_related("workspace", "project", "lead")
.select_related("workspace")
.select_related("lead")
.prefetch_related("members") .prefetch_related("members")
.prefetch_related( .prefetch_related(
Prefetch( Prefetch(
@ -545,68 +596,39 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
) )
) )
.annotate( .annotate(
total_issues=Count( completed_issues=Coalesce(
"issue_module", Subquery(completed_issues[:1]),
filter=Q( Value(0, output_field=IntegerField()),
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
),
)
.annotate(
completed_issues=Count(
"issue_module__issue__state__group",
filter=Q(
issue_module__issue__state__group="completed",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
) )
) )
.annotate( .annotate(
cancelled_issues=Count( cancelled_issues=Coalesce(
"issue_module__issue__state__group", Subquery(cancelled_issues[:1]),
filter=Q( Value(0, output_field=IntegerField()),
issue_module__issue__state__group="cancelled",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
) )
) )
.annotate( .annotate(
started_issues=Count( started_issues=Coalesce(
"issue_module__issue__state__group", Subquery(started_issues[:1]),
filter=Q( Value(0, output_field=IntegerField()),
issue_module__issue__state__group="started",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
) )
) )
.annotate( .annotate(
unstarted_issues=Count( unstarted_issues=Coalesce(
"issue_module__issue__state__group", Subquery(unstarted_issues[:1]),
filter=Q( Value(0, output_field=IntegerField()),
issue_module__issue__state__group="unstarted",
issue_module__issue__archived_at__isnull=True,
issue_module__issue__is_draft=False,
),
distinct=True,
) )
) )
.annotate( .annotate(
backlog_issues=Count( backlog_issues=Coalesce(
"issue_module__issue__state__group", Subquery(backlog_issues[:1]),
filter=Q( Value(0, output_field=IntegerField()),
issue_module__issue__state__group="backlog", )
issue_module__issue__archived_at__isnull=True, )
issue_module__issue__is_draft=False, .annotate(
), total_issues=Coalesce(
distinct=True, Subquery(total_issues[:1]),
Value(0, output_field=IntegerField()),
) )
) )
.annotate( .annotate(
@ -622,44 +644,180 @@ class ModuleArchiveUnarchiveEndpoint(BaseAPIView):
.order_by("-is_favorite", "-created_at") .order_by("-is_favorite", "-created_at")
) )
def get(self, request, slug, project_id): def get(self, request, slug, project_id, pk=None):
queryset = self.get_queryset() if pk is None:
modules = queryset.values( # Required fields queryset = self.get_queryset()
"id", modules = queryset.values( # Required fields
"workspace_id", "id",
"project_id", "workspace_id",
# Model fields "project_id",
"name", # Model fields
"description", "name",
"description_text", "description",
"description_html", "description_text",
"start_date", "description_html",
"target_date", "start_date",
"status", "target_date",
"lead_id", "status",
"member_ids", "lead_id",
"view_props", "member_ids",
"sort_order", "view_props",
"external_source", "sort_order",
"external_id", "external_source",
# computed fields "external_id",
"total_issues", # computed fields
"is_favorite", "total_issues",
"cancelled_issues", "is_favorite",
"completed_issues", "cancelled_issues",
"started_issues", "completed_issues",
"unstarted_issues", "started_issues",
"backlog_issues", "unstarted_issues",
"created_at", "backlog_issues",
"updated_at", "created_at",
"archived_at", "updated_at",
) "archived_at",
return Response(modules, status=status.HTTP_200_OK) )
return Response(modules, status=status.HTTP_200_OK)
else:
queryset = (
self.get_queryset()
.filter(pk=pk)
.annotate(
sub_issues=Issue.issue_objects.filter(
project_id=self.kwargs.get("project_id"),
parent__isnull=False,
issue_module__module_id=pk,
)
.order_by()
.annotate(count=Func(F("id"), function="Count"))
.values("count")
)
)
assignee_distribution = (
Issue.objects.filter(
issue_module__module_id=pk,
workspace__slug=slug,
project_id=project_id,
)
.annotate(first_name=F("assignees__first_name"))
.annotate(last_name=F("assignees__last_name"))
.annotate(assignee_id=F("assignees__id"))
.annotate(display_name=F("assignees__display_name"))
.annotate(avatar=F("assignees__avatar"))
.values(
"first_name",
"last_name",
"assignee_id",
"avatar",
"display_name",
)
.annotate(
total_issues=Count(
"id",
filter=Q(
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
completed_issues=Count(
"id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_issues=Count(
"id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("first_name", "last_name")
)
label_distribution = (
Issue.objects.filter(
issue_module__module_id=pk,
workspace__slug=slug,
project_id=project_id,
)
.annotate(label_name=F("labels__name"))
.annotate(color=F("labels__color"))
.annotate(label_id=F("labels__id"))
.values("label_name", "color", "label_id")
.annotate(
total_issues=Count(
"id",
filter=Q(
archived_at__isnull=True,
is_draft=False,
),
),
)
.annotate(
completed_issues=Count(
"id",
filter=Q(
completed_at__isnull=False,
archived_at__isnull=True,
is_draft=False,
),
)
)
.annotate(
pending_issues=Count(
"id",
filter=Q(
completed_at__isnull=True,
archived_at__isnull=True,
is_draft=False,
),
)
)
.order_by("label_name")
)
data = ModuleDetailSerializer(queryset.first()).data
data["distribution"] = {
"assignees": assignee_distribution,
"labels": label_distribution,
"completion_chart": {},
}
# Fetch the modules
modules = queryset.first()
if modules and modules.start_date and modules.target_date:
data["distribution"]["completion_chart"] = burndown_plot(
queryset=modules,
slug=slug,
project_id=project_id,
module_id=pk,
)
return Response(
data,
status=status.HTTP_200_OK,
)
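The new single-module branch builds its distribution data by grouping issues on assignee and label fields, annotating total, completed, and pending counts, and then feeding the module into the existing burndown_plot helper. A reduced sketch of the assignee roll-up:

from django.db.models import Count, F, Q

from plane.db.models import Issue


def module_assignee_distribution(module_id, slug, project_id):
    return (
        Issue.objects.filter(
            issue_module__module_id=module_id,
            workspace__slug=slug,
            project_id=project_id,
        )
        .annotate(assignee_id=F("assignees__id"))
        .annotate(display_name=F("assignees__display_name"))
        .values("assignee_id", "display_name")
        .annotate(
            total_issues=Count(
                "id", filter=Q(archived_at__isnull=True, is_draft=False)
            ),
            completed_issues=Count(
                "id",
                filter=Q(
                    completed_at__isnull=False,
                    archived_at__isnull=True,
                    is_draft=False,
                ),
            ),
            pending_issues=Count(
                "id",
                filter=Q(
                    completed_at__isnull=True,
                    archived_at__isnull=True,
                    is_draft=False,
                ),
            ),
        )
        .order_by("display_name")
    )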
def post(self, request, slug, project_id, module_id): def post(self, request, slug, project_id, module_id):
module = Module.objects.get( module = Module.objects.get(
pk=module_id, project_id=project_id, workspace__slug=slug pk=module_id, project_id=project_id, workspace__slug=slug
) )
if module.status not in ["completed", "cancelled"]:
return Response(
{
"error": "Only completed or cancelled modules can be archived"
},
status=status.HTTP_400_BAD_REQUEST,
)
module.archived_at = timezone.now() module.archived_at = timezone.now()
module.save() module.save()
return Response( return Response(
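Archiving is now guarded: a module must be completed or cancelled before archived_at is stamped. A compact sketch of that rule (raising instead of building the view's 400 response, whose body is trimmed in this hunk):

from django.utils import timezone

from plane.db.models import Module


def archive_module(slug, project_id, module_id):
    module = Module.objects.get(
        pk=module_id, project_id=project_id, workspace__slug=slug
    )
    if module.status not in ["completed", "cancelled"]:
        raise ValueError("Only completed or cancelled modules can be archived")
    module.archived_at = timezone.now()
    module.save()
    return module  # the view wraps this in a Response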

View File

@ -1,5 +1,7 @@
# Python imports # Python imports
import json
from datetime import datetime from datetime import datetime
from django.core.serializers.json import DjangoJSONEncoder
# Django imports # Django imports
from django.db import connection from django.db import connection
@ -17,6 +19,7 @@ from plane.app.serializers import (
PageLogSerializer, PageLogSerializer,
PageSerializer, PageSerializer,
SubPageSerializer, SubPageSerializer,
PageDetailSerializer,
) )
from plane.db.models import ( from plane.db.models import (
Page, Page,
@ -28,6 +31,8 @@ from plane.db.models import (
# Module imports # Module imports
from ..base import BaseAPIView, BaseViewSet from ..base import BaseAPIView, BaseViewSet
from plane.bgtasks.page_transaction_task import page_transaction
def unarchive_archive_page_and_descendants(page_id, archived_at): def unarchive_archive_page_and_descendants(page_id, archived_at):
# Your SQL query # Your SQL query
@ -87,11 +92,21 @@ class PageViewSet(BaseViewSet):
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
serializer = PageSerializer( serializer = PageSerializer(
data=request.data, data=request.data,
context={"project_id": project_id, "owned_by_id": request.user.id}, context={
"project_id": project_id,
"owned_by_id": request.user.id,
"description_html": request.data.get(
"description_html", "<p></p>"
),
},
) )
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
# capture the page transaction
page_transaction.delay(request.data, None, serializer.data["id"])
page = Page.objects.get(pk=serializer.data["id"])
serializer = PageDetailSerializer(page)
return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@ -125,9 +140,25 @@ class PageViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
serializer = PageSerializer(page, data=request.data, partial=True) serializer = PageDetailSerializer(
page, data=request.data, partial=True
)
page_description = page.description_html
if serializer.is_valid(): if serializer.is_valid():
serializer.save() serializer.save()
# capture the page transaction
if request.data.get("description_html"):
page_transaction.delay(
new_value=request.data,
old_value=json.dumps(
{
"description_html": page_description,
},
cls=DjangoJSONEncoder,
),
page_id=pk,
)
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
return Response( return Response(
serializer.errors, status=status.HTTP_400_BAD_REQUEST serializer.errors, status=status.HTTP_400_BAD_REQUEST
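Page updates now capture a before/after pair for the page_transaction task so mention changes can be diffed asynchronously. A sketch of that capture step, using the serializer and task imported in this diff (the helper name is illustrative):

import json

from django.core.serializers.json import DjangoJSONEncoder

from plane.app.serializers import PageDetailSerializer
from plane.bgtasks.page_transaction_task import page_transaction


def update_page(page, data):
    # Keep the previous description before the serializer overwrites it.
    old_description = page.description_html
    serializer = PageDetailSerializer(page, data=data, partial=True)
    serializer.is_valid(raise_exception=True)
    serializer.save()
    if data.get("description_html"):
        page_transaction.delay(
            new_value=data,
            old_value=json.dumps(
                {"description_html": old_description}, cls=DjangoJSONEncoder
            ),
            page_id=page.id,
        )
    return serializer.data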
@ -140,18 +171,30 @@ class PageViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
def lock(self, request, slug, project_id, page_id): def retrieve(self, request, slug, project_id, pk=None):
page = self.get_queryset().filter(pk=pk).first()
if page is None:
return Response(
{"error": "Page not found"},
status=status.HTTP_404_NOT_FOUND,
)
else:
return Response(
PageDetailSerializer(page).data, status=status.HTTP_200_OK
)
def lock(self, request, slug, project_id, pk):
page = Page.objects.filter( page = Page.objects.filter(
pk=page_id, workspace__slug=slug, project_id=project_id pk=pk, workspace__slug=slug, project_id=project_id
).first() ).first()
page.is_locked = True page.is_locked = True
page.save() page.save()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
def unlock(self, request, slug, project_id, page_id): def unlock(self, request, slug, project_id, pk):
page = Page.objects.filter( page = Page.objects.filter(
pk=page_id, workspace__slug=slug, project_id=project_id pk=pk, workspace__slug=slug, project_id=project_id
).first() ).first()
page.is_locked = False page.is_locked = False
@ -160,13 +203,13 @@ class PageViewSet(BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
def list(self, request, slug, project_id): def list(self, request, slug, project_id):
queryset = self.get_queryset().filter(archived_at__isnull=True) queryset = self.get_queryset()
pages = PageSerializer(queryset, many=True).data pages = PageSerializer(queryset, many=True).data
return Response(pages, status=status.HTTP_200_OK) return Response(pages, status=status.HTTP_200_OK)
def archive(self, request, slug, project_id, page_id): def archive(self, request, slug, project_id, pk):
page = Page.objects.get( page = Page.objects.get(
pk=page_id, workspace__slug=slug, project_id=project_id pk=pk, workspace__slug=slug, project_id=project_id
) )
# only the owner or admin can archive the page # only the owner or admin can archive the page
@ -184,13 +227,16 @@ class PageViewSet(BaseViewSet):
status=status.HTTP_400_BAD_REQUEST, status=status.HTTP_400_BAD_REQUEST,
) )
unarchive_archive_page_and_descendants(page_id, datetime.now()) unarchive_archive_page_and_descendants(pk, datetime.now())
return Response(status=status.HTTP_204_NO_CONTENT) return Response(
{"archived_at": str(datetime.now())},
status=status.HTTP_200_OK,
)
def unarchive(self, request, slug, project_id, page_id): def unarchive(self, request, slug, project_id, pk):
page = Page.objects.get( page = Page.objects.get(
pk=page_id, workspace__slug=slug, project_id=project_id pk=pk, workspace__slug=slug, project_id=project_id
) )
# only the owner or admin can unarchive the page # only the owner or admin can unarchive the page
@ -213,19 +259,10 @@ class PageViewSet(BaseViewSet):
page.parent = None page.parent = None
page.save(update_fields=["parent"]) page.save(update_fields=["parent"])
unarchive_archive_page_and_descendants(page_id, None) unarchive_archive_page_and_descendants(pk, None)
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
def archive_list(self, request, slug, project_id):
pages = Page.objects.filter(
project_id=project_id,
workspace__slug=slug,
).filter(archived_at__isnull=False)
pages = PageSerializer(pages, many=True).data
return Response(pages, status=status.HTTP_200_OK)
def destroy(self, request, slug, project_id, pk): def destroy(self, request, slug, project_id, pk):
page = Page.objects.get( page = Page.objects.get(
pk=pk, workspace__slug=slug, project_id=project_id pk=pk, workspace__slug=slug, project_id=project_id
@ -269,29 +306,20 @@ class PageFavoriteViewSet(BaseViewSet):
serializer_class = PageFavoriteSerializer serializer_class = PageFavoriteSerializer
model = PageFavorite model = PageFavorite
def get_queryset(self): def create(self, request, slug, project_id, pk):
return self.filter_queryset( _ = PageFavorite.objects.create(
super() project_id=project_id,
.get_queryset() page_id=pk,
.filter(archived_at__isnull=True) user=request.user,
.filter(workspace__slug=self.kwargs.get("slug"))
.filter(user=self.request.user)
.select_related("page", "page__owned_by")
) )
return Response(status=status.HTTP_204_NO_CONTENT)
def create(self, request, slug, project_id): def destroy(self, request, slug, project_id, pk):
serializer = PageFavoriteSerializer(data=request.data)
if serializer.is_valid():
serializer.save(user=request.user, project_id=project_id)
return Response(serializer.data, status=status.HTTP_201_CREATED)
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id, page_id):
page_favorite = PageFavorite.objects.get( page_favorite = PageFavorite.objects.get(
project=project_id, project=project_id,
user=request.user, user=request.user,
workspace__slug=slug, workspace__slug=slug,
page_id=page_id, page_id=pk,
) )
page_favorite.delete() page_favorite.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)

View File

@ -393,10 +393,11 @@ class ProjectViewSet(WebhookMixin, BaseViewSet):
# Create the triage state in Backlog group # Create the triage state in Backlog group
State.objects.get_or_create( State.objects.get_or_create(
name="Triage", name="Triage",
group="backlog", group="triage",
description="Default state for managing all Inbox Issues", description="Default state for managing all Inbox Issues",
project_id=pk, project_id=pk,
color="#ff7700", color="#ff7700",
is_triage=True,
) )
project = ( project = (

View File

@ -1,9 +1,6 @@
# Python imports # Python imports
from itertools import groupby from itertools import groupby
# Django imports
from django.db.models import Q
# Third party imports # Third party imports
from rest_framework.response import Response from rest_framework.response import Response
from rest_framework import status from rest_framework import status
@ -17,6 +14,7 @@ from plane.app.permissions import (
from plane.db.models import State, Issue from plane.db.models import State, Issue
from plane.utils.cache import invalidate_cache from plane.utils.cache import invalidate_cache
class StateViewSet(BaseViewSet): class StateViewSet(BaseViewSet):
serializer_class = StateSerializer serializer_class = StateSerializer
model = State model = State
@ -35,13 +33,15 @@ class StateViewSet(BaseViewSet):
project__project_projectmember__is_active=True, project__project_projectmember__is_active=True,
project__archived_at__isnull=True, project__archived_at__isnull=True,
) )
.filter(~Q(name="Triage")) .filter(is_triage=False)
.select_related("project") .select_related("project")
.select_related("workspace") .select_related("workspace")
.distinct() .distinct()
) )
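State querysets now rely on the new is_triage flag instead of matching the literal name "Triage". A small sketch of the equivalent lookup:

from plane.db.models import State


def project_states(slug, project_id):
    # Triage states are hidden from normal state listings.
    return State.objects.filter(
        workspace__slug=slug,
        project_id=project_id,
        is_triage=False,
    ).select_related("project", "workspace")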
@invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False) @invalidate_cache(
path="workspaces/:slug/states/", url_params=True, user=False
)
def create(self, request, slug, project_id): def create(self, request, slug, project_id):
serializer = StateSerializer(data=request.data) serializer = StateSerializer(data=request.data)
if serializer.is_valid(): if serializer.is_valid():
@ -62,7 +62,9 @@ class StateViewSet(BaseViewSet):
return Response(state_dict, status=status.HTTP_200_OK) return Response(state_dict, status=status.HTTP_200_OK)
return Response(states, status=status.HTTP_200_OK) return Response(states, status=status.HTTP_200_OK)
@invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False) @invalidate_cache(
path="workspaces/:slug/states/", url_params=True, user=False
)
def mark_as_default(self, request, slug, project_id, pk): def mark_as_default(self, request, slug, project_id, pk):
# Select all the states which are marked as default # Select all the states which are marked as default
_ = State.objects.filter( _ = State.objects.filter(
@ -73,10 +75,12 @@ class StateViewSet(BaseViewSet):
).update(default=True) ).update(default=True)
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
@invalidate_cache(path="workspaces/:slug/states/", url_params=True, user=False) @invalidate_cache(
path="workspaces/:slug/states/", url_params=True, user=False
)
def destroy(self, request, slug, project_id, pk): def destroy(self, request, slug, project_id, pk):
state = State.objects.get( state = State.objects.get(
~Q(name="Triage"), is_triage=False,
pk=pk, pk=pk,
project_id=project_id, project_id=project_id,
workspace__slug=slug, workspace__slug=slug,

View File

@ -326,11 +326,11 @@ class IssueViewFavoriteViewSet(BaseViewSet):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
def destroy(self, request, slug, project_id, view_id): def destroy(self, request, slug, project_id, view_id):
view_favourite = IssueViewFavorite.objects.get( view_favorite = IssueViewFavorite.objects.get(
project=project_id, project=project_id,
user=request.user, user=request.user,
workspace__slug=slug, workspace__slug=slug,
view_id=view_id, view_id=view_id,
) )
view_favourite.delete() view_favorite.delete()
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)

View File

@ -151,8 +151,8 @@ class WorkSpaceViewSet(BaseViewSet):
return super().partial_update(request, *args, **kwargs) return super().partial_update(request, *args, **kwargs)
@invalidate_cache(path="/api/workspaces/", user=False) @invalidate_cache(path="/api/workspaces/", user=False)
@invalidate_cache(path="/api/users/me/workspaces/") @invalidate_cache(path="/api/users/me/workspaces/", multiple=True)
@invalidate_cache(path="/api/users/me/settings/") @invalidate_cache(path="/api/users/me/settings/", multiple=True)
def destroy(self, request, *args, **kwargs): def destroy(self, request, *args, **kwargs):
return super().destroy(request, *args, **kwargs) return super().destroy(request, *args, **kwargs)
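The cache decorators on user-scoped paths now pass multiple=True, which presumably clears every cached variant of the path rather than only the requesting user's entry (the exact semantics live in plane.utils.cache). Applying them looks like this; the viewset name below is a stand-in:

from plane.utils.cache import invalidate_cache


class ExampleWorkspaceViewSet:  # stand-in for the real DRF viewset
    @invalidate_cache(path="/api/workspaces/", user=False)
    @invalidate_cache(path="/api/users/me/workspaces/", multiple=True)
    @invalidate_cache(path="/api/users/me/settings/", multiple=True)
    def destroy(self, request, *args, **kwargs):
        ...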

View File

@ -27,6 +27,7 @@ class WorkspaceCyclesEndpoint(BaseAPIView):
.select_related("project") .select_related("project")
.select_related("workspace") .select_related("workspace")
.select_related("owned_by") .select_related("owned_by")
.filter(archived_at__isnull=False)
.annotate( .annotate(
total_issues=Count( total_issues=Count(
"issue_cycle", "issue_cycle",

View File

@ -1,36 +1,39 @@
# Python imports # Python imports
import jwt
from datetime import datetime from datetime import datetime
import jwt
# Django imports # Django imports
from django.conf import settings from django.conf import settings
from django.utils import timezone
from django.db.models import Count
from django.core.exceptions import ValidationError from django.core.exceptions import ValidationError
from django.core.validators import validate_email from django.core.validators import validate_email
from django.db.models import Count
from django.utils import timezone
# Third party modules # Third party modules
from rest_framework import status from rest_framework import status
from rest_framework.response import Response
from rest_framework.permissions import AllowAny from rest_framework.permissions import AllowAny
from rest_framework.response import Response
# Module imports # Module imports
from plane.app.permissions import WorkSpaceAdminPermission
from plane.app.serializers import ( from plane.app.serializers import (
WorkSpaceMemberSerializer,
WorkSpaceMemberInviteSerializer, WorkSpaceMemberInviteSerializer,
WorkSpaceMemberSerializer,
) )
from plane.app.views.base import BaseAPIView from plane.app.views.base import BaseAPIView
from .. import BaseViewSet from plane.bgtasks.event_tracking_task import workspace_invite_event
from plane.bgtasks.workspace_invitation_task import workspace_invitation
from plane.db.models import ( from plane.db.models import (
User, User,
Workspace, Workspace,
WorkspaceMemberInvite,
WorkspaceMember, WorkspaceMember,
WorkspaceMemberInvite,
) )
from plane.app.permissions import WorkSpaceAdminPermission from plane.utils.cache import invalidate_cache, invalidate_cache_directly
from plane.bgtasks.workspace_invitation_task import workspace_invitation
from plane.bgtasks.event_tracking_task import workspace_invite_event from .. import BaseViewSet
from plane.utils.cache import invalidate_cache
class WorkspaceInvitationsViewset(BaseViewSet): class WorkspaceInvitationsViewset(BaseViewSet):
"""Endpoint for creating, listing and deleting workspaces""" """Endpoint for creating, listing and deleting workspaces"""
@ -166,7 +169,14 @@ class WorkspaceJoinEndpoint(BaseAPIView):
"""Invitation response endpoint the user can respond to the invitation""" """Invitation response endpoint the user can respond to the invitation"""
@invalidate_cache(path="/api/workspaces/", user=False) @invalidate_cache(path="/api/workspaces/", user=False)
@invalidate_cache(path="/api/users/me/workspaces/") @invalidate_cache(path="/api/users/me/workspaces/", multiple=True)
@invalidate_cache(
path="/api/workspaces/:slug/members/",
user=False,
multiple=True,
url_params=True,
)
@invalidate_cache(path="/api/users/me/settings/", multiple=True)
def post(self, request, slug, pk): def post(self, request, slug, pk):
workspace_invite = WorkspaceMemberInvite.objects.get( workspace_invite = WorkspaceMemberInvite.objects.get(
pk=pk, workspace__slug=slug pk=pk, workspace__slug=slug
@ -264,10 +274,7 @@ class UserWorkspaceInvitationsViewSet(BaseViewSet):
) )
@invalidate_cache(path="/api/workspaces/", user=False) @invalidate_cache(path="/api/workspaces/", user=False)
@invalidate_cache(path="/api/users/me/workspaces/") @invalidate_cache(path="/api/users/me/workspaces/", multiple=True)
@invalidate_cache(
path="/api/workspaces/:slug/members/", url_params=True, user=False
)
def create(self, request): def create(self, request):
invitations = request.data.get("invitations", []) invitations = request.data.get("invitations", [])
workspace_invitations = WorkspaceMemberInvite.objects.filter( workspace_invitations = WorkspaceMemberInvite.objects.filter(
@ -276,6 +283,12 @@ class UserWorkspaceInvitationsViewSet(BaseViewSet):
# If the user is already a member of workspace and was deactivated then activate the user # If the user is already a member of workspace and was deactivated then activate the user
for invitation in workspace_invitations: for invitation in workspace_invitations:
invalidate_cache_directly(
path=f"/api/workspaces/{invitation.workspace.slug}/members/",
user=False,
request=request,
multiple=True,
)
# Update the WorkspaceMember for this specific invitation # Update the WorkspaceMember for this specific invitation
WorkspaceMember.objects.filter( WorkspaceMember.objects.filter(
workspace_id=invitation.workspace_id, member=request.user workspace_id=invitation.workspace_id, member=request.user
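Because the members path embeds the workspace slug, the invitation loop clears it imperatively with invalidate_cache_directly instead of a decorator. Sketched in isolation:

from plane.utils.cache import invalidate_cache_directly


def activate_memberships(request, workspace_invitations):
    for invitation in workspace_invitations:
        # The path is only known at runtime, so clear it per invitation.
        invalidate_cache_directly(
            path=f"/api/workspaces/{invitation.workspace.slug}/members/",
            user=False,
            request=request,
            multiple=True,
        )
        # ... membership reactivation continues as in the view above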

View File

@ -102,7 +102,10 @@ class WorkSpaceMemberViewSet(BaseViewSet):
return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.data, status=status.HTTP_200_OK)
@invalidate_cache( @invalidate_cache(
path="/api/workspaces/:slug/members/", url_params=True, user=False path="/api/workspaces/:slug/members/",
url_params=True,
user=False,
multiple=True,
) )
def partial_update(self, request, slug, pk): def partial_update(self, request, slug, pk):
workspace_member = WorkspaceMember.objects.get( workspace_member = WorkspaceMember.objects.get(
@ -147,9 +150,15 @@ class WorkSpaceMemberViewSet(BaseViewSet):
return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST)
@invalidate_cache( @invalidate_cache(
path="/api/workspaces/:slug/members/", url_params=True, user=False path="/api/workspaces/:slug/members/",
url_params=True,
user=False,
multiple=True,
)
@invalidate_cache(path="/api/users/me/settings/", multiple=True)
@invalidate_cache(
path="/api/users/me/workspaces/", user=False, multiple=True
) )
@invalidate_cache(path="/api/users/me/settings/")
def destroy(self, request, slug, pk): def destroy(self, request, slug, pk):
# Check the user role who is deleting the user # Check the user role who is deleting the user
workspace_member = WorkspaceMember.objects.get( workspace_member = WorkspaceMember.objects.get(
@ -215,9 +224,15 @@ class WorkSpaceMemberViewSet(BaseViewSet):
return Response(status=status.HTTP_204_NO_CONTENT) return Response(status=status.HTTP_204_NO_CONTENT)
@invalidate_cache( @invalidate_cache(
path="/api/workspaces/:slug/members/", url_params=True, user=False path="/api/workspaces/:slug/members/",
url_params=True,
user=False,
multiple=True,
) )
@invalidate_cache(path="/api/users/me/settings/") @invalidate_cache(path="/api/users/me/settings/")
@invalidate_cache(
path="api/users/me/workspaces/", user=False, multiple=True
)
def leave(self, request, slug): def leave(self, request, slug):
workspace_member = WorkspaceMember.objects.get( workspace_member = WorkspaceMember.objects.get(
workspace__slug=slug, workspace__slug=slug,

View File

@ -30,6 +30,7 @@ class WorkspaceModulesEndpoint(BaseAPIView):
.select_related("workspace") .select_related("workspace")
.select_related("lead") .select_related("lead")
.prefetch_related("members") .prefetch_related("members")
.filter(archived_at__isnull=False)
.prefetch_related( .prefetch_related(
Prefetch( Prefetch(
"link_module", "link_module",

View File

@ -0,0 +1,679 @@
# Python imports
import uuid
import random
from datetime import datetime, timedelta
# Django imports
from django.db.models import Max
# Third party imports
from celery import shared_task
from faker import Faker
# Module imports
from plane.db.models import (
Workspace,
User,
Project,
ProjectMember,
State,
Label,
Cycle,
Module,
Issue,
IssueSequence,
IssueAssignee,
IssueLabel,
IssueActivity,
CycleIssue,
ModuleIssue,
Page,
PageLabel,
Inbox,
InboxIssue,
)
def create_project(workspace, user_id):
fake = Faker()
name = fake.name()
unique_id = str(uuid.uuid4())[:5]
project = Project.objects.create(
workspace=workspace,
name=f"{name}_{unique_id}",
identifier=name[
: random.randint(2, 12 if len(name) - 1 >= 12 else len(name) - 1)
].upper(),
created_by_id=user_id,
inbox_view=True,
)
# Add current member as project member
_ = ProjectMember.objects.create(
project=project,
member_id=user_id,
role=20,
)
return project
def create_project_members(workspace, project, members):
members = User.objects.filter(email__in=members)
_ = ProjectMember.objects.bulk_create(
[
ProjectMember(
project=project,
workspace=workspace,
member=member,
role=20,
sort_order=random.randint(0, 65535),
)
for member in members
],
ignore_conflicts=True,
)
return
def create_states(workspace, project, user_id):
states = [
{
"name": "Backlog",
"color": "#A3A3A3",
"sequence": 15000,
"group": "backlog",
"default": True,
},
{
"name": "Todo",
"color": "#3A3A3A",
"sequence": 25000,
"group": "unstarted",
},
{
"name": "In Progress",
"color": "#F59E0B",
"sequence": 35000,
"group": "started",
},
{
"name": "Done",
"color": "#16A34A",
"sequence": 45000,
"group": "completed",
},
{
"name": "Cancelled",
"color": "#EF4444",
"sequence": 55000,
"group": "cancelled",
},
]
states = State.objects.bulk_create(
[
State(
name=state["name"],
color=state["color"],
project=project,
sequence=state["sequence"],
workspace=workspace,
group=state["group"],
default=state.get("default", False),
created_by_id=user_id,
)
for state in states
]
)
return states
def create_labels(workspace, project, user_id):
fake = Faker()
Faker.seed(0)
return Label.objects.bulk_create(
[
Label(
name=fake.color_name(),
color=fake.hex_color(),
project=project,
workspace=workspace,
created_by_id=user_id,
sort_order=random.randint(0, 65535),
)
for _ in range(0, 50)
],
ignore_conflicts=True,
)
def create_cycles(workspace, project, user_id, cycle_count):
fake = Faker()
Faker.seed(0)
cycles = []
used_date_ranges = set() # Track used date ranges
while len(cycles) <= cycle_count:
# Generate a start date, allowing for None
start_date_option = [None, fake.date_this_year()]
start_date = start_date_option[random.randint(0, 1)]
# Initialize end_date based on start_date
end_date = (
None
if start_date is None
else fake.date_between_dates(
date_start=start_date,
date_end=datetime.now().date().replace(month=12, day=31),
)
)
# Ensure end_date is strictly after start_date if start_date is not None
while start_date is not None and (
end_date <= start_date
or (start_date, end_date) in used_date_ranges
):
end_date = fake.date_this_year()
# Add the unique date range to the set
(
used_date_ranges.add((start_date, end_date))
if (end_date is not None and start_date is not None)
else None
)
# Append the cycle with unique date range
cycles.append(
Cycle(
name=fake.name(),
owned_by_id=user_id,
sort_order=random.randint(0, 65535),
start_date=start_date,
end_date=end_date,
project=project,
workspace=workspace,
)
)
return Cycle.objects.bulk_create(cycles, ignore_conflicts=True)
def create_modules(workspace, project, user_id, module_count):
fake = Faker()
Faker.seed(0)
modules = []
for _ in range(0, module_count):
start_date = [None, fake.date_this_year()][random.randint(0, 1)]
end_date = (
None
if start_date is None
else fake.date_between_dates(
date_start=start_date,
date_end=datetime.now().date().replace(month=12, day=31),
)
)
modules.append(
Module(
name=fake.name(),
sort_order=random.randint(0, 65535),
start_date=start_date,
target_date=end_date,
project=project,
workspace=workspace,
)
)
return Module.objects.bulk_create(modules, ignore_conflicts=True)
def create_pages(workspace, project, user_id, pages_count):
fake = Faker()
Faker.seed(0)
pages = []
for _ in range(0, pages_count):
text = fake.text(max_nb_chars=60000)
pages.append(
Page(
name=fake.name(),
project=project,
workspace=workspace,
owned_by_id=user_id,
access=random.randint(0, 1),
color=fake.hex_color(),
description_html=f"<p>{text}</p>",
archived_at=None,
is_locked=False,
)
)
return Page.objects.bulk_create(pages, ignore_conflicts=True)
def create_page_labels(workspace, project, user_id, pages_count):
# labels
labels = Label.objects.filter(project=project).values_list("id", flat=True)
pages = random.sample(
list(
Page.objects.filter(project=project).values_list("id", flat=True)
),
int(pages_count / 2),
)
# Bulk page labels
bulk_page_labels = []
for page in pages:
for label in random.sample(
list(labels), random.randint(0, len(labels) - 1)
):
bulk_page_labels.append(
PageLabel(
page_id=page,
label_id=label,
project=project,
workspace=workspace,
)
)
# Page labels
PageLabel.objects.bulk_create(
bulk_page_labels, batch_size=1000, ignore_conflicts=True
)
def create_issues(workspace, project, user_id, issue_count):
fake = Faker()
Faker.seed(0)
states = State.objects.filter(
workspace=workspace, project=project
).exclude(group="Triage").values_list("id", flat=True)
creators = ProjectMember.objects.filter(
workspace=workspace, project=project
).values_list("member_id", flat=True)
issues = []
# Get the maximum sequence_id
last_id = IssueSequence.objects.filter(
project=project,
).aggregate(
largest=Max("sequence")
)["largest"]
last_id = 1 if last_id is None else last_id + 1
# Get the maximum sort order
largest_sort_order = Issue.objects.filter(
project=project,
state_id=states[random.randint(0, len(states) - 1)],
).aggregate(largest=Max("sort_order"))["largest"]
largest_sort_order = (
65535 if largest_sort_order is None else largest_sort_order + 10000
)
for _ in range(0, issue_count):
start_date = [None, fake.date_this_year()][random.randint(0, 1)]
end_date = (
None
if start_date is None
else fake.date_between_dates(
date_start=start_date,
date_end=datetime.now().date().replace(month=12, day=31),
)
)
text = fake.text(max_nb_chars=60000)
issues.append(
Issue(
state_id=states[random.randint(0, len(states) - 1)],
project=project,
workspace=workspace,
name=text[:254],
description_html=f"<p>{text}</p>",
description_stripped=text,
sequence_id=last_id,
sort_order=largest_sort_order,
start_date=start_date,
target_date=end_date,
priority=["urgent", "high", "medium", "low", "none"][
random.randint(0, 4)
],
created_by_id=creators[random.randint(0, len(creators) - 1)],
)
)
largest_sort_order = largest_sort_order + random.randint(0, 1000)
last_id = last_id + 1
issues = Issue.objects.bulk_create(
issues, ignore_conflicts=True, batch_size=1000
)
# Sequences
_ = IssueSequence.objects.bulk_create(
[
IssueSequence(
issue=issue,
sequence=issue.sequence_id,
project=project,
workspace=workspace,
)
for issue in issues
],
batch_size=100,
)
# Track the issue activities
IssueActivity.objects.bulk_create(
[
IssueActivity(
issue=issue,
actor_id=user_id,
project=project,
workspace=workspace,
comment="created the issue",
verb="created",
created_by_id=user_id,
)
for issue in issues
],
batch_size=100,
)
return issues
def create_inbox_issues(workspace, project, user_id, inbox_issue_count):
issues = create_issues(workspace, project, user_id, inbox_issue_count)
inbox, create = Inbox.objects.get_or_create(
name="Inbox",
project=project,
is_default=True,
)
InboxIssue.objects.bulk_create(
[
InboxIssue(
issue=issue,
inbox=inbox,
status=(status := [-2, -1, 0, 1, 2][random.randint(0, 4)]),
snoozed_till=(
datetime.now() + timedelta(days=random.randint(1, 30))
if status == 0
else None
),
source="in-app",
workspace=workspace,
project=project,
)
for issue in issues
],
batch_size=100,
)
def create_issue_parent(workspace, project, user_id, issue_count):
parent_count = issue_count / 4
parent_issues = Issue.objects.filter(project=project).values_list(
"id", flat=True
)[: int(parent_count)]
sub_issues = Issue.objects.filter(project=project).exclude(
pk__in=parent_issues
)[: int(issue_count / 2)]
bulk_sub_issues = []
for sub_issue in sub_issues:
sub_issue.parent_id = parent_issues[
random.randint(0, int(parent_count - 1))
]
bulk_sub_issues.append(sub_issue)
Issue.objects.bulk_update(bulk_sub_issues, ["parent"], batch_size=1000)
def create_issue_assignees(workspace, project, user_id, issue_count):
# assignees
assignees = ProjectMember.objects.filter(project=project).values_list(
"member_id", flat=True
)
issues = random.sample(
list(
Issue.objects.filter(project=project).values_list("id", flat=True)
),
int(issue_count / 2),
)
# Bulk issue
bulk_issue_assignees = []
for issue in issues:
for assignee in random.sample(
list(assignees), random.randint(0, len(assignees) - 1)
):
bulk_issue_assignees.append(
IssueAssignee(
issue_id=issue,
assignee_id=assignee,
project=project,
workspace=workspace,
)
)
# Issue assignees
IssueAssignee.objects.bulk_create(
bulk_issue_assignees, batch_size=1000, ignore_conflicts=True
)
def create_issue_labels(workspace, project, user_id, issue_count):
# labels
labels = Label.objects.filter(project=project).values_list("id", flat=True)
issues = random.sample(
list(
Issue.objects.filter(project=project).values_list("id", flat=True)
),
int(issue_count / 2),
)
# Bulk issue
bulk_issue_labels = []
for issue in issues:
for label in random.sample(
list(labels), random.randint(0, len(labels) - 1)
):
bulk_issue_labels.append(
IssueLabel(
issue_id=issue,
label_id=label,
project=project,
workspace=workspace,
)
)
# Issue labels
IssueLabel.objects.bulk_create(
bulk_issue_labels, batch_size=1000, ignore_conflicts=True
)
def create_cycle_issues(workspace, project, user_id, issue_count):
# assignees
cycles = Cycle.objects.filter(project=project).values_list("id", flat=True)
issues = random.sample(
list(
Issue.objects.filter(project=project).values_list("id", flat=True)
),
int(issue_count / 2),
)
# Bulk issue
bulk_cycle_issues = []
for issue in issues:
cycle = cycles[random.randint(0, len(cycles) - 1)]
bulk_cycle_issues.append(
CycleIssue(
cycle_id=cycle,
issue_id=issue,
project=project,
workspace=workspace,
)
)
# Issue assignees
CycleIssue.objects.bulk_create(
bulk_cycle_issues, batch_size=1000, ignore_conflicts=True
)
def create_module_issues(workspace, project, user_id, issue_count):
# assignees
modules = Module.objects.filter(project=project).values_list(
"id", flat=True
)
issues = random.sample(
list(
Issue.objects.filter(project=project).values_list("id", flat=True)
),
int(issue_count / 2),
)
# Bulk issue
bulk_module_issues = []
for issue in issues:
module = modules[random.randint(0, len(modules) - 1)]
bulk_module_issues.append(
ModuleIssue(
module_id=module,
issue_id=issue,
project=project,
workspace=workspace,
)
)
# Issue assignees
ModuleIssue.objects.bulk_create(
bulk_module_issues, batch_size=1000, ignore_conflicts=True
)
@shared_task
def create_dummy_data(
slug,
email,
members,
issue_count,
cycle_count,
module_count,
pages_count,
inbox_issue_count,
):
workspace = Workspace.objects.get(slug=slug)
user = User.objects.get(email=email)
user_id = user.id
# Create a project
project = create_project(workspace=workspace, user_id=user_id)
# create project members
create_project_members(
workspace=workspace, project=project, members=members
)
# Create states
create_states(workspace=workspace, project=project, user_id=user_id)
# Create labels
create_labels(workspace=workspace, project=project, user_id=user_id)
# create cycles
create_cycles(
workspace=workspace,
project=project,
user_id=user_id,
cycle_count=cycle_count,
)
# create modules
create_modules(
workspace=workspace,
project=project,
user_id=user_id,
module_count=module_count,
)
# create pages
create_pages(
workspace=workspace,
project=project,
user_id=user_id,
pages_count=pages_count,
)
# create page labels
create_page_labels(
workspace=workspace,
project=project,
user_id=user_id,
pages_count=pages_count,
)
# create issues
create_issues(
workspace=workspace,
project=project,
user_id=user_id,
issue_count=issue_count,
)
# create inbox issues
create_inbox_issues(
workspace=workspace,
project=project,
user_id=user_id,
inbox_issue_count=inbox_issue_count,
)
# create issue parent
create_issue_parent(
workspace=workspace,
project=project,
user_id=user_id,
issue_count=issue_count,
)
# create issue assignees
create_issue_assignees(
workspace=workspace,
project=project,
user_id=user_id,
issue_count=issue_count,
)
# create issue labels
create_issue_labels(
workspace=workspace,
project=project,
user_id=user_id,
issue_count=issue_count,
)
# create cycle issues
create_cycle_issues(
workspace=workspace,
project=project,
user_id=user_id,
issue_count=issue_count,
)
# create module issues
create_module_issues(
workspace=workspace,
project=project,
user_id=user_id,
issue_count=issue_count,
)
return
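The new seeding task is a plain Celery shared task, so it can be queued with .delay(). A hypothetical invocation; the import path and every argument value below are made up for illustration:

from plane.bgtasks.dummy_data_task import create_dummy_data  # assumed module path

create_dummy_data.delay(
    slug="acme",
    email="owner@example.com",
    members=["teammate@example.com"],
    issue_count=200,
    cycle_count=5,
    module_count=5,
    pages_count=10,
    inbox_issue_count=20,
)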

View File

@ -1,4 +1,5 @@
import logging import logging
import re
from datetime import datetime from datetime import datetime
from bs4 import BeautifulSoup from bs4 import BeautifulSoup
@ -19,6 +20,12 @@ from plane.settings.redis import redis_instance
from plane.utils.exception_logger import log_exception from plane.utils.exception_logger import log_exception
def remove_unwanted_characters(input_text):
# Keep only alphanumeric characters, spaces, and dashes.
processed_text = re.sub(r"[^a-zA-Z0-9 \-]", "", input_text)
return processed_text
# acquire and delete redis lock # acquire and delete redis lock
def acquire_lock(lock_id, expire_time=300): def acquire_lock(lock_id, expire_time=300):
redis_client = redis_instance() redis_client = redis_instance()
@ -175,7 +182,16 @@ def send_email_notification(
if acquire_lock(lock_id=lock_id): if acquire_lock(lock_id=lock_id):
# get the redis instance # get the redis instance
ri = redis_instance() ri = redis_instance()
base_api = ri.get(str(issue_id)).decode() base_api = (
ri.get(str(issue_id)).decode()
if ri.get(str(issue_id))
else None
)
# Skip if base api is not present
if not base_api:
return
data = create_payload(notification_data=notification_data) data = create_payload(notification_data=notification_data)
# Get email configurations # Get email configurations
@ -255,9 +271,7 @@ def send_email_notification(
summary = "Updates were made to the issue by" summary = "Updates were made to the issue by"
# Send the mail # Send the mail
subject = ( subject = f"{issue.project.identifier}-{issue.sequence_id} {remove_unwanted_characters(issue.name)}"
f"{issue.project.identifier}-{issue.sequence_id} {issue.name}"
)
context = { context = {
"data": template_data, "data": template_data,
"summary": summary, "summary": summary,
@ -321,8 +335,7 @@ def send_email_notification(
"Duplicate email received skipping" "Duplicate email received skipping"
) )
return return
except (Issue.DoesNotExist, User.DoesNotExist) as e: except (Issue.DoesNotExist, User.DoesNotExist):
log_exception(e)
release_lock(lock_id=lock_id) release_lock(lock_id=lock_id)
return return
except Exception as e: except Exception as e:
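Email subjects are now sanitized before sending, and a missing Redis entry for the issue short-circuits the task instead of raising. A runnable illustration of the sanitizer:

import re


def remove_unwanted_characters(input_text):
    # keep only alphanumerics, spaces, and dashes
    return re.sub(r"[^a-zA-Z0-9 \-]", "", input_text)


print(remove_unwanted_characters("Fix <crash> on [save]!"))  # Fix crash on save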

View File

@ -1553,6 +1553,46 @@ def delete_draft_issue_activity(
) )
def create_inbox_activity(
requested_data,
current_instance,
issue_id,
project_id,
workspace_id,
actor_id,
issue_activities,
epoch,
):
requested_data = (
json.loads(requested_data) if requested_data is not None else None
)
current_instance = (
json.loads(current_instance) if current_instance is not None else None
)
status_dict = {
-2: "Pending",
-1: "Rejected",
0: "Snoozed",
1: "Accepted",
2: "Duplicate",
}
if requested_data.get("status") is not None:
issue_activities.append(
IssueActivity(
issue_id=issue_id,
project_id=project_id,
workspace_id=workspace_id,
comment="updated the inbox status",
field="inbox",
verb=requested_data.get("status"),
actor_id=actor_id,
epoch=epoch,
old_value=status_dict.get(current_instance.get("status")),
new_value=status_dict.get(requested_data.get("status")),
)
)
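
The numeric inbox statuses map to readable labels via status_dict; a small sketch of how the old and new activity values are derived (payloads are illustrative):

```python
# Illustrative payloads: an inbox issue moving from Pending (-2) to Accepted (1).
status_dict = {-2: "Pending", -1: "Rejected", 0: "Snoozed", 1: "Accepted", 2: "Duplicate"}
current_instance = {"status": -2}
requested_data = {"status": 1}

old_value = status_dict.get(current_instance.get("status"))  # "Pending"
new_value = status_dict.get(requested_data.get("status"))    # "Accepted"
```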
# Receive message from room group # Receive message from room group
@shared_task @shared_task
def issue_activity( def issue_activity(
@ -1613,6 +1653,7 @@ def issue_activity(
"issue_draft.activity.created": create_draft_issue_activity, "issue_draft.activity.created": create_draft_issue_activity,
"issue_draft.activity.updated": update_draft_issue_activity, "issue_draft.activity.updated": update_draft_issue_activity,
"issue_draft.activity.deleted": delete_draft_issue_activity, "issue_draft.activity.deleted": delete_draft_issue_activity,
"inbox.activity.created": create_inbox_activity,
} }
func = ACTIVITY_MAPPER.get(type) func = ACTIVITY_MAPPER.get(type)

View File

@ -0,0 +1,76 @@
# Python imports
import json
# Django imports
from django.utils import timezone
# Third-party imports
from bs4 import BeautifulSoup
# Module imports
from plane.db.models import Page, PageLog
from celery import shared_task
def extract_components(value, tag):
try:
mentions = []
html = value.get("description_html")
soup = BeautifulSoup(html, "html.parser")
mention_tags = soup.find_all(tag)
for mention_tag in mention_tags:
mention = {
"id": mention_tag.get("id"),
"entity_identifier": mention_tag.get("entity_identifier"),
"entity_name": mention_tag.get("entity_name"),
}
mentions.append(mention)
return mentions
except Exception:
return []
@shared_task
def page_transaction(new_value, old_value, page_id):
page = Page.objects.get(pk=page_id)
new_page_mention = PageLog.objects.filter(page_id=page_id).exists()
old_value = json.loads(old_value) if old_value else {}
new_transactions = []
deleted_transaction_ids = set()
# TODO - Add "issue-embed-component", "img", "todo" components
components = ["mention-component"]
for component in components:
old_mentions = extract_components(old_value, component)
new_mentions = extract_components(new_value, component)
new_mentions_ids = {mention["id"] for mention in new_mentions}
old_mention_ids = {mention["id"] for mention in old_mentions}
deleted_transaction_ids.update(old_mention_ids - new_mentions_ids)
new_transactions.extend(
PageLog(
transaction=mention["id"],
page_id=page_id,
entity_identifier=mention["entity_identifier"],
entity_name=mention["entity_name"],
workspace_id=page.workspace_id,
project_id=page.project_id,
created_at=timezone.now(),
updated_at=timezone.now(),
)
for mention in new_mentions
if mention["id"] not in old_mention_ids or not new_page_mention
)
# Create new PageLog objects for new transactions
PageLog.objects.bulk_create(
new_transactions, batch_size=10, ignore_conflicts=True
)
# Delete the removed transactions
PageLog.objects.filter(transaction__in=deleted_transaction_ids).delete()
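
For reference, extract_components pulls mention metadata out of the page's HTML description. A quick sketch with a made-up mention tag:

```python
# Illustrative only: one mention component in a page description.
# extract_components is the helper defined in the task above.
value = {
    "description_html": (
        '<p>Ping <mention-component id="m1" '
        'entity_identifier="user-123" entity_name="user_mention">'
        "</mention-component></p>"
    )
}

print(extract_components(value, "mention-component"))
# -> [{'id': 'm1', 'entity_identifier': 'user-123', 'entity_name': 'user_mention'}]
```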

View File

@ -202,16 +202,7 @@ def send_webhook(event, payload, kw, action, slug, bulk, current_site):
if webhooks: if webhooks:
if action in ["POST", "PATCH"]: if action in ["POST", "PATCH"]:
if bulk and event in ["cycle_issue", "module_issue"]: if bulk and event in ["cycle_issue", "module_issue"]:
event_data = IssueExpandSerializer( return
Issue.objects.filter(
pk__in=[
str(event.get("issue")) for event in payload
]
).prefetch_related("issue_cycle", "issue_module"),
many=True,
).data
event = "issue"
action = "PATCH"
else: else:
event_data = [ event_data = [
get_model_data( get_model_data(
@ -219,7 +210,7 @@ def send_webhook(event, payload, kw, action, slug, bulk, current_site):
event_id=( event_id=(
payload.get("id") payload.get("id")
if isinstance(payload, dict) if isinstance(payload, dict)
else None else kw.get("pk")
), ),
many=False, many=False,
) )

View File

@ -0,0 +1,95 @@
# Django imports
from typing import Any
from django.core.management.base import BaseCommand, CommandError
# Module imports
from plane.db.models import User, Workspace, WorkspaceMember
class Command(BaseCommand):
    help = "Create dummy issues, cycles, etc. for a project in a given workspace"
def handle(self, *args: Any, **options: Any) -> str | None:
try:
workspace_name = input("Workspace Name: ")
workspace_slug = input("Workspace slug: ")
if workspace_slug == "":
raise CommandError("Workspace slug is required")
if Workspace.objects.filter(slug=workspace_slug).exists():
raise CommandError("Workspace already exists")
creator = input("Your email: ")
if (
creator == ""
or not User.objects.filter(email=creator).exists()
):
                raise CommandError(
                    "User email is required and the user should have signed in to Plane"
                )
user = User.objects.get(email=creator)
members = input("Enter Member emails (comma separated): ")
members = members.split(",") if members != "" else []
# Create workspace
workspace = Workspace.objects.create(
slug=workspace_slug,
name=workspace_name,
owner=user,
)
# Create workspace member
WorkspaceMember.objects.create(
workspace=workspace, role=20, member=user
)
user_ids = User.objects.filter(email__in=members)
_ = WorkspaceMember.objects.bulk_create(
[
WorkspaceMember(
workspace=workspace,
member=user_id,
role=20,
)
for user_id in user_ids
],
ignore_conflicts=True,
)
project_count = int(input("Number of projects to be created: "))
for i in range(project_count):
print(f"Please provide the following details for project {i+1}:")
issue_count = int(input("Number of issues to be created: "))
cycle_count = int(input("Number of cycles to be created: "))
module_count = int(input("Number of modules to be created: "))
pages_count = int(input("Number of pages to be created: "))
inbox_issue_count = int(
input("Number of inbox issues to be created: ")
)
from plane.bgtasks.dummy_data_task import create_dummy_data
create_dummy_data.delay(
slug=workspace_slug,
email=creator,
members=members,
issue_count=issue_count,
cycle_count=cycle_count,
module_count=module_count,
pages_count=pages_count,
inbox_issue_count=inbox_issue_count,
)
self.stdout.write(
self.style.SUCCESS("Data is pushed to the queue")
)
return
except Exception as e:
self.stdout.write(
self.style.ERROR(f"Command errored out {str(e)}")
)
return
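
Assuming the file above is registered as a management command named create_dummy_data (the filename is not visible in this diff), it can be invoked programmatically like so:

```python
# Sketch: running the command from code. The command name is an assumption;
# the command itself then prompts for workspace details and per-project counts
# before enqueueing create_dummy_data.delay(...) on Celery.
from django.core.management import call_command

call_command("create_dummy_data")
```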

View File

@ -0,0 +1,48 @@
# Django imports
from django.core.management.base import BaseCommand, CommandError
# Module imports
from plane.license.models import Instance, InstanceAdmin
from plane.db.models import User
class Command(BaseCommand):
help = "Add a new instance admin"
def add_arguments(self, parser):
# Positional argument
parser.add_argument(
"admin_email", type=str, help="Instance Admin Email"
)
def handle(self, *args, **options):
admin_email = options.get("admin_email", False)
if not admin_email:
raise CommandError("Please provide the email of the admin.")
user = User.objects.filter(email=admin_email).first()
if user is None:
raise CommandError("User with the provided email does not exist.")
try:
# Get the instance
instance = Instance.objects.last()
# Get or create an instance admin
_, created = InstanceAdmin.objects.get_or_create(
user=user, instance=instance, role=20
)
if not created:
raise CommandError(
"The provided email is already an instance admin."
)
self.stdout.write(
self.style.SUCCESS("Successfully created the admin")
)
except Exception as e:
print(e)
raise CommandError("Failed to create the instance admin.")
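
Assuming the command above is exposed under a name such as add_instance_admin (the filename is not shown in this diff), usage looks like this:

```python
# Sketch: the command takes a single positional "admin_email" argument.
# The command name "add_instance_admin" is an assumption.
from django.core.management import call_command

call_command("add_instance_admin", "admin@example.com")
```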

View File

@ -0,0 +1,44 @@
# Generated by Django 4.2.10 on 2024-04-02 12:18
from django.db import migrations, models
def update_project_state_group(apps, schema_editor):
State = apps.get_model("db", "State")
# Update states in bulk
State.objects.filter(group="backlog", name="Triage").update(
is_triage=True, group="triage"
)
class Migration(migrations.Migration):
dependencies = [
("db", "0062_cycle_archived_at_module_archived_at_and_more"),
]
operations = [
migrations.AddField(
model_name="state",
name="is_triage",
field=models.BooleanField(default=False),
),
migrations.AlterField(
model_name="state",
name="group",
field=models.CharField(
choices=[
("backlog", "Backlog"),
("unstarted", "Unstarted"),
("started", "Started"),
("completed", "Completed"),
("cancelled", "Cancelled"),
("triage", "Triage"),
],
default="backlog",
max_length=20,
),
),
migrations.RunPython(update_project_state_group),
]
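
After this migration, triage states are identified by the is_triage flag rather than by name, which is how the Issue model below now excludes them; for example:

```python
# Sketch: querying states via the new flag instead of filtering on name="Triage".
from django.db.models import Q
from plane.db.models import State

triage_states = State.objects.filter(is_triage=True)
default_non_triage = State.objects.filter(~Q(is_triage=True), default=True).first()
```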

View File

@ -0,0 +1,20 @@
# Generated by Django 4.2.10 on 2024-04-09 11:34
from django.db import migrations, models
import plane.db.models.page
class Migration(migrations.Migration):
dependencies = [
('db', '0063_state_is_triage_alter_state_group'),
]
operations = [
migrations.AddField(
model_name="page",
name="view_props",
field=models.JSONField(
default=plane.db.models.page.get_view_props
),
),
]

View File

@ -171,14 +171,14 @@ class Issue(ProjectBaseModel):
from plane.db.models import State from plane.db.models import State
default_state = State.objects.filter( default_state = State.objects.filter(
~models.Q(name="Triage"), ~models.Q(is_triage=True),
project=self.project, project=self.project,
default=True, default=True,
).first() ).first()
# if there is no default state assign any random state # if there is no default state assign any random state
if default_state is None: if default_state is None:
random_state = State.objects.filter( random_state = State.objects.filter(
~models.Q(name="Triage"), project=self.project ~models.Q(is_triage=True), project=self.project
).first() ).first()
self.state = random_state self.state = random_state
else: else:

View File

@ -9,6 +9,10 @@ from . import ProjectBaseModel
from plane.utils.html_processor import strip_tags from plane.utils.html_processor import strip_tags
def get_view_props():
return {"full_width": False}
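
Because JSONField defaults must be callables, get_view_props hands every new Page its own dict; a quick check:

```python
# Sketch: the import path matches the reference used in the migration above.
from plane.db.models.page import get_view_props

assert get_view_props() == {"full_width": False}
assert get_view_props() is not get_view_props()  # a fresh dict on every call
```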
class Page(ProjectBaseModel): class Page(ProjectBaseModel):
name = models.CharField(max_length=255) name = models.CharField(max_length=255)
description = models.JSONField(default=dict, blank=True) description = models.JSONField(default=dict, blank=True)
@ -35,6 +39,7 @@ class Page(ProjectBaseModel):
) )
archived_at = models.DateField(null=True) archived_at = models.DateField(null=True)
is_locked = models.BooleanField(default=False) is_locked = models.BooleanField(default=False)
view_props = models.JSONField(default=get_view_props)
class Meta: class Meta:
verbose_name = "Page" verbose_name = "Page"
@ -81,7 +86,7 @@ class PageLog(ProjectBaseModel):
ordering = ("-created_at",) ordering = ("-created_at",)
def __str__(self): def __str__(self):
return f"{self.page.name} {self.type}" return f"{self.page.name} {self.entity_name}"
class PageBlock(ProjectBaseModel): class PageBlock(ProjectBaseModel):

View File

@ -21,10 +21,12 @@ class State(ProjectBaseModel):
("started", "Started"), ("started", "Started"),
("completed", "Completed"), ("completed", "Completed"),
("cancelled", "Cancelled"), ("cancelled", "Cancelled"),
("triage", "Triage")
), ),
default="backlog", default="backlog",
max_length=20, max_length=20,
) )
is_triage = models.BooleanField(default=False)
default = models.BooleanField(default=False) default = models.BooleanField(default=False)
external_source = models.CharField(max_length=255, null=True, blank=True) external_source = models.CharField(max_length=255, null=True, blank=True)
external_id = models.CharField(max_length=255, blank=True, null=True) external_id = models.CharField(max_length=255, blank=True, null=True)

View File

@ -88,6 +88,12 @@ class Command(BaseCommand):
"category": "SMTP", "category": "SMTP",
"is_encrypted": False, "is_encrypted": False,
}, },
{
"key": "EMAIL_USE_SSL",
"value": os.environ.get("EMAIL_USE_SSL", "0"),
"category": "SMTP",
"is_encrypted": False,
},
{ {
"key": "OPENAI_API_KEY", "key": "OPENAI_API_KEY",
"value": os.environ.get("OPENAI_API_KEY"), "value": os.environ.get("OPENAI_API_KEY"),

View File

@ -288,10 +288,13 @@ else:
CELERY_BROKER_URL = REDIS_URL CELERY_BROKER_URL = REDIS_URL
CELERY_IMPORTS = ( CELERY_IMPORTS = (
# scheduled tasks
"plane.bgtasks.issue_automation_task", "plane.bgtasks.issue_automation_task",
"plane.bgtasks.exporter_expired_task", "plane.bgtasks.exporter_expired_task",
"plane.bgtasks.file_asset_task", "plane.bgtasks.file_asset_task",
"plane.bgtasks.email_notification_task", "plane.bgtasks.email_notification_task",
# management tasks
"plane.bgtasks.dummy_data_task",
) )
# Sentry Settings # Sentry Settings

View File

@ -6,7 +6,7 @@ from .common import * # noqa
# SECURITY WARNING: don't run with debug turned on in production! # SECURITY WARNING: don't run with debug turned on in production!
DEBUG = int(os.environ.get("DEBUG", 0)) == 1 DEBUG = int(os.environ.get("DEBUG", 0)) == 1
DEBUG = True
# Honor the 'X-Forwarded-Proto' header for request.is_secure() # Honor the 'X-Forwarded-Proto' header for request.is_secure()
SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
@ -25,7 +25,7 @@ LOG_DIR = os.path.join(BASE_DIR, "logs") # noqa
if not os.path.exists(LOG_DIR): if not os.path.exists(LOG_DIR):
os.makedirs(LOG_DIR) os.makedirs(LOG_DIR)
# Logging configuration
LOGGING = { LOGGING = {
"version": 1, "version": 1,
"disable_existing_loggers": False, "disable_existing_loggers": False,

View File

@ -1,18 +1,18 @@
# Python imports # Python imports
from itertools import groupby
from datetime import timedelta from datetime import timedelta
from itertools import groupby
# Django import # Django import
from django.db import models from django.db import models
from django.utils import timezone from django.db.models import Case, CharField, Count, F, Sum, Value, When
from django.db.models.functions import TruncDate
from django.db.models import Count, F, Sum, Value, Case, When, CharField
from django.db.models.functions import ( from django.db.models.functions import (
Coalesce, Coalesce,
Concat,
ExtractMonth, ExtractMonth,
ExtractYear, ExtractYear,
Concat, TruncDate,
) )
from django.utils import timezone
# Module imports # Module imports
from plane.db.models import Issue from plane.db.models import Issue
@ -115,11 +115,16 @@ def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
total_issues = queryset.total_issues total_issues = queryset.total_issues
if cycle_id: if cycle_id:
# Get all dates between the two dates if queryset.end_date and queryset.start_date:
date_range = [ # Get all dates between the two dates
queryset.start_date + timedelta(days=x) date_range = [
for x in range((queryset.end_date - queryset.start_date).days + 1) queryset.start_date + timedelta(days=x)
] for x in range(
(queryset.end_date - queryset.start_date).days + 1
)
]
else:
date_range = []
chart_data = {str(date): 0 for date in date_range} chart_data = {str(date): 0 for date in date_range}

View File

@ -33,12 +33,12 @@ def cache_response(timeout=60 * 60, path=None, user=True):
custom_path = path if path is not None else request.get_full_path() custom_path = path if path is not None else request.get_full_path()
key = generate_cache_key(custom_path, auth_header) key = generate_cache_key(custom_path, auth_header)
cached_result = cache.get(key) cached_result = cache.get(key)
if cached_result is not None: if cached_result is not None:
return Response( return Response(
cached_result["data"], status=cached_result["status"] cached_result["data"], status=cached_result["status"]
) )
response = view_func(instance, request, *args, **kwargs) response = view_func(instance, request, *args, **kwargs)
if response.status_code == 200 and not settings.DEBUG: if response.status_code == 200 and not settings.DEBUG:
cache.set( cache.set(
key, key,
@ -53,34 +53,42 @@ def cache_response(timeout=60 * 60, path=None, user=True):
return decorator return decorator
def invalidate_cache(path=None, url_params=False, user=True): def invalidate_cache_directly(
"""invalidate cache per user""" path=None, url_params=False, user=True, request=None, multiple=False
):
if url_params and path:
path_with_values = path
# Assuming `kwargs` could be passed directly if needed, otherwise, skip this part
for key, value in request.resolver_match.kwargs.items():
path_with_values = path_with_values.replace(f":{key}", str(value))
custom_path = path_with_values
else:
custom_path = path if path is not None else request.get_full_path()
auth_header = (
None
if request.user.is_anonymous
else str(request.user.id) if user else None
)
key = generate_cache_key(custom_path, auth_header)
if multiple:
cache.delete_many(keys=cache.keys(f"*{key}*"))
else:
cache.delete(key)
def invalidate_cache(path=None, url_params=False, user=True, multiple=False):
def decorator(view_func): def decorator(view_func):
@wraps(view_func) @wraps(view_func)
def _wrapped_view(instance, request, *args, **kwargs): def _wrapped_view(instance, request, *args, **kwargs):
# Invalidate cache before executing the view function # invalidate the cache
if url_params: invalidate_cache_directly(
path_with_values = path path=path,
for key, value in kwargs.items(): url_params=url_params,
path_with_values = path_with_values.replace( user=user,
f":{key}", str(value) request=request,
) multiple=multiple,
custom_path = path_with_values
else:
custom_path = (
path if path is not None else request.get_full_path()
)
auth_header = (
None
if request.user.is_anonymous
else str(request.user.id) if user else None
) )
key = generate_cache_key(custom_path, auth_header)
cache.delete(key)
# Execute the view function
return view_func(instance, request, *args, **kwargs) return view_func(instance, request, *args, **kwargs)
return _wrapped_view return _wrapped_view
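
A usage sketch of the refactored decorator (the view, path, and import path are illustrative, not taken from this diff):

```python
# Sketch: invalidating every cached variant of a parameterised path when a
# view mutates data. ":slug" is filled in from the URL kwargs because
# url_params=True; multiple=True deletes all matching keys.
from plane.utils.cache import invalidate_cache  # module path assumed

@invalidate_cache(path="/api/workspaces/:slug/projects/", url_params=True, user=False, multiple=True)
def partial_update(self, request, slug, pk):
    ...
```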

View File

@ -52,9 +52,9 @@ def string_date_filter(
filter[f"{date_filter}__gte"] = now - timedelta(weeks=duration) filter[f"{date_filter}__gte"] = now - timedelta(weeks=duration)
else: else:
if offset == "fromnow": if offset == "fromnow":
filter[f"{date_filter}__lte"] = now + timedelta(days=duration) filter[f"{date_filter}__lte"] = now + timedelta(weeks=duration)
else: else:
filter[f"{date_filter}__lte"] = now - timedelta(days=duration) filter[f"{date_filter}__lte"] = now - timedelta(weeks=duration)
def date_filter(filter, date_term, queries): def date_filter(filter, date_term, queries):
@ -83,25 +83,25 @@ def date_filter(filter, date_term, queries):
filter[f"{date_term}__lte"] = date_query[0] filter[f"{date_term}__lte"] = date_query[0]
def filter_state(params, filter, method): def filter_state(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
states = [ states = [
item for item in params.get("state").split(",") if item != "null" item for item in params.get("state").split(",") if item != "null"
] ]
states = filter_valid_uuids(states) states = filter_valid_uuids(states)
if len(states) and "" not in states: if len(states) and "" not in states:
filter["state__in"] = states filter[f"{prefix}state__in"] = states
else: else:
if ( if (
params.get("state", None) params.get("state", None)
and len(params.get("state")) and len(params.get("state"))
and params.get("state") != "null" and params.get("state") != "null"
): ):
filter["state__in"] = params.get("state") filter[f"{prefix}state__in"] = params.get("state")
return filter return filter
def filter_state_group(params, filter, method): def filter_state_group(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
state_group = [ state_group = [
item item
@ -109,18 +109,18 @@ def filter_state_group(params, filter, method):
if item != "null" if item != "null"
] ]
if len(state_group) and "" not in state_group: if len(state_group) and "" not in state_group:
filter["state__group__in"] = state_group filter[f"{prefix}state__group__in"] = state_group
else: else:
if ( if (
params.get("state_group", None) params.get("state_group", None)
and len(params.get("state_group")) and len(params.get("state_group"))
and params.get("state_group") != "null" and params.get("state_group") != "null"
): ):
filter["state__group__in"] = params.get("state_group") filter[f"{prefix}state__group__in"] = params.get("state_group")
return filter return filter
def filter_estimate_point(params, filter, method): def filter_estimate_point(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
estimate_points = [ estimate_points = [
item item
@ -128,18 +128,20 @@ def filter_estimate_point(params, filter, method):
if item != "null" if item != "null"
] ]
if len(estimate_points) and "" not in estimate_points: if len(estimate_points) and "" not in estimate_points:
filter["estimate_point__in"] = estimate_points filter[f"{prefix}estimate_point__in"] = estimate_points
else: else:
if ( if (
params.get("estimate_point", None) params.get("estimate_point", None)
and len(params.get("estimate_point")) and len(params.get("estimate_point"))
and params.get("estimate_point") != "null" and params.get("estimate_point") != "null"
): ):
filter["estimate_point__in"] = params.get("estimate_point") filter[f"{prefix}estimate_point__in"] = params.get(
"estimate_point"
)
return filter return filter
def filter_priority(params, filter, method): def filter_priority(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
priorities = [ priorities = [
item item
@ -147,47 +149,47 @@ def filter_priority(params, filter, method):
if item != "null" if item != "null"
] ]
if len(priorities) and "" not in priorities: if len(priorities) and "" not in priorities:
filter["priority__in"] = priorities filter[f"{prefix}priority__in"] = priorities
return filter return filter
def filter_parent(params, filter, method): def filter_parent(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
parents = [ parents = [
item for item in params.get("parent").split(",") if item != "null" item for item in params.get("parent").split(",") if item != "null"
] ]
parents = filter_valid_uuids(parents) parents = filter_valid_uuids(parents)
if len(parents) and "" not in parents: if len(parents) and "" not in parents:
filter["parent__in"] = parents filter[f"{prefix}parent__in"] = parents
else: else:
if ( if (
params.get("parent", None) params.get("parent", None)
and len(params.get("parent")) and len(params.get("parent"))
and params.get("parent") != "null" and params.get("parent") != "null"
): ):
filter["parent__in"] = params.get("parent") filter[f"{prefix}parent__in"] = params.get("parent")
return filter return filter
def filter_labels(params, filter, method): def filter_labels(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
labels = [ labels = [
item for item in params.get("labels").split(",") if item != "null" item for item in params.get("labels").split(",") if item != "null"
] ]
labels = filter_valid_uuids(labels) labels = filter_valid_uuids(labels)
if len(labels) and "" not in labels: if len(labels) and "" not in labels:
filter["labels__in"] = labels filter[f"{prefix}labels__in"] = labels
else: else:
if ( if (
params.get("labels", None) params.get("labels", None)
and len(params.get("labels")) and len(params.get("labels"))
and params.get("labels") != "null" and params.get("labels") != "null"
): ):
filter["labels__in"] = params.get("labels") filter[f"{prefix}labels__in"] = params.get("labels")
return filter return filter
def filter_assignees(params, filter, method): def filter_assignees(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
assignees = [ assignees = [
item item
@ -196,18 +198,18 @@ def filter_assignees(params, filter, method):
] ]
assignees = filter_valid_uuids(assignees) assignees = filter_valid_uuids(assignees)
if len(assignees) and "" not in assignees: if len(assignees) and "" not in assignees:
filter["assignees__in"] = assignees filter[f"{prefix}assignees__in"] = assignees
else: else:
if ( if (
params.get("assignees", None) params.get("assignees", None)
and len(params.get("assignees")) and len(params.get("assignees"))
and params.get("assignees") != "null" and params.get("assignees") != "null"
): ):
filter["assignees__in"] = params.get("assignees") filter[f"{prefix}assignees__in"] = params.get("assignees")
return filter return filter
def filter_mentions(params, filter, method): def filter_mentions(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
mentions = [ mentions = [
item item
@ -216,18 +218,20 @@ def filter_mentions(params, filter, method):
] ]
mentions = filter_valid_uuids(mentions) mentions = filter_valid_uuids(mentions)
if len(mentions) and "" not in mentions: if len(mentions) and "" not in mentions:
filter["issue_mention__mention__id__in"] = mentions filter[f"{prefix}issue_mention__mention__id__in"] = mentions
else: else:
if ( if (
params.get("mentions", None) params.get("mentions", None)
and len(params.get("mentions")) and len(params.get("mentions"))
and params.get("mentions") != "null" and params.get("mentions") != "null"
): ):
filter["issue_mention__mention__id__in"] = params.get("mentions") filter[f"{prefix}issue_mention__mention__id__in"] = params.get(
"mentions"
)
return filter return filter
def filter_created_by(params, filter, method): def filter_created_by(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
created_bys = [ created_bys = [
item item
@ -236,94 +240,98 @@ def filter_created_by(params, filter, method):
] ]
created_bys = filter_valid_uuids(created_bys) created_bys = filter_valid_uuids(created_bys)
if len(created_bys) and "" not in created_bys: if len(created_bys) and "" not in created_bys:
filter["created_by__in"] = created_bys filter[f"{prefix}created_by__in"] = created_bys
else: else:
if ( if (
params.get("created_by", None) params.get("created_by", None)
and len(params.get("created_by")) and len(params.get("created_by"))
and params.get("created_by") != "null" and params.get("created_by") != "null"
): ):
filter["created_by__in"] = params.get("created_by") filter[f"{prefix}created_by__in"] = params.get("created_by")
return filter return filter
def filter_name(params, filter, method): def filter_name(params, filter, method, prefix=""):
if params.get("name", "") != "": if params.get("name", "") != "":
filter["name__icontains"] = params.get("name") filter[f"{prefix}name__icontains"] = params.get("name")
return filter return filter
def filter_created_at(params, filter, method): def filter_created_at(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
created_ats = params.get("created_at").split(",") created_ats = params.get("created_at").split(",")
if len(created_ats) and "" not in created_ats: if len(created_ats) and "" not in created_ats:
date_filter( date_filter(
filter=filter, filter=filter,
date_term="created_at__date", date_term=f"{prefix}created_at__date",
queries=created_ats, queries=created_ats,
) )
else: else:
if params.get("created_at", None) and len(params.get("created_at")): if params.get("created_at", None) and len(params.get("created_at")):
date_filter( date_filter(
filter=filter, filter=filter,
date_term="created_at__date", date_term=f"{prefix}created_at__date",
queries=params.get("created_at", []), queries=params.get("created_at", []),
) )
return filter return filter
def filter_updated_at(params, filter, method): def filter_updated_at(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
updated_ats = params.get("updated_at").split(",") updated_ats = params.get("updated_at").split(",")
if len(updated_ats) and "" not in updated_ats: if len(updated_ats) and "" not in updated_ats:
date_filter( date_filter(
filter=filter, filter=filter,
date_term="created_at__date", date_term=f"{prefix}created_at__date",
queries=updated_ats, queries=updated_ats,
) )
else: else:
if params.get("updated_at", None) and len(params.get("updated_at")): if params.get("updated_at", None) and len(params.get("updated_at")):
date_filter( date_filter(
filter=filter, filter=filter,
date_term="created_at__date", date_term=f"{prefix}created_at__date",
queries=params.get("updated_at", []), queries=params.get("updated_at", []),
) )
return filter return filter
def filter_start_date(params, filter, method): def filter_start_date(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
start_dates = params.get("start_date").split(",") start_dates = params.get("start_date").split(",")
if len(start_dates) and "" not in start_dates: if len(start_dates) and "" not in start_dates:
date_filter( date_filter(
filter=filter, date_term="start_date", queries=start_dates filter=filter,
date_term=f"{prefix}start_date",
queries=start_dates,
) )
else: else:
if params.get("start_date", None) and len(params.get("start_date")): if params.get("start_date", None) and len(params.get("start_date")):
filter["start_date"] = params.get("start_date") filter[f"{prefix}start_date"] = params.get("start_date")
return filter return filter
def filter_target_date(params, filter, method): def filter_target_date(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
target_dates = params.get("target_date").split(",") target_dates = params.get("target_date").split(",")
if len(target_dates) and "" not in target_dates: if len(target_dates) and "" not in target_dates:
date_filter( date_filter(
filter=filter, date_term="target_date", queries=target_dates filter=filter,
date_term=f"{prefix}target_date",
queries=target_dates,
) )
else: else:
if params.get("target_date", None) and len(params.get("target_date")): if params.get("target_date", None) and len(params.get("target_date")):
filter["target_date"] = params.get("target_date") filter[f"{prefix}target_date"] = params.get("target_date")
return filter return filter
def filter_completed_at(params, filter, method): def filter_completed_at(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
completed_ats = params.get("completed_at").split(",") completed_ats = params.get("completed_at").split(",")
if len(completed_ats) and "" not in completed_ats: if len(completed_ats) and "" not in completed_ats:
date_filter( date_filter(
filter=filter, filter=filter,
date_term="completed_at__date", date_term=f"{prefix}completed_at__date",
queries=completed_ats, queries=completed_ats,
) )
else: else:
@ -332,13 +340,13 @@ def filter_completed_at(params, filter, method):
): ):
date_filter( date_filter(
filter=filter, filter=filter,
date_term="completed_at__date", date_term=f"{prefix}completed_at__date",
queries=params.get("completed_at", []), queries=params.get("completed_at", []),
) )
return filter return filter
def filter_issue_state_type(params, filter, method): def filter_issue_state_type(params, filter, method, prefix=""):
type = params.get("type", "all") type = params.get("type", "all")
group = ["backlog", "unstarted", "started", "completed", "cancelled"] group = ["backlog", "unstarted", "started", "completed", "cancelled"]
if type == "backlog": if type == "backlog":
@ -346,65 +354,67 @@ def filter_issue_state_type(params, filter, method):
if type == "active": if type == "active":
group = ["unstarted", "started"] group = ["unstarted", "started"]
filter["state__group__in"] = group filter[f"{prefix}state__group__in"] = group
return filter return filter
def filter_project(params, filter, method): def filter_project(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
projects = [ projects = [
item for item in params.get("project").split(",") if item != "null" item for item in params.get("project").split(",") if item != "null"
] ]
projects = filter_valid_uuids(projects) projects = filter_valid_uuids(projects)
if len(projects) and "" not in projects: if len(projects) and "" not in projects:
filter["project__in"] = projects filter[f"{prefix}project__in"] = projects
else: else:
if ( if (
params.get("project", None) params.get("project", None)
and len(params.get("project")) and len(params.get("project"))
and params.get("project") != "null" and params.get("project") != "null"
): ):
filter["project__in"] = params.get("project") filter[f"{prefix}project__in"] = params.get("project")
return filter return filter
def filter_cycle(params, filter, method): def filter_cycle(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
cycles = [ cycles = [
item for item in params.get("cycle").split(",") if item != "null" item for item in params.get("cycle").split(",") if item != "null"
] ]
cycles = filter_valid_uuids(cycles) cycles = filter_valid_uuids(cycles)
if len(cycles) and "" not in cycles: if len(cycles) and "" not in cycles:
filter["issue_cycle__cycle_id__in"] = cycles filter[f"{prefix}issue_cycle__cycle_id__in"] = cycles
else: else:
if ( if (
params.get("cycle", None) params.get("cycle", None)
and len(params.get("cycle")) and len(params.get("cycle"))
and params.get("cycle") != "null" and params.get("cycle") != "null"
): ):
filter["issue_cycle__cycle_id__in"] = params.get("cycle") filter[f"{prefix}issue_cycle__cycle_id__in"] = params.get("cycle")
return filter return filter
def filter_module(params, filter, method): def filter_module(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
modules = [ modules = [
item for item in params.get("module").split(",") if item != "null" item for item in params.get("module").split(",") if item != "null"
] ]
modules = filter_valid_uuids(modules) modules = filter_valid_uuids(modules)
if len(modules) and "" not in modules: if len(modules) and "" not in modules:
filter["issue_module__module_id__in"] = modules filter[f"{prefix}issue_module__module_id__in"] = modules
else: else:
if ( if (
params.get("module", None) params.get("module", None)
and len(params.get("module")) and len(params.get("module"))
and params.get("module") != "null" and params.get("module") != "null"
): ):
filter["issue_module__module_id__in"] = params.get("module") filter[f"{prefix}issue_module__module_id__in"] = params.get(
"module"
)
return filter return filter
def filter_inbox_status(params, filter, method): def filter_inbox_status(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
status = [ status = [
item item
@ -412,30 +422,32 @@ def filter_inbox_status(params, filter, method):
if item != "null" if item != "null"
] ]
if len(status) and "" not in status: if len(status) and "" not in status:
filter["issue_inbox__status__in"] = status filter[f"{prefix}issue_inbox__status__in"] = status
else: else:
if ( if (
params.get("inbox_status", None) params.get("inbox_status", None)
and len(params.get("inbox_status")) and len(params.get("inbox_status"))
and params.get("inbox_status") != "null" and params.get("inbox_status") != "null"
): ):
filter["issue_inbox__status__in"] = params.get("inbox_status") filter[f"{prefix}issue_inbox__status__in"] = params.get(
"inbox_status"
)
return filter return filter
def filter_sub_issue_toggle(params, filter, method): def filter_sub_issue_toggle(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
sub_issue = params.get("sub_issue", "false") sub_issue = params.get("sub_issue", "false")
if sub_issue == "false": if sub_issue == "false":
filter["parent__isnull"] = True filter[f"{prefix}parent__isnull"] = True
else: else:
sub_issue = params.get("sub_issue", "false") sub_issue = params.get("sub_issue", "false")
if sub_issue == "false": if sub_issue == "false":
filter["parent__isnull"] = True filter[f"{prefix}parent__isnull"] = True
return filter return filter
def filter_subscribed_issues(params, filter, method): def filter_subscribed_issues(params, filter, method, prefix=""):
if method == "GET": if method == "GET":
subscribers = [ subscribers = [
item item
@ -444,28 +456,30 @@ def filter_subscribed_issues(params, filter, method):
] ]
subscribers = filter_valid_uuids(subscribers) subscribers = filter_valid_uuids(subscribers)
if len(subscribers) and "" not in subscribers: if len(subscribers) and "" not in subscribers:
filter["issue_subscribers__subscriber_id__in"] = subscribers filter[f"{prefix}issue_subscribers__subscriber_id__in"] = (
subscribers
)
else: else:
if ( if (
params.get("subscriber", None) params.get("subscriber", None)
and len(params.get("subscriber")) and len(params.get("subscriber"))
and params.get("subscriber") != "null" and params.get("subscriber") != "null"
): ):
filter["issue_subscribers__subscriber_id__in"] = params.get( filter[f"{prefix}issue_subscribers__subscriber_id__in"] = (
"subscriber" params.get("subscriber")
) )
return filter return filter
def filter_start_target_date_issues(params, filter, method): def filter_start_target_date_issues(params, filter, method, prefix=""):
start_target_date = params.get("start_target_date", "false") start_target_date = params.get("start_target_date", "false")
if start_target_date == "true": if start_target_date == "true":
filter["target_date__isnull"] = False filter[f"{prefix}target_date__isnull"] = False
filter["start_date__isnull"] = False filter[f"{prefix}start_date__isnull"] = False
return filter return filter
def issue_filters(query_params, method): def issue_filters(query_params, method, prefix=""):
filter = {} filter = {}
ISSUE_FILTER = { ISSUE_FILTER = {
@ -497,6 +511,5 @@ def issue_filters(query_params, method):
for key, value in ISSUE_FILTER.items(): for key, value in ISSUE_FILTER.items():
if key in query_params: if key in query_params:
func = value func = value
func(query_params, filter, method) func(query_params, filter, method, prefix)
return filter return filter
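
With the new prefix argument the same helpers can build lookups for related querysets; a quick sketch (module path assumed):

```python
# Sketch: identical query params produce plain or prefixed ORM lookups.
from plane.utils.issue_filters import issue_filters  # module path assumed

params = {"priority": "urgent,high", "state_group": "started"}

issue_filters(params, "GET")
# -> {"priority__in": ["urgent", "high"], "state__group__in": ["started"]}

issue_filters(params, "GET", prefix="issue__")
# -> {"issue__priority__in": [...], "issue__state__group__in": [...]}
```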

View File

@ -134,7 +134,7 @@ class OffsetPaginator:
results=results, results=results,
next=next_cursor, next=next_cursor,
prev=prev_cursor, prev=prev_cursor,
hits=None, hits=count,
max_hits=max_hits, max_hits=max_hits,
) )
@ -217,6 +217,7 @@ class BasePaginator:
"prev_page_results": cursor_result.prev.has_results, "prev_page_results": cursor_result.prev.has_results,
"count": cursor_result.__len__(), "count": cursor_result.__len__(),
"total_pages": cursor_result.max_hits, "total_pages": cursor_result.max_hits,
"total_results": cursor_result.hits,
"extra_stats": extra_stats, "extra_stats": extra_stats,
"results": results, "results": results,
} }

View File

@ -1,5 +1,3 @@
version: "3.8"
x-app-env: &app-env x-app-env: &app-env
environment: environment:
- NGINX_PORT=${NGINX_PORT:-80} - NGINX_PORT=${NGINX_PORT:-80}

View File

@ -1,5 +1,3 @@
version: "3.8"
networks: networks:
dev_env: dev_env:
driver: bridge driver: bridge

View File

@ -1,5 +1,3 @@
version: "3.8"
services: services:
web: web:
container_name: web container_name: web

View File

@ -28,7 +28,7 @@
"prettier": "latest", "prettier": "latest",
"prettier-plugin-tailwindcss": "^0.5.4", "prettier-plugin-tailwindcss": "^0.5.4",
"tailwindcss": "^3.3.3", "tailwindcss": "^3.3.3",
"turbo": "^1.11.3" "turbo": "^1.13.2"
}, },
"resolutions": { "resolutions": {
"@types/react": "18.2.42" "@types/react": "18.2.42"

View File

@ -28,13 +28,13 @@
"react-dom": "18.2.0" "react-dom": "18.2.0"
}, },
"dependencies": { "dependencies": {
"@plane/ui": "*",
"@tiptap/core": "^2.1.13", "@tiptap/core": "^2.1.13",
"@tiptap/extension-blockquote": "^2.1.13", "@tiptap/extension-blockquote": "^2.1.13",
"@tiptap/extension-code-block-lowlight": "^2.1.13",
"@tiptap/extension-color": "^2.1.13",
"@tiptap/extension-image": "^2.1.13", "@tiptap/extension-image": "^2.1.13",
"@tiptap/extension-list-item": "^2.1.13", "@tiptap/extension-list-item": "^2.1.13",
"@tiptap/extension-mention": "^2.1.13", "@tiptap/extension-mention": "^2.1.13",
"@tiptap/extension-placeholder": "^2.3.0",
"@tiptap/extension-task-item": "^2.1.13", "@tiptap/extension-task-item": "^2.1.13",
"@tiptap/extension-task-list": "^2.1.13", "@tiptap/extension-task-list": "^2.1.13",
"@tiptap/extension-text-style": "^2.1.13", "@tiptap/extension-text-style": "^2.1.13",
@ -50,6 +50,7 @@
"linkifyjs": "^4.1.3", "linkifyjs": "^4.1.3",
"lowlight": "^3.0.0", "lowlight": "^3.0.0",
"lucide-react": "^0.294.0", "lucide-react": "^0.294.0",
"prosemirror-codemark": "^0.4.2",
"react-moveable": "^0.54.2", "react-moveable": "^0.54.2",
"tailwind-merge": "^1.14.0", "tailwind-merge": "^1.14.0",
"tippy.js": "^6.3.7", "tippy.js": "^6.3.7",

View File

@ -7,11 +7,22 @@ export const insertContentAtSavedSelection = (
content: string, content: string,
savedSelection: Selection savedSelection: Selection
) => { ) => {
if (editorRef.current && savedSelection) { if (!editorRef.current || editorRef.current.isDestroyed) {
editorRef.current console.error("Editor reference is not available or has been destroyed.");
.chain() return;
.focus() }
.insertContentAt(savedSelection?.anchor, content)
.run(); if (!savedSelection) {
console.error("Saved selection is invalid.");
return;
}
const docSize = editorRef.current.state.doc.content.size;
const safePosition = Math.max(0, Math.min(savedSelection.anchor, docSize));
try {
editorRef.current.chain().focus().insertContentAt(safePosition, content).run();
} catch (error) {
console.error("An error occurred while inserting content at saved selection:", error);
} }
}; };

View File

@ -0,0 +1,40 @@
import { Editor } from "@tiptap/react";
export interface IMarking {
type: "heading";
level: number;
text: string;
sequence: number;
}
function findNthH1(editor: Editor, n: number, level: number): number {
let count = 0;
let pos = 0;
editor.state.doc.descendants((node, position) => {
if (node.type.name === "heading" && node.attrs.level === level) {
count++;
if (count === n) {
pos = position;
return false;
}
}
});
return pos;
}
function scrollToNode(editor: Editor, pos: number): void {
const headingNode = editor.state.doc.nodeAt(pos);
if (headingNode) {
const headingDOM = editor.view.nodeDOM(pos);
if (headingDOM instanceof HTMLElement) {
headingDOM.scrollIntoView({ behavior: "smooth" });
}
}
}
export function scrollSummary(editor: Editor, marking: IMarking) {
if (editor) {
const pos = findNthH1(editor, marking.sequence, marking.level);
scrollToNode(editor, pos);
}
}

View File

@ -1,111 +1,215 @@
import { useEditor as useCustomEditor, Editor } from "@tiptap/react"; import { useEditor as useCustomEditor, Editor } from "@tiptap/react";
import { useImperativeHandle, useRef, MutableRefObject, useState } from "react"; import { useImperativeHandle, useRef, MutableRefObject, useState, useEffect } from "react";
import { CoreEditorProps } from "src/ui/props"; import { CoreEditorProps } from "src/ui/props";
import { CoreEditorExtensions } from "src/ui/extensions"; import { CoreEditorExtensions } from "src/ui/extensions";
import { EditorProps } from "@tiptap/pm/view"; import { EditorProps } from "@tiptap/pm/view";
import { getTrimmedHTML } from "src/lib/utils"; import { getTrimmedHTML } from "src/lib/utils";
import { DeleteImage } from "src/types/delete-image"; import { DeleteImage } from "src/types/delete-image";
import { IMentionSuggestion } from "src/types/mention-suggestion"; import { IMentionHighlight, IMentionSuggestion } from "src/types/mention-suggestion";
import { RestoreImage } from "src/types/restore-image"; import { RestoreImage } from "src/types/restore-image";
import { UploadImage } from "src/types/upload-image"; import { UploadImage } from "src/types/upload-image";
import { Selection } from "@tiptap/pm/state"; import { Selection } from "@tiptap/pm/state";
import { insertContentAtSavedSelection } from "src/helpers/insert-content-at-cursor-position"; import { insertContentAtSavedSelection } from "src/helpers/insert-content-at-cursor-position";
import { EditorMenuItemNames, getEditorMenuItems } from "src/ui/menus/menu-items";
import { EditorRefApi } from "src/types/editor-ref-api";
import { IMarking, scrollSummary } from "src/helpers/scroll-to-node";
interface CustomEditorProps { interface CustomEditorProps {
id?: string;
uploadFile: UploadImage; uploadFile: UploadImage;
restoreFile: RestoreImage; restoreFile: RestoreImage;
rerenderOnPropsChange?: {
id: string;
description_html: string;
};
deleteFile: DeleteImage; deleteFile: DeleteImage;
cancelUploadImage?: () => any; cancelUploadImage?: () => void;
setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void; initialValue: string;
setShouldShowAlert?: (showAlert: boolean) => void; editorClassName: string;
value: string; // undefined when prop is not passed, null if intentionally passed to stop
debouncedUpdatesEnabled?: boolean; // swr syncing
onStart?: (json: any, html: string) => void; value: string | null | undefined;
onChange?: (json: any, html: string) => void; onChange?: (json: object, html: string) => void;
extensions?: any; extensions?: any;
editorProps?: EditorProps; editorProps?: EditorProps;
forwardedRef?: any; forwardedRef?: MutableRefObject<EditorRefApi | null>;
mentionHighlights?: string[]; mentionHandler: {
mentionSuggestions?: IMentionSuggestion[]; highlights: () => Promise<IMentionHighlight[]>;
suggestions?: () => Promise<IMentionSuggestion[]>;
};
handleEditorReady?: (value: boolean) => void;
placeholder?: string | ((isFocused: boolean) => string);
tabIndex?: number;
} }
export const useEditor = ({ export const useEditor = ({
uploadFile, uploadFile,
id = "",
deleteFile, deleteFile,
cancelUploadImage, cancelUploadImage,
editorProps = {}, editorProps = {},
initialValue,
editorClassName,
value, value,
rerenderOnPropsChange,
extensions = [], extensions = [],
onStart,
onChange, onChange,
setIsSubmitting,
forwardedRef, forwardedRef,
tabIndex,
restoreFile, restoreFile,
setShouldShowAlert, handleEditorReady,
mentionHighlights, mentionHandler,
mentionSuggestions, placeholder,
}: CustomEditorProps) => { }: CustomEditorProps) => {
const editor = useCustomEditor( const editor = useCustomEditor({
{ editorProps: {
editorProps: { ...CoreEditorProps(editorClassName),
...CoreEditorProps(uploadFile, setIsSubmitting), ...editorProps,
...editorProps, },
}, extensions: [
extensions: [ ...CoreEditorExtensions({
...CoreEditorExtensions( mentionConfig: {
{ mentionSuggestions: mentionHandler.suggestions ?? (() => Promise.resolve<IMentionSuggestion[]>([])),
mentionSuggestions: mentionSuggestions ?? [], mentionHighlights: mentionHandler.highlights ?? [],
mentionHighlights: mentionHighlights ?? [], },
}, fileConfig: {
deleteFile, deleteFile,
restoreFile, restoreFile,
cancelUploadImage cancelUploadImage,
), uploadFile,
...extensions, },
], placeholder,
content: typeof value === "string" && value.trim() !== "" ? value : "<p></p>", tabIndex,
onCreate: async ({ editor }) => { }),
onStart?.(editor.getJSON(), getTrimmedHTML(editor.getHTML())); ...extensions,
}, ],
onTransaction: async ({ editor }) => { content: typeof initialValue === "string" && initialValue.trim() !== "" ? initialValue : "<p></p>",
setSavedSelection(editor.state.selection); onCreate: async () => {
}, handleEditorReady?.(true);
onUpdate: async ({ editor }) => {
setIsSubmitting?.("submitting");
setShouldShowAlert?.(true);
onChange?.(editor.getJSON(), getTrimmedHTML(editor.getHTML()));
},
}, },
[rerenderOnPropsChange] onTransaction: async ({ editor }) => {
); setSavedSelection(editor.state.selection);
},
onUpdate: async ({ editor }) => {
onChange?.(editor.getJSON(), getTrimmedHTML(editor.getHTML()));
},
onDestroy: async () => {
handleEditorReady?.(false);
},
});
const editorRef: MutableRefObject<Editor | null> = useRef(null); const editorRef: MutableRefObject<Editor | null> = useRef(null);
editorRef.current = editor;
const [savedSelection, setSavedSelection] = useState<Selection | null>(null); const [savedSelection, setSavedSelection] = useState<Selection | null>(null);
useImperativeHandle(forwardedRef, () => ({ // Inside your component or hook
clearEditor: () => { const savedSelectionRef = useRef(savedSelection);
editorRef.current?.commands.clearContent();
}, // Update the ref whenever savedSelection changes
setEditorValue: (content: string) => { useEffect(() => {
editorRef.current?.commands.setContent(content); savedSelectionRef.current = savedSelection;
}, }, [savedSelection]);
setEditorValueAtCursorPosition: (content: string) => {
if (savedSelection) { // Effect for syncing SWR data
insertContentAtSavedSelection(editorRef, content, savedSelection); useEffect(() => {
// value is null when intentionally passed where syncing is not yet
// supported and value is undefined when the data from swr is not populated
if (value === null || value === undefined) return;
if (editor && !editor.isDestroyed && !editor.storage.image.uploadInProgress) {
try {
editor.commands.setContent(value);
const currentSavedSelection = savedSelectionRef.current;
if (currentSavedSelection) {
const docLength = editor.state.doc.content.size;
const relativePosition = Math.min(currentSavedSelection.from, docLength - 1);
editor.commands.setTextSelection(relativePosition);
}
} catch (error) {
console.error("Error syncing editor content with external value:", error);
} }
}, }
})); }, [editor, value, id]);
useImperativeHandle(
forwardedRef,
() => ({
clearEditor: () => {
editorRef.current?.commands.clearContent();
},
setEditorValue: (content: string) => {
editorRef.current?.commands.setContent(content);
},
setEditorValueAtCursorPosition: (content: string) => {
if (savedSelection) {
insertContentAtSavedSelection(editorRef, content, savedSelection);
}
},
executeMenuItemCommand: (itemName: EditorMenuItemNames) => {
const editorItems = getEditorMenuItems(editorRef.current, uploadFile);
const getEditorMenuItem = (itemName: EditorMenuItemNames) => editorItems.find((item) => item.name === itemName);
const item = getEditorMenuItem(itemName);
if (item) {
if (item.name === "image") {
item.command(savedSelection);
} else {
item.command();
}
} else {
console.warn(`No command found for item: ${itemName}`);
}
},
isMenuItemActive: (itemName: EditorMenuItemNames): boolean => {
const editorItems = getEditorMenuItems(editorRef.current, uploadFile);
const getEditorMenuItem = (itemName: EditorMenuItemNames) => editorItems.find((item) => item.name === itemName);
const item = getEditorMenuItem(itemName);
return item ? item.isActive() : false;
},
onStateChange: (callback: () => void) => {
// Subscribe to editor state changes
editorRef.current?.on("transaction", () => {
callback();
});
// Return a function to unsubscribe to the continuous transactions of
// the editor on unmounting the component that has subscribed to this
// method
return () => {
editorRef.current?.off("transaction");
};
},
getMarkDown: (): string => {
const markdownOutput = editorRef.current?.storage.markdown.getMarkdown();
return markdownOutput;
},
scrollSummary: (marking: IMarking): void => {
if (!editorRef.current) return;
scrollSummary(editorRef.current, marking);
},
setFocusAtPosition: (position: number) => {
if (!editorRef.current || editorRef.current.isDestroyed) {
console.error("Editor reference is not available or has been destroyed.");
return;
}
try {
const docSize = editorRef.current.state.doc.content.size;
const safePosition = Math.max(0, Math.min(position, docSize));
editorRef.current
.chain()
.insertContentAt(safePosition, [{ type: "paragraph" }])
.focus()
.run();
} catch (error) {
console.error("An error occurred while setting focus at position:", error);
}
},
}),
[editorRef, savedSelection, uploadFile]
);
if (!editor) { if (!editor) {
return null; return null;
} }
// the editorRef is used to access the editor instance from outside the hook
// and should only be used after editor is initialized
editorRef.current = editor;
return editor; return editor;
}; };

View File

@ -1,53 +1,61 @@
import { useEditor as useCustomEditor, Editor } from "@tiptap/react"; import { useEditor as useCustomEditor, Editor } from "@tiptap/react";
import { useImperativeHandle, useRef, MutableRefObject } from "react"; import { useImperativeHandle, useRef, MutableRefObject, useEffect } from "react";
import { CoreReadOnlyEditorExtensions } from "src/ui/read-only/extensions"; import { CoreReadOnlyEditorExtensions } from "src/ui/read-only/extensions";
import { CoreReadOnlyEditorProps } from "src/ui/read-only/props"; import { CoreReadOnlyEditorProps } from "src/ui/read-only/props";
import { EditorProps } from "@tiptap/pm/view"; import { EditorProps } from "@tiptap/pm/view";
import { IMentionSuggestion } from "src/types/mention-suggestion"; import { EditorReadOnlyRefApi } from "src/types/editor-ref-api";
import { IMarking, scrollSummary } from "src/helpers/scroll-to-node";
import { IMentionHighlight } from "src/types/mention-suggestion";
interface CustomReadOnlyEditorProps { interface CustomReadOnlyEditorProps {
value: string; initialValue: string;
forwardedRef?: any; editorClassName: string;
forwardedRef?: MutableRefObject<EditorReadOnlyRefApi | null>;
extensions?: any; extensions?: any;
editorProps?: EditorProps; editorProps?: EditorProps;
rerenderOnPropsChange?: { handleEditorReady?: (value: boolean) => void;
id: string; mentionHandler: {
description_html: string; highlights: () => Promise<IMentionHighlight[]>;
}; };
mentionHighlights?: string[];
mentionSuggestions?: IMentionSuggestion[];
} }
export const useReadOnlyEditor = ({ export const useReadOnlyEditor = ({
value, initialValue,
editorClassName,
forwardedRef, forwardedRef,
extensions = [], extensions = [],
editorProps = {}, editorProps = {},
rerenderOnPropsChange, handleEditorReady,
mentionHighlights, mentionHandler,
mentionSuggestions,
}: CustomReadOnlyEditorProps) => { }: CustomReadOnlyEditorProps) => {
const editor = useCustomEditor( const editor = useCustomEditor({
{ editable: false,
editable: false, content: typeof initialValue === "string" && initialValue.trim() !== "" ? initialValue : "<p></p>",
content: typeof value === "string" && value.trim() !== "" ? value : "<p></p>", editorProps: {
editorProps: { ...CoreReadOnlyEditorProps(editorClassName),
...CoreReadOnlyEditorProps, ...editorProps,
...editorProps,
},
extensions: [
...CoreReadOnlyEditorExtensions({
mentionSuggestions: mentionSuggestions ?? [],
mentionHighlights: mentionHighlights ?? [],
}),
...extensions,
],
}, },
[rerenderOnPropsChange] onCreate: async () => {
); handleEditorReady?.(true);
},
extensions: [
...CoreReadOnlyEditorExtensions({
mentionHighlights: mentionHandler.highlights,
}),
...extensions,
],
onDestroy: () => {
handleEditorReady?.(false);
},
});
// for syncing swr data on tab refocus etc
useEffect(() => {
if (initialValue === null || initialValue === undefined) return;
if (editor && !editor.isDestroyed) editor?.commands.setContent(initialValue);
}, [editor, initialValue]);
const editorRef: MutableRefObject<Editor | null> = useRef(null); const editorRef: MutableRefObject<Editor | null> = useRef(null);
editorRef.current = editor;
useImperativeHandle(forwardedRef, () => ({ useImperativeHandle(forwardedRef, () => ({
clearEditor: () => { clearEditor: () => {
@ -56,11 +64,20 @@ export const useReadOnlyEditor = ({
setEditorValue: (content: string) => { setEditorValue: (content: string) => {
editorRef.current?.commands.setContent(content); editorRef.current?.commands.setContent(content);
}, },
getMarkDown: (): string => {
const markdownOutput = editorRef.current?.storage.markdown.getMarkdown();
return markdownOutput;
},
scrollSummary: (marking: IMarking): void => {
if (!editorRef.current) return;
scrollSummary(editorRef.current, marking);
},
})); }));
if (!editor) { if (!editor) {
return null; return null;
} }
editorRef.current = editor;
return editor; return editor;
}; };

View File

@ -26,6 +26,7 @@ export * from "src/lib/editor-commands";
// types // types
export type { DeleteImage } from "src/types/delete-image"; export type { DeleteImage } from "src/types/delete-image";
export type { UploadImage } from "src/types/upload-image"; export type { UploadImage } from "src/types/upload-image";
export type { EditorRefApi, EditorReadOnlyRefApi } from "src/types/editor-ref-api";
export type { RestoreImage } from "src/types/restore-image"; export type { RestoreImage } from "src/types/restore-image";
export type { IMentionHighlight, IMentionSuggestion } from "src/types/mention-suggestion"; export type { IMentionHighlight, IMentionSuggestion } from "src/types/mention-suggestion";
export type { ISlashCommandItem, CommandProps } from "src/types/slash-commands-suggestion"; export type { ISlashCommandItem, CommandProps } from "src/types/slash-commands-suggestion";

View File

@ -1,21 +1,22 @@
import { Editor, Range } from "@tiptap/core"; import { Editor, Range } from "@tiptap/core";
import { startImageUpload } from "src/ui/plugins/upload-image"; import { startImageUpload } from "src/ui/plugins/upload-image";
import { findTableAncestor } from "src/lib/utils"; import { findTableAncestor } from "src/lib/utils";
import { Selection } from "@tiptap/pm/state";
import { UploadImage } from "src/types/upload-image"; import { UploadImage } from "src/types/upload-image";
export const toggleHeadingOne = (editor: Editor, range?: Range) => { export const toggleHeadingOne = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).clearNodes().setNode("heading", { level: 1 }).run(); if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 1 }).run();
else editor.chain().focus().clearNodes().toggleHeading({ level: 1 }).run(); else editor.chain().focus().toggleHeading({ level: 1 }).run();
}; };
export const toggleHeadingTwo = (editor: Editor, range?: Range) => { export const toggleHeadingTwo = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).clearNodes().setNode("heading", { level: 2 }).run(); if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 2 }).run();
else editor.chain().focus().clearNodes().toggleHeading({ level: 2 }).run(); else editor.chain().focus().toggleHeading({ level: 2 }).run();
}; };
export const toggleHeadingThree = (editor: Editor, range?: Range) => { export const toggleHeadingThree = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).clearNodes().setNode("heading", { level: 3 }).run(); if (range) editor.chain().focus().deleteRange(range).setNode("heading", { level: 3 }).run();
else editor.chain().focus().clearNodes().toggleHeading({ level: 3 }).run(); else editor.chain().focus().toggleHeading({ level: 3 }).run();
}; };
export const toggleBold = (editor: Editor, range?: Range) => { export const toggleBold = (editor: Editor, range?: Range) => {
@ -33,48 +34,98 @@ export const toggleUnderline = (editor: Editor, range?: Range) => {
else editor.chain().focus().toggleUnderline().run(); else editor.chain().focus().toggleUnderline().run();
}; };
const replaceCodeBlockWithContent = (editor: Editor) => {
try {
const { schema } = editor.state;
const { paragraph } = schema.nodes;
let replaced = false;
const replaceCodeBlock = (from: number, to: number, textContent: string) => {
const docSize = editor.state.doc.content.size;
if (from < 0 || to > docSize || from > to) {
console.error("Invalid range for replacement: ", from, to, "in a document of size", docSize);
return;
}
// split the textContent by new lines to handle each line as a separate paragraph
const lines = textContent.split(/\r?\n/);
const tr = editor.state.tr;
// Calculate the position for inserting the first paragraph
let insertPos = from;
// Remove the code block first
tr.delete(from, to);
// For each line, create a paragraph node and insert it
lines.forEach((line) => {
const paragraphNode = paragraph.create({}, schema.text(line));
tr.insert(insertPos, paragraphNode);
// Update insertPos for the next insertion
insertPos += paragraphNode.nodeSize;
});
// Dispatch the transaction
editor.view.dispatch(tr);
replaced = true;
};
editor.state.doc.nodesBetween(editor.state.selection.from, editor.state.selection.to, (node, pos) => {
if (node.type === schema.nodes.codeBlock) {
const startPos = pos;
const endPos = pos + node.nodeSize;
const textContent = node.textContent;
replaceCodeBlock(startPos, endPos, textContent);
return false;
}
});
if (!replaced) {
console.log("No code block to replace.");
}
} catch (error) {
console.error("An error occurred while replacing code block content:", error);
}
};
export const toggleCodeBlock = (editor: Editor, range?: Range) => {
try {
if (editor.isActive("codeBlock")) {
replaceCodeBlockWithContent(editor);
return;
}
const { from, to } = range || editor.state.selection;
const text = editor.state.doc.textBetween(from, to, "\n");
const isMultiline = text.includes("\n");
if (editor.state.selection.empty) {
editor.chain().focus().toggleCodeBlock().run();
} else if (isMultiline) {
editor.chain().focus().deleteRange({ from, to }).insertContentAt(from, `\`\`\`\n${text}\n\`\`\``).run();
} else {
editor.chain().focus().toggleCode().run();
}
} catch (error) {
console.error("An error occurred while toggling code block:", error);
}
}; };
export const toggleOrderedList = (editor: Editor, range?: Range) => { export const toggleOrderedList = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).clearNodes().toggleOrderedList().run(); if (range) editor.chain().focus().deleteRange(range).toggleOrderedList().run();
else editor.chain().focus().clearNodes().toggleOrderedList().run(); else editor.chain().focus().toggleOrderedList().run();
}; };
export const toggleBulletList = (editor: Editor, range?: Range) => { export const toggleBulletList = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).clearNodes().toggleBulletList().run(); if (range) editor.chain().focus().deleteRange(range).toggleBulletList().run();
else editor.chain().focus().clearNodes().toggleBulletList().run(); else editor.chain().focus().toggleBulletList().run();
}; };
export const toggleTaskList = (editor: Editor, range?: Range) => { export const toggleTaskList = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).clearNodes().toggleTaskList().run(); if (range) editor.chain().focus().deleteRange(range).toggleTaskList().run();
else editor.chain().focus().clearNodes().toggleTaskList().run(); else editor.chain().focus().toggleTaskList().run();
}; };
export const toggleStrike = (editor: Editor, range?: Range) => { export const toggleStrike = (editor: Editor, range?: Range) => {
@ -83,17 +134,19 @@ export const toggleStrike = (editor: Editor, range?: Range) => {
}; };
export const toggleBlockquote = (editor: Editor, range?: Range) => { export const toggleBlockquote = (editor: Editor, range?: Range) => {
if (range) editor.chain().focus().deleteRange(range).clearNodes().toggleBlockquote().run(); if (range) editor.chain().focus().deleteRange(range).toggleBlockquote().run();
else editor.chain().focus().clearNodes().toggleBlockquote().run(); else editor.chain().focus().toggleBlockquote().run();
}; };
export const insertTableCommand = (editor: Editor, range?: Range) => { export const insertTableCommand = (editor: Editor, range?: Range) => {
if (typeof window !== "undefined") { if (typeof window !== "undefined") {
const selection = window.getSelection();
if (selection) {
if (selection.rangeCount !== 0) {
const range = selection.getRangeAt(0);
if (findTableAncestor(range.startContainer)) {
return;
}
}
}
}
@ -112,7 +165,7 @@ export const setLinkEditor = (editor: Editor, url: string) => {
export const insertImageCommand = ( export const insertImageCommand = (
editor: Editor, editor: Editor,
uploadFile: UploadImage, uploadFile: UploadImage,
setIsSubmitting?: (isSubmitting: "submitting" | "submitted" | "saved") => void, savedSelection?: Selection | null,
range?: Range range?: Range
) => { ) => {
if (range) editor.chain().focus().deleteRange(range).run(); if (range) editor.chain().focus().deleteRange(range).run();
@ -122,8 +175,8 @@ export const insertImageCommand = (
input.onchange = async () => { input.onchange = async () => {
if (input.files?.length) { if (input.files?.length) {
const file = input.files[0]; const file = input.files[0];
const pos = editor.view.state.selection.from; const pos = savedSelection?.anchor ?? editor.view.state.selection.from;
startImageUpload(file, editor.view, pos, uploadFile, setIsSubmitting); startImageUpload(editor, file, editor.view, pos, uploadFile);
} }
}; };
input.click(); input.click();
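
For reference, a minimal sketch (not part of this changeset) of how these command helpers are typically wired into a slash-command item; the item shape and titles are assumptions, and only the imported helpers and the { editor, range } payload come from this package:

import { Editor, Range } from "@tiptap/core";
import { toggleCodeBlock, toggleBlockquote } from "src/lib/editor-commands"; // path as exported from the package index above

// Hypothetical slash-command items; each receives the editor and the range of the typed "/" trigger.
export const exampleSlashItems = [
  {
    title: "Code block",
    command: ({ editor, range }: { editor: Editor; range: Range }) => toggleCodeBlock(editor, range),
  },
  {
    title: "Quote",
    command: ({ editor, range }: { editor: Editor; range: Range }) => toggleBlockquote(editor, range),
  },
];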

View File

@ -4,15 +4,17 @@ import { twMerge } from "tailwind-merge";
interface EditorClassNames { interface EditorClassNames {
noBorder?: boolean; noBorder?: boolean;
borderOnFocus?: boolean; borderOnFocus?: boolean;
containerClassName?: string;
}
export const getEditorClassNames = ({ noBorder, borderOnFocus, containerClassName }: EditorClassNames) =>
cn(
"w-full max-w-full sm:rounded-lg focus:outline-none focus:border-0",
{
"border border-custom-border-200": !noBorder,
"focus:border border-custom-border-300": borderOnFocus,
},
containerClassName
);
export function cn(...inputs: ClassValue[]) { export function cn(...inputs: ClassValue[]) {

View File

@ -7,10 +7,17 @@
} }
/* block quotes */ /* block quotes */
.ProseMirror blockquote {
font-style: normal;
font-weight: 400;
border-left: 3px solid rgb(var(--color-border-300));
}
.ProseMirror blockquote p::before, .ProseMirror blockquote p::before,
.ProseMirror blockquote p::after { .ProseMirror blockquote p::after {
display: none; display: none;
} }
/* end block quotes */
.ProseMirror code::before, .ProseMirror code::before,
.ProseMirror code::after { .ProseMirror code::after {
@ -28,8 +35,8 @@
/* Custom image styles */ /* Custom image styles */
.ProseMirror img { .ProseMirror img {
transition: filter 0.1s ease-in-out; transition: filter 0.1s ease-in-out;
margin-top: 0 !important; margin-top: 8px;
margin-bottom: 0 !important; margin-bottom: 0;
&:hover { &:hover {
cursor: pointer; cursor: pointer;
@ -37,22 +44,49 @@
} }
&.ProseMirror-selectednode { &.ProseMirror-selectednode {
outline: 3px solid #5abbf7; outline: 3px solid rgba(var(--color-primary-100));
filter: brightness(90%); filter: brightness(90%);
} }
} }
.ProseMirror-gapcursor:after { /* Custom gap cursor styles */
.ProseMirror-gapcursor::after {
border-top: 1px solid rgb(var(--color-text-100)) !important; border-top: 1px solid rgb(var(--color-text-100)) !important;
} }
/* Custom TODO list checkboxes shoutout to this awesome tutorial: https://moderncss.dev/pure-css-custom-checkbox-style/ */ /* to-do list */
ul[data-type="taskList"] li {
font-size: 1rem;
line-height: 1.5;
}
ul[data-type="taskList"] li > label { ul[data-type="taskList"] li > label {
margin-right: 0.2rem; margin: 0.1rem 0.15rem 0 0;
user-select: none; user-select: none;
} }
ul[data-type="taskList"] li > label input[type="checkbox"] {
border: 1px solid rgba(var(--color-border-300)) !important;
outline: none;
border-radius: 2px;
transform: scale(1.05);
}
ul[data-type="taskList"] li > label input[type="checkbox"]:hover {
background-color: rgba(var(--color-background-80)) !important;
}
ul[data-type="taskList"] li > label input[type="checkbox"]:checked {
background-color: rgba(var(--color-primary-100)) !important;
border-color: rgba(var(--color-primary-100)) !important;
color: white !important;
}
ul[data-type="taskList"] li > label input[type="checkbox"]:checked:hover {
background-color: rgba(var(--color-primary-300)) !important;
border-color: rgba(var(--color-primary-300)) !important;
}
@media screen and (max-width: 768px) { @media screen and (max-width: 768px) {
ul[data-type="taskList"] li > label { ul[data-type="taskList"] li > label {
margin-right: 0.5rem; margin-right: 0.5rem;
@ -60,6 +94,7 @@ ul[data-type="taskList"] li > label {
} }
ul[data-type="taskList"] li > label input[type="checkbox"] { ul[data-type="taskList"] li > label input[type="checkbox"] {
position: relative;
-webkit-appearance: none; -webkit-appearance: none;
appearance: none; appearance: none;
background-color: rgb(var(--color-background-100)); background-color: rgb(var(--color-background-100));
@ -71,8 +106,6 @@ ul[data-type="taskList"] li > label input[type="checkbox"] {
border: 1.5px solid rgb(var(--color-text-100)); border: 1.5px solid rgb(var(--color-text-100));
margin-right: 0.2rem; margin-right: 0.2rem;
margin-top: 0.15rem; margin-top: 0.15rem;
display: grid;
place-content: center;
&:hover { &:hover {
background-color: rgb(var(--color-background-80)); background-color: rgb(var(--color-background-80));
@ -82,27 +115,32 @@ ul[data-type="taskList"] li > label input[type="checkbox"] {
background-color: rgb(var(--color-background-90)); background-color: rgb(var(--color-background-90));
} }
/* check sign */
&::before { &::before {
content: ""; content: "";
position: absolute;
top: 50%;
left: 50%;
width: 0.5em; width: 0.5em;
height: 0.5em; height: 0.5em;
transform: scale(0); transform: scale(0);
transform-origin: center;
transition: 120ms transform ease-in-out; transition: 120ms transform ease-in-out;
box-shadow: inset 1em 1em; box-shadow: inset 1em 1em;
transform-origin: center;
clip-path: polygon(14% 44%, 0 65%, 50% 100%, 100% 16%, 80% 0%, 43% 62%); clip-path: polygon(14% 44%, 0 65%, 50% 100%, 100% 16%, 80% 0%, 43% 62%);
} }
&:checked::before { &:checked::before {
transform: scale(1); transform: scale(1) translate(-50%, -50%);
} }
} }
ul[data-type="taskList"] li[data-checked="true"] > div > p { ul[data-type="taskList"] li[data-checked="true"] > div > p {
color: rgb(var(--color-text-200)); color: rgb(var(--color-text-400));
text-decoration: line-through; text-decoration: line-through;
text-decoration-thickness: 2px; text-decoration-thickness: 2px;
} }
/* end to-do list */
/* Overwrite tippy-box original max-width */ /* Overwrite tippy-box original max-width */
@ -133,12 +171,12 @@ ul[data-type="taskList"] li[data-checked="true"] > div > p {
-moz-appearance: textfield; -moz-appearance: textfield;
} }
.fadeIn { .fade-in {
opacity: 1; opacity: 1;
transition: opacity 0.3s ease-in; transition: opacity 0.3s ease-in;
} }
.fadeOut { .fade-out {
opacity: 0; opacity: 0;
transition: opacity 0.2s ease-out; transition: opacity 0.2s ease-out;
} }
@ -149,7 +187,7 @@ ul[data-type="taskList"] li[data-checked="true"] > div > p {
margin-top: 0 !important; margin-top: 0 !important;
margin-bottom: 0 !important; margin-bottom: 0 !important;
&:before { &::before {
content: ""; content: "";
box-sizing: border-box; box-sizing: border-box;
position: absolute; position: absolute;
@ -175,21 +213,13 @@ ul[data-type="taskList"] li[data-checked="true"] > div > p {
cursor: col-resize; cursor: col-resize;
} }
.ProseMirror table * p {
padding: 0px 1px;
margin: 6px 2px;
}
.ProseMirror table * .is-empty::before { .ProseMirror table * .is-empty::before {
opacity: 0; opacity: 0;
} }
.ProseMirror pre { .ProseMirror pre {
background: rgba(var(--color-background-80)); font-family: JetBrainsMono, monospace;
border-radius: 0.5rem; tab-size: 2;
color: rgba(var(--color-text-100));
font-family: "JetBrainsMono", monospace;
padding: 0.75rem 1rem;
} }
.ProseMirror pre code { .ProseMirror pre code {
@ -206,7 +236,7 @@ div[data-type="horizontalRule"] {
margin-bottom: 0; margin-bottom: 0;
& > div { & > div {
border-bottom: 1px solid rgb(var(--color-text-100)); border-bottom: 2px solid rgb(var(--color-border-200));
} }
} }
@ -214,3 +244,107 @@ div[data-type="horizontalRule"] {
.moveable-control-box { .moveable-control-box {
z-index: 10 !important; z-index: 10 !important;
} }
/* Cursor styles for the inline code blocks */
@keyframes blink {
49% {
border-color: unset;
}
50% {
border-color: transparent;
}
99% {
border-color: transparent;
}
}
.no-cursor {
caret-color: transparent;
}
div:focus .fake-cursor,
span:focus .fake-cursor {
margin-right: -1px;
border-left-width: 1.5px;
border-left-style: solid;
animation: blink 1s;
animation-iteration-count: infinite;
position: relative;
z-index: 1;
}
/* numbered, bulleted and to-do lists spacing */
.prose ol:where(.prose > :first-child):not(:where([class~="not-prose"], [class~="not-prose"] *)),
.prose
ul:not([data-type="taskList"]):where(.prose > :first-child):not(:where([class~="not-prose"], [class~="not-prose"] *)),
.prose ul[data-type="taskList"]:where(.prose > :first-child) {
margin-top: 0.25rem !important;
margin-bottom: 1px !important;
}
.prose ol:not(:where(.prose > :first-child):not(:where([class~="not-prose"], [class~="not-prose"] *))),
.prose
ul:not([data-type="taskList"]):not(
:where(.prose > :first-child):not(:where([class~="not-prose"], [class~="not-prose"] *))
),
.prose ul[data-type="taskList"]:not(:where(.prose > :first-child)) {
margin-top: calc(0.25rem + 3px) !important;
margin-bottom: 1px !important;
}
ol ol,
ol ul:not([data-type="taskList"]),
ul:not([data-type="taskList"]) ul:not([data-type="taskList"]),
ul:not([data-type="taskList"]) ol {
margin-top: 0.45rem !important;
}
ul[data-type="taskList"] ul[data-type="taskList"] {
margin-top: 0.6rem;
}
/* end numbered, bulleted and to-do lists spacing */
/* tailwind typography */
.prose :where(h1):not(:where([class~="not-prose"], [class~="not-prose"] *)) {
margin-top: 2rem;
margin-bottom: 4px;
font-size: 1.875rem;
font-weight: 700;
line-height: 1.3;
}
.prose :where(h2):not(:where([class~="not-prose"], [class~="not-prose"] *)) {
margin-top: 1.4rem;
margin-bottom: 1px;
font-size: 1.5rem;
font-weight: 600;
line-height: 1.3;
}
.prose :where(h3):not(:where([class~="not-prose"], [class~="not-prose"] *)) {
margin-top: 1rem;
margin-bottom: 1px;
font-size: 1.25rem;
line-height: 1.3;
}
.prose :where(p):not(:where([class~="not-prose"], [class~="not-prose"] *)) {
margin-top: 0.25rem;
margin-bottom: 1px;
padding: 3px 2px;
font-size: 1rem;
line-height: 1.5;
}
.prose :where(ol):not(:where([class~="not-prose"], [class~="not-prose"] *)) li p,
.prose :where(ul):not(:where([class~="not-prose"], [class~="not-prose"] *)) li p {
font-size: 1rem;
line-height: 1.5;
}
.prose :where(.prose > :first-child):not(:where([class~="not-prose"], [class~="not-prose"] *)) {
margin-top: 0;
}
/* end tailwind typography */

View File

@ -1,23 +1,25 @@
.tableWrapper { .table-wrapper {
overflow-x: auto; overflow-x: auto;
padding: 2px;
width: fit-content; width: fit-content;
max-width: 100%; max-width: 100%;
} }
.tableWrapper table { .table-wrapper table {
border-collapse: collapse; border-collapse: collapse;
table-layout: fixed; table-layout: fixed;
margin: 0; margin: 0.5rem 0 1rem 0;
margin-bottom: 1rem; border: 1px solid rgba(var(--color-border-200));
border: 2px solid rgba(var(--color-border-300));
width: 100%; width: 100%;
} }
.table-wrapper table p {
font-size: 14px;
}
.table-wrapper table td,
.table-wrapper table th {
min-width: 1em; min-width: 1em;
border: 1px solid rgba(var(--color-border-300)); border: 1px solid rgba(var(--color-border-200));
padding: 10px 15px; padding: 10px 15px;
vertical-align: top; vertical-align: top;
box-sizing: border-box; box-sizing: border-box;
@ -29,86 +31,34 @@
} }
} }
.tableWrapper table td > *, .table-wrapper table th {
.tableWrapper table th > * { font-weight: 500;
margin: 0 !important;
padding: 0.25rem 0 !important;
}
.tableWrapper table td.has-focus,
.tableWrapper table th.has-focus {
box-shadow: rgba(var(--color-primary-300), 0.1) 0px 0px 0px 2px inset !important;
}
.tableWrapper table th {
font-weight: bold;
text-align: left; text-align: left;
background-color: #d9e4ff; background-color: rgba(var(--color-background-90));
color: #171717;
} }
.tableWrapper table th * { .table-wrapper table .selectedCell {
font-weight: 600; outline: 0.5px solid rgba(var(--color-primary-100));
} }
.tableWrapper table .selectedCell:after { /* table dropdown */
z-index: 2; .table-wrapper table .column-resize-handle {
position: absolute;
content: "";
left: 0;
right: 0;
top: 0;
bottom: 0;
background-color: rgba(var(--color-primary-300), 0.1);
pointer-events: none;
}
.colorPicker {
display: grid;
padding: 8px 8px;
grid-template-columns: repeat(6, 1fr);
gap: 5px;
}
.colorPickerLabel {
font-size: 0.85rem;
color: #6b7280;
padding: 8px 8px;
padding-bottom: 0px;
}
.colorPickerItem {
margin: 2px 0px;
width: 24px;
height: 24px;
border-radius: 4px;
border: none;
cursor: pointer;
}
.divider {
background-color: #e5e7eb;
height: 1px;
margin: 3px 0;
}
.tableWrapper table .column-resize-handle {
position: absolute; position: absolute;
right: -2px; right: -2px;
top: 0; top: 0;
bottom: -2px; width: 2px;
width: 4px; height: 100%;
z-index: 5; z-index: 5;
background-color: #d9e4ff; background-color: rgba(var(--color-primary-100));
pointer-events: none; pointer-events: none;
} }
.tableWrapper .tableControls { .table-wrapper .table-controls {
position: absolute; position: absolute;
} }
.tableWrapper .tableControls .columnsControl, .table-wrapper .table-controls .columns-control,
.tableWrapper .tableControls .rowsControl { .table-wrapper .table-controls .rows-control {
transition: opacity ease-in 100ms; transition: opacity ease-in 100ms;
position: absolute; position: absolute;
z-index: 5; z-index: 5;
@ -117,124 +67,50 @@
align-items: center; align-items: center;
} }
.tableWrapper .tableControls .columnsControl { .table-wrapper .table-controls .columns-control {
height: 20px; height: 20px;
transform: translateY(-50%); transform: translateY(-50%);
} }
.tableWrapper .tableControls .columnsControl .columnsControlDiv { .table-wrapper .table-controls .columns-control .columns-control-div {
color: white; color: white;
background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' width='24' height='24'%3E%3Cpath fill='none' d='M0 0h24v24H0z'/%3E%3Cpath fill='%238F95B2' d='M4.5 10.5c-.825 0-1.5.675-1.5 1.5s.675 1.5 1.5 1.5S6 12.825 6 12s-.675-1.5-1.5-1.5zm15 0c-.825 0-1.5.675-1.5 1.5s.675 1.5 1.5 1.5S21 12.825 21 12s-.675-1.5-1.5-1.5zm-7.5 0c-.825 0-1.5.675-1.5 1.5s.675 1.5 1.5 1.5 1.5-.675 1.5-1.5-.675-1.5-1.5-1.5z'/%3E%3C/svg%3E"); background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' width='24' height='24'%3E%3Cpath fill='none' d='M0 0h24v24H0z'/%3E%3Cpath fill='%238F95B2' d='M4.5 10.5c-.825 0-1.5.675-1.5 1.5s.675 1.5 1.5 1.5S6 12.825 6 12s-.675-1.5-1.5-1.5zm15 0c-.825 0-1.5.675-1.5 1.5s.675 1.5 1.5 1.5S21 12.825 21 12s-.675-1.5-1.5-1.5zm-7.5 0c-.825 0-1.5.675-1.5 1.5s.675 1.5 1.5 1.5 1.5-.675 1.5-1.5-.675-1.5-1.5-1.5z'/%3E%3C/svg%3E");
width: 30px; width: 30px;
height: 15px; height: 15px;
} }
.tableWrapper .tableControls .rowsControl { .table-wrapper .table-controls .rows-control {
width: 20px; width: 20px;
transform: translateX(-50%); transform: translateX(-50%);
} }
.tableWrapper .tableControls .rowsControl .rowsControlDiv { .table-wrapper .table-controls .rows-control .rows-control-div {
color: white; color: white;
background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' width='24' height='24'%3E%3Cpath fill='none' d='M0 0h24v24H0z'/%3E%3Cpath fill='%238F95B2' d='M12 3c-.825 0-1.5.675-1.5 1.5S11.175 6 12 6s1.5-.675 1.5-1.5S12.825 3 12 3zm0 15c-.825 0-1.5.675-1.5 1.5S11.175 21 12 21s1.5-.675 1.5-1.5S12.825 18 12 18zm0-7.5c-.825 0-1.5.675-1.5 1.5s.675 1.5 1.5 1.5 1.5-.675 1.5-1.5-.675-1.5-1.5-1.5z'/%3E%3C/svg%3E"); background-image: url("data:image/svg+xml,%3Csvg xmlns='http://www.w3.org/2000/svg' viewBox='0 0 24 24' width='24' height='24'%3E%3Cpath fill='none' d='M0 0h24v24H0z'/%3E%3Cpath fill='%238F95B2' d='M12 3c-.825 0-1.5.675-1.5 1.5S11.175 6 12 6s1.5-.675 1.5-1.5S12.825 3 12 3zm0 15c-.825 0-1.5.675-1.5 1.5S11.175 21 12 21s1.5-.675 1.5-1.5S12.825 18 12 18zm0-7.5c-.825 0-1.5.675-1.5 1.5s.675 1.5 1.5 1.5 1.5-.675 1.5-1.5-.675-1.5-1.5-1.5z'/%3E%3C/svg%3E");
height: 30px; height: 30px;
width: 15px; width: 15px;
} }
.tableWrapper .tableControls .rowsControlDiv { .table-wrapper .table-controls .rows-control-div,
background-color: #d9e4ff; .table-wrapper .table-controls .columns-control-div {
border: 1px solid rgba(var(--color-border-200)); background-color: rgba(var(--color-background-80));
border-radius: 2px; border: 0.5px solid rgba(var(--color-border-200));
background-size: 1.25rem;
background-repeat: no-repeat;
background-position: center;
transition:
transform ease-out 100ms,
background-color ease-out 100ms;
outline: none;
box-shadow: #000 0px 2px 4px;
cursor: pointer;
}
.tableWrapper .tableControls .columnsControlDiv {
background-color: #d9e4ff;
border: 1px solid rgba(var(--color-border-200));
border-radius: 2px;
background-size: 1.25rem;
background-repeat: no-repeat;
background-position: center;
transition:
transform ease-out 100ms,
background-color ease-out 100ms;
outline: none;
box-shadow: #000 0px 2px 4px;
cursor: pointer;
}
.tableWrapper .tableControls .tableToolbox,
.tableWrapper .tableControls .tableColorPickerToolbox {
border: 1px solid rgba(var(--color-border-300));
background-color: rgba(var(--color-background-100));
border-radius: 5px;
box-shadow: 0px 2px 4px rgba(0, 0, 0, 0.1);
padding: 0.25rem;
display: flex;
flex-direction: column;
width: max-content;
gap: 0.25rem;
}
.tableWrapper .tableControls .tableToolbox .toolboxItem,
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem {
background-color: rgba(var(--color-background-100));
display: flex;
align-items: center;
gap: 0.5rem;
border: none;
padding: 0.3rem 0.5rem 0.1rem 0.1rem;
border-radius: 4px; border-radius: 4px;
background-size: 1.25rem;
background-repeat: no-repeat;
background-position: center;
transition:
transform ease-out 100ms,
background-color ease-out 100ms;
outline: none;
box-shadow: rgba(var(--color-shadow-2xs));
cursor: pointer; cursor: pointer;
transition: all 0.2s;
} }
.tableWrapper .tableControls .tableToolbox .toolboxItem:hover, .resize-cursor .table-wrapper .table-controls .rows-control,
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem:hover { .table-wrapper.controls--disabled .table-controls .rows-control,
background-color: rgba(var(--color-background-80), 0.6); .resize-cursor .table-wrapper .table-controls .columns-control,
} .table-wrapper.controls--disabled .table-controls .columns-control {
.tableWrapper .tableControls .tableToolbox .toolboxItem .iconContainer,
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .iconContainer,
.tableWrapper .tableControls .tableToolbox .toolboxItem .colorContainer,
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .colorContainer {
padding: 4px 0px;
display: flex;
align-items: center;
justify-content: center;
width: 1.75rem;
height: 1.75rem;
}
.tableWrapper .tableControls .tableToolbox .toolboxItem .iconContainer svg,
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .iconContainer svg,
.tableWrapper .tableControls .tableToolbox .toolboxItem .colorContainer svg,
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .colorContainer svg {
width: 1rem;
height: 1rem;
}
.tableToolbox {
background-color: rgba(var(--color-background-100));
}
.tableWrapper .tableControls .tableToolbox .toolboxItem .label,
.tableWrapper .tableControls .tableColorPickerToolbox .toolboxItem .label {
font-size: 0.85rem;
color: rgba(var(--color-text-300));
}
.resize-cursor .tableWrapper .tableControls .rowsControl,
.tableWrapper.controls--disabled .tableControls .rowsControl,
.resize-cursor .tableWrapper .tableControls .columnsControl,
.tableWrapper.controls--disabled .tableControls .columnsControl {
opacity: 0; opacity: 0;
pointer-events: none; pointer-events: none;
} }

View File

@ -0,0 +1,17 @@
import { IMarking } from "src/helpers/scroll-to-node";
import { EditorMenuItemNames } from "src/ui/menus/menu-items";
export type EditorReadOnlyRefApi = {
getMarkDown: () => string;
clearEditor: () => void;
setEditorValue: (content: string) => void;
scrollSummary: (marking: IMarking) => void;
};
export interface EditorRefApi extends EditorReadOnlyRefApi {
setEditorValueAtCursorPosition: (content: string) => void;
executeMenuItemCommand: (itemName: EditorMenuItemNames) => void;
isMenuItemActive: (itemName: EditorMenuItemNames) => boolean;
onStateChange: (callback: () => void) => () => void;
setFocusAtPosition: (position: number) => void;
}
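
As a usage sketch (the consumer component, handler names, and import path are assumptions; only the EditorRefApi surface comes from this diff), a host component can drive the editor through this ref:

import { useRef } from "react";
import type { EditorRefApi } from "src/types/editor-ref-api"; // re-exported from the package index earlier in this diff

// Hypothetical consumer: the ref would be forwarded into the editor component, which wires it up via useImperativeHandle.
export const DescriptionActions = () => {
  const editorRef = useRef<EditorRefApi>(null);

  const handleSave = () => {
    const markdown = editorRef.current?.getMarkDown(); // read the current content as markdown
    console.log(markdown);
    editorRef.current?.setFocusAtPosition(0); // move the cursor back to the top
  };

  return <button onClick={handleSave}>Save</button>;
};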

View File

@ -1,10 +1,18 @@
import { Editor, Range } from "@tiptap/react";
export type IMentionSuggestion = { export type IMentionSuggestion = {
id: string; id: string;
type: string; type: string;
entity_name: string;
entity_identifier: string;
avatar: string; avatar: string;
title: string; title: string;
subtitle: string; subtitle: string;
redirect_uri: string; redirect_uri: string;
}; };
export type CommandProps = {
editor: Editor;
range: Range;
};
export type IMentionHighlight = string; export type IMentionHighlight = string;
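
Purely illustrative, with placeholder values: an object matching the updated IMentionSuggestion shape now carries the entity fields alongside the display fields:

import { IMentionSuggestion } from "src/types/mention-suggestion";

// All field names come from the type above; the values are made up.
const exampleSuggestion: IMentionSuggestion = {
  id: "user_123",
  type: "User",
  entity_name: "user_mention",
  entity_identifier: "user_123",
  avatar: "/avatars/user_123.png",
  title: "Jane Doe",
  subtitle: "jane@example.com",
  redirect_uri: "/profile/user_123",
};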

View File

@ -4,57 +4,65 @@ import { cn } from "src/lib/utils";
interface EditorContainerProps { interface EditorContainerProps {
editor: Editor | null; editor: Editor | null;
editorClassNames: string; editorContainerClassName: string;
children: ReactNode; children: ReactNode;
hideDragHandle?: () => void; hideDragHandle?: () => void;
} }
export const EditorContainer: FC<EditorContainerProps> = (props) => { export const EditorContainer: FC<EditorContainerProps> = (props) => {
const { editor, editorClassNames, hideDragHandle, children } = props; const { editor, editorContainerClassName, hideDragHandle, children } = props;
const handleContainerClick = () => { const handleContainerClick = () => {
if (!editor) return; if (!editor) return;
if (!editor.isEditable) return; if (!editor.isEditable) return;
try {
if (editor.isFocused) return; // If editor is already focused, do nothing
const { selection } = editor.state; const { selection } = editor.state;
const currentNode = selection.$from.node(); const currentNode = selection.$from.node();
editor?.chain().focus("end", { scrollIntoView: false }).run(); // Focus the editor at the end editor?.chain().focus("end", { scrollIntoView: false }).run(); // Focus the editor at the end
if ( if (
currentNode.content.size === 0 && // Check if the current node is empty currentNode.content.size === 0 && // Check if the current node is empty
!( !(
editor.isActive("orderedList") || editor.isActive("orderedList") ||
editor.isActive("bulletList") || editor.isActive("bulletList") ||
editor.isActive("taskItem") || editor.isActive("taskItem") ||
editor.isActive("table") || editor.isActive("table") ||
editor.isActive("blockquote") || editor.isActive("blockquote") ||
editor.isActive("codeBlock") editor.isActive("codeBlock")
) // Check if it's an empty node within an orderedList, bulletList, taskItem, table, quote or code block ) // Check if it's an empty node within an orderedList, bulletList, taskItem, table, quote or code block
) { ) {
return; return;
}
// Insert a new paragraph at the end of the document
const endPosition = editor?.state.doc.content.size;
editor?.chain().insertContentAt(endPosition, { type: "paragraph" }).run();
// Focus the newly added paragraph for immediate editing
editor
.chain()
.setTextSelection(endPosition + 1)
.run();
} catch (error) {
console.error("An error occurred while handling container click to insert new empty node at bottom:", error);
} }
// Insert a new paragraph at the end of the document
const endPosition = editor?.state.doc.content.size;
editor?.chain().insertContentAt(endPosition, { type: "paragraph" }).run();
// Focus the newly added paragraph for immediate editing
editor
.chain()
.setTextSelection(endPosition + 1)
.run();
}; };
return ( return (
<div <div
id="editor-container" id="editor-container"
onClick={handleContainerClick} onClick={handleContainerClick}
onMouseLeave={hideDragHandle}
className={cn(
"cursor-text relative",
{
"active-editor": editor?.isFocused && editor?.isEditable,
},
editorContainerClassName
)}
> >
{children} {children}
</div> </div>

View File

@ -4,22 +4,15 @@ import { ImageResizer } from "src/ui/extensions/image/image-resize";
interface EditorContentProps { interface EditorContentProps {
editor: Editor | null; editor: Editor | null;
editorContentCustomClassNames: string | undefined;
children?: ReactNode; children?: ReactNode;
tabIndex?: number; tabIndex?: number;
} }
export const EditorContentWrapper: FC<EditorContentProps> = (props) => { export const EditorContentWrapper: FC<EditorContentProps> = (props) => {
const { editor, editorContentCustomClassNames = "", tabIndex, children } = props; const { editor, tabIndex, children } = props;
return ( return (
<div tabIndex={tabIndex} onFocus={() => editor?.chain().focus(undefined, { scrollIntoView: false }).run()}>
<EditorContent editor={editor} /> <EditorContent editor={editor} />
{editor?.isActive("image") && editor?.isEditable && <ImageResizer editor={editor} />} {editor?.isActive("image") && editor?.isEditable && <ImageResizer editor={editor} />}
{children} {children}

View File

@ -32,7 +32,8 @@ export const CustomCodeInlineExtension = Mark.create<CodeOptions>({
addOptions() { addOptions() {
return { return {
HTMLAttributes: { HTMLAttributes: {
class: "rounded-md bg-custom-primary-30 mx-1 px-1 py-[2px] font-mono font-medium text-custom-text-1000", class:
"rounded bg-custom-background-80 px-1 py-[2px] font-mono font-medium text-orange-500 border-[0.5px] border-custom-border-200 text-sm",
spellcheck: "false", spellcheck: "false",
}, },
}; };

View File

@ -0,0 +1,30 @@
// import CodeBlock, { CodeBlockOptions } from "@tiptap/extension-code-block";
import { CodeBlockOptions, CodeBlock } from "./code-block";
import { LowlightPlugin } from "./lowlight-plugin";
export interface CodeBlockLowlightOptions extends CodeBlockOptions {
lowlight: any;
defaultLanguage: string | null | undefined;
}
export const CodeBlockLowlight = CodeBlock.extend<CodeBlockLowlightOptions>({
addOptions() {
return {
...this.parent?.(),
lowlight: {},
defaultLanguage: null,
};
},
addProseMirrorPlugins() {
return [
...(this.parent?.() || []),
LowlightPlugin({
name: this.name,
lowlight: this.options.lowlight,
defaultLanguage: this.options.defaultLanguage,
}),
];
},
});
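
A rough configuration sketch (the ConfiguredCodeBlock name and defaultLanguage choice are assumptions; the lowlight setup mirrors the imports used elsewhere in this changeset):

import { common, createLowlight } from "lowlight";
import ts from "highlight.js/lib/languages/typescript";
import { CodeBlockLowlight } from "./code-block-lowlight";

const lowlight = createLowlight(common);
lowlight.register("ts", ts);

// Configure the extension with a lowlight instance; in this release the actual wiring
// happens through CustomCodeBlockExtension further below.
export const ConfiguredCodeBlock = CodeBlockLowlight.configure({
  lowlight,
  defaultLanguage: "ts",
});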

View File

@ -0,0 +1,56 @@
import { useState } from "react";
import { NodeViewWrapper, NodeViewContent } from "@tiptap/react";
import { common, createLowlight } from "lowlight";
import ts from "highlight.js/lib/languages/typescript";
import { CopyIcon, CheckIcon } from "lucide-react";
import { Node as ProseMirrorNode } from "@tiptap/pm/model";
import { cn } from "src/lib/utils";
// we just have ts support for now
const lowlight = createLowlight(common);
lowlight.register("ts", ts);
interface CodeBlockComponentProps {
node: ProseMirrorNode;
}
export const CodeBlockComponent: React.FC<CodeBlockComponentProps> = ({ node }) => {
const [copied, setCopied] = useState(false);
const copyToClipboard = async (e: React.MouseEvent<HTMLButtonElement, MouseEvent>) => {
try {
await navigator.clipboard.writeText(node.textContent);
setCopied(true);
setTimeout(() => setCopied(false), 1000);
} catch (error) {
setCopied(false);
}
e.preventDefault();
e.stopPropagation();
};
return (
<NodeViewWrapper className="code-block relative">
<button
type="button"
className={cn(
"group absolute top-2 right-2 z-10 flex items-center justify-center w-8 h-8 rounded-md bg-custom-background-80 border border-custom-border-200 transition duration-150 ease-in-out",
{
"bg-green-500/10 hover:bg-green-500/10 active:bg-green-500/10": copied,
}
)}
onClick={copyToClipboard}
>
{copied ? (
<CheckIcon className="h-3 w-3 text-green-500" strokeWidth={3} />
) : (
<CopyIcon className="h-3 w-3 text-custom-text-300 group-hover:text-custom-text-100" />
)}
</button>
<pre className="bg-custom-background-90 text-custom-text-100 rounded-lg p-8 pl-9 pr-4">
<NodeViewContent as="code" className="whitespace-[pre-wrap]" />
</pre>
</NodeViewWrapper>
);
};

View File

@ -0,0 +1,346 @@
import { mergeAttributes, Node, textblockTypeInputRule } from "@tiptap/core";
import { Plugin, PluginKey } from "@tiptap/pm/state";
export interface CodeBlockOptions {
/**
* Adds a prefix to language classes that are applied to code tags.
* Defaults to `'language-'`.
*/
languageClassPrefix: string;
/**
* Define whether the node should be exited on triple enter.
* Defaults to `true`.
*/
exitOnTripleEnter: boolean;
/**
* Define whether the node should be exited on arrow down if there is no node after it.
* Defaults to `true`.
*/
exitOnArrowDown: boolean;
/**
* Custom HTML attributes that should be added to the rendered HTML tag.
*/
HTMLAttributes: Record<string, any>;
}
declare module "@tiptap/core" {
interface Commands<ReturnType> {
codeBlock: {
/**
* Set a code block
*/
setCodeBlock: (attributes?: { language: string }) => ReturnType;
/**
* Toggle a code block
*/
toggleCodeBlock: (attributes?: { language: string }) => ReturnType;
};
}
}
export const backtickInputRegex = /^```([a-z]+)?[\s\n]$/;
export const tildeInputRegex = /^~~~([a-z]+)?[\s\n]$/;
export const CodeBlock = Node.create<CodeBlockOptions>({
name: "codeBlock",
addOptions() {
return {
languageClassPrefix: "language-",
exitOnTripleEnter: true,
exitOnArrowDown: true,
HTMLAttributes: {},
};
},
content: "text*",
marks: "",
group: "block",
code: true,
defining: true,
addAttributes() {
return {
language: {
default: null,
parseHTML: (element) => {
const { languageClassPrefix } = this.options;
// @ts-expect-error element is a DOM element
const classNames = [...(element.firstElementChild?.classList || [])];
const languages = classNames
.filter((className) => className.startsWith(languageClassPrefix))
.map((className) => className.replace(languageClassPrefix, ""));
const language = languages[0];
if (!language) {
return null;
}
return language;
},
rendered: false,
},
};
},
parseHTML() {
return [
{
tag: "pre",
preserveWhitespace: "full",
},
];
},
renderHTML({ node, HTMLAttributes }) {
return [
"pre",
mergeAttributes(this.options.HTMLAttributes, HTMLAttributes),
[
"code",
{
class: node.attrs.language ? this.options.languageClassPrefix + node.attrs.language : null,
},
0,
],
];
},
addCommands() {
return {
setCodeBlock:
(attributes) =>
({ commands }) =>
commands.setNode(this.name, attributes),
toggleCodeBlock:
(attributes) =>
({ commands }) =>
commands.toggleNode(this.name, "paragraph", attributes),
};
},
addKeyboardShortcuts() {
return {
"Mod-Alt-c": () => this.editor.commands.toggleCodeBlock(),
// remove code block when at start of document or code block is empty
Backspace: () => {
try {
const { empty, $anchor } = this.editor.state.selection;
const isAtStart = $anchor.pos === 1;
if (!empty || $anchor.parent.type.name !== this.name) {
return false;
}
if (isAtStart || !$anchor.parent.textContent.length) {
return this.editor.commands.clearNodes();
}
return false;
} catch (error) {
console.error("Error handling Backspace in code block:", error);
return false;
}
},
// exit node on triple enter
Enter: ({ editor }) => {
try {
if (!this.options.exitOnTripleEnter) {
return false;
}
const { state } = editor;
const { selection } = state;
const { $from, empty } = selection;
if (!empty || $from.parent.type !== this.type) {
return false;
}
const isAtEnd = $from.parentOffset === $from.parent.nodeSize - 2;
const endsWithDoubleNewline = $from.parent.textContent.endsWith("\n\n");
if (!isAtEnd || !endsWithDoubleNewline) {
return false;
}
return editor
.chain()
.command(({ tr }) => {
tr.delete($from.pos - 2, $from.pos);
return true;
})
.exitCode()
.run();
} catch (error) {
console.error("Error handling Enter in code block:", error);
return false;
}
},
// exit node on arrow down
ArrowDown: ({ editor }) => {
try {
if (!this.options.exitOnArrowDown) {
return false;
}
const { state } = editor;
const { selection, doc } = state;
const { $from, empty } = selection;
if (!empty || $from.parent.type !== this.type) {
return false;
}
const isAtEnd = $from.parentOffset === $from.parent.nodeSize - 2;
if (!isAtEnd) {
return false;
}
const after = $from.after();
if (after === undefined) {
return false;
}
const nodeAfter = doc.nodeAt(after);
if (nodeAfter) {
return false;
}
return editor.commands.exitCode();
} catch (error) {
console.error("Error handling ArrowDown in code block:", error);
return false;
}
},
};
},
addInputRules() {
return [
textblockTypeInputRule({
find: backtickInputRegex,
type: this.type,
getAttributes: (match) => ({
language: match[1],
}),
}),
textblockTypeInputRule({
find: tildeInputRegex,
type: this.type,
getAttributes: (match) => ({
language: match[1],
}),
}),
];
},
addProseMirrorPlugins() {
return [
new Plugin({
key: new PluginKey("codeBlockVSCodeHandlerCustom"),
props: {
handlePaste: (view, event) => {
try {
if (!event.clipboardData) {
return false;
}
if (this.editor.isActive(this.type.name)) {
return false;
}
if (this.editor.isActive("code")) {
// Check if it's an inline code block
event.preventDefault();
const text = event.clipboardData.getData("text/plain");
if (!text) {
console.error("Pasted text is empty.");
return false;
}
const { tr } = view.state;
const { $from, $to } = tr.selection;
if ($from.pos > $to.pos) {
console.error("Invalid selection range.");
return false;
}
const docSize = tr.doc.content.size;
if ($from.pos < 0 || $to.pos > docSize) {
console.error("Selection range is out of document bounds.");
return false;
}
// Extend the current selection to replace it with the pasted text
// wrapped in an inline code mark
const codeMark = view.state.schema.marks.code.create();
tr.replaceWith($from.pos, $to.pos, view.state.schema.text(text, [codeMark]));
view.dispatch(tr);
return true;
}
event.preventDefault();
const text = event.clipboardData.getData("text/plain");
const vscode = event.clipboardData.getData("vscode-editor-data");
const vscodeData = vscode ? JSON.parse(vscode) : undefined;
const language = vscodeData?.mode;
if (vscodeData && language) {
const { tr } = view.state;
const { $from } = tr.selection;
// Check if the current line is empty
const isCurrentLineEmpty = !$from.parent.textContent.trim();
let insertPos;
if (isCurrentLineEmpty) {
// If the current line is empty, use the current position
insertPos = $from.pos - 1;
} else {
// If the current line is not empty, insert below the current block node
insertPos = $from.end($from.depth) + 1;
}
// Ensure insertPos is within document bounds
if (insertPos < 0 || insertPos > tr.doc.content.size) {
console.error("Invalid insert position.");
return false;
}
// Create a new code block node with the pasted content
const textNode = view.state.schema.text(text.replace(/\r\n?/g, "\n"));
const codeBlock = this.type.create({ language }, textNode);
if (insertPos <= tr.doc.content.size) {
tr.insert(insertPos, codeBlock);
view.dispatch(tr);
return true;
}
return false;
} else {
// TODO: complicated paste logic, to be handled later
return false;
}
} catch (error) {
console.error("Error handling paste in CodeBlock extension:", error);
return false;
}
},
},
}),
];
},
});
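
For orientation, a small sketch (the helper name and editor instance are assumed) of the command surface this node registers:

import { Editor } from "@tiptap/core";

// Illustrative helper: toggles a TypeScript code block using the commands added by addCommands() above.
export const toggleTsCodeBlock = (editor: Editor) => {
  editor.commands.toggleCodeBlock({ language: "ts" });
};

// The input rules above also convert ```ts or ~~~ts typed at the start of a block into a codeBlock node.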

View File

@ -1,5 +1,3 @@
import CodeBlockLowlight from "@tiptap/extension-code-block-lowlight";
import { common, createLowlight } from "lowlight"; import { common, createLowlight } from "lowlight";
import ts from "highlight.js/lib/languages/typescript"; import ts from "highlight.js/lib/languages/typescript";
@ -7,90 +5,112 @@ const lowlight = createLowlight(common);
lowlight.register("ts", ts); lowlight.register("ts", ts);
import { Selection } from "@tiptap/pm/state"; import { Selection } from "@tiptap/pm/state";
import { ReactNodeViewRenderer } from "@tiptap/react";
import { CodeBlockComponent } from "./code-block-node-view";
import { CodeBlockLowlight } from "./code-block-lowlight";
export const CustomCodeBlockExtension = CodeBlockLowlight.extend({ export const CustomCodeBlockExtension = CodeBlockLowlight.extend({
addNodeView() {
return ReactNodeViewRenderer(CodeBlockComponent);
},
addKeyboardShortcuts() { addKeyboardShortcuts() {
return { return {
Tab: ({ editor }) => { Tab: ({ editor }) => {
try {
const { state } = editor;
const { selection } = state;
const { $from, empty } = selection;
if (!empty || $from.parent.type !== this.type) {
return false;
}
// Use ProseMirror's insertText transaction to insert the tab character
const tr = state.tr.insertText("\t", $from.pos, $from.pos);
editor.view.dispatch(tr);
return true;
} catch (error) {
console.error("Error handling Tab in CustomCodeBlockExtension:", error);
return false;
}
}, },
ArrowUp: ({ editor }) => { ArrowUp: ({ editor }) => {
try {
const { state } = editor;
const { selection } = state;
const { $from, empty } = selection;
if (!empty || $from.parent.type !== this.type) {
return false;
}
const isAtStart = $from.parentOffset === 0;
if (!isAtStart) {
return false;
}
// Check if codeBlock is the first node
const isFirstNode = $from.depth === 1 && $from.index($from.depth - 1) === 0;
if (isFirstNode) {
// Insert a new paragraph at the start of the document and move the cursor to it
return editor.commands.command(({ tr }) => {
const node = editor.schema.nodes.paragraph.create();
tr.insert(0, node);
tr.setSelection(Selection.near(tr.doc.resolve(1)));
return true;
});
}
return false;
} catch (error) {
console.error("Error handling ArrowUp in CustomCodeBlockExtension:", error);
return false;
}
}, },
ArrowDown: ({ editor }) => { ArrowDown: ({ editor }) => {
try {
if (!this.options.exitOnArrowDown) {
return false;
}
const { state } = editor;
const { selection, doc } = state;
const { $from, empty } = selection;
if (!empty || $from.parent.type !== this.type) {
return false;
}
const isAtEnd = $from.parentOffset === $from.parent.nodeSize - 2;
if (!isAtEnd) {
return false;
}
const after = $from.after();
if (after === undefined) {
return false;
}
const nodeAfter = doc.nodeAt(after);
if (nodeAfter) {
return editor.commands.command(({ tr }) => {
tr.setSelection(Selection.near(doc.resolve(after)));
return true;
});
}
return editor.commands.exitCode();
} catch (error) {
console.error("Error handling ArrowDown in CustomCodeBlockExtension:", error);
return false;
}
}, },
}; };
}, },

View File

@ -0,0 +1,153 @@
import { findChildren } from "@tiptap/core";
import { Node as ProsemirrorNode } from "@tiptap/pm/model";
import { Plugin, PluginKey } from "@tiptap/pm/state";
import { Decoration, DecorationSet } from "@tiptap/pm/view";
import highlight from "highlight.js/lib/core";
function parseNodes(nodes: any[], className: string[] = []): { text: string; classes: string[] }[] {
return nodes
.map((node) => {
const classes = [...className, ...(node.properties ? node.properties.className : [])];
if (node.children) {
return parseNodes(node.children, classes);
}
return {
text: node.value,
classes,
};
})
.flat();
}
function getHighlightNodes(result: any) {
// `.value` for lowlight v1, `.children` for lowlight v2
return result.value || result.children || [];
}
function registered(aliasOrLanguage: string) {
return Boolean(highlight.getLanguage(aliasOrLanguage));
}
function getDecorations({
doc,
name,
lowlight,
defaultLanguage,
}: {
doc: ProsemirrorNode;
name: string;
lowlight: any;
defaultLanguage: string | null | undefined;
}) {
const decorations: Decoration[] = [];
findChildren(doc, (node) => node.type.name === name).forEach((block) => {
let from = block.pos + 1;
const language = block.node.attrs.language || defaultLanguage;
const languages = lowlight.listLanguages();
const nodes =
language && (languages.includes(language) || registered(language))
? getHighlightNodes(lowlight.highlight(language, block.node.textContent))
: getHighlightNodes(lowlight.highlightAuto(block.node.textContent));
parseNodes(nodes).forEach((node) => {
const to = from + node.text.length;
if (node.classes.length) {
const decoration = Decoration.inline(from, to, {
class: node.classes.join(" "),
});
decorations.push(decoration);
}
from = to;
});
});
return DecorationSet.create(doc, decorations);
}
function isFunction(param: () => any) {
return typeof param === "function";
}
export function LowlightPlugin({
name,
lowlight,
defaultLanguage,
}: {
name: string;
lowlight: any;
defaultLanguage: string | null | undefined;
}) {
if (!["highlight", "highlightAuto", "listLanguages"].every((api) => isFunction(lowlight[api]))) {
throw Error("You should provide an instance of lowlight to use the code-block-lowlight extension");
}
const lowlightPlugin: Plugin<any> = new Plugin({
key: new PluginKey("lowlight"),
state: {
init: (_, { doc }) =>
getDecorations({
doc,
name,
lowlight,
defaultLanguage,
}),
apply: (transaction, decorationSet, oldState, newState) => {
const oldNodeName = oldState.selection.$head.parent.type.name;
const newNodeName = newState.selection.$head.parent.type.name;
const oldNodes = findChildren(oldState.doc, (node) => node.type.name === name);
const newNodes = findChildren(newState.doc, (node) => node.type.name === name);
if (
transaction.docChanged &&
// Apply decorations if:
// selection includes named node,
([oldNodeName, newNodeName].includes(name) ||
// OR transaction adds/removes named node,
newNodes.length !== oldNodes.length ||
// OR transaction has changes that completely encapsulate a node
// (for example, a transaction that affects the entire document).
// Such transactions can happen during collab syncing via y-prosemirror, for example.
transaction.steps.some(
(step) =>
// @ts-ignore
step.from !== undefined &&
// @ts-ignore
step.to !== undefined &&
oldNodes.some(
(node) =>
// @ts-ignore
node.pos >= step.from &&
// @ts-ignore
node.pos + node.node.nodeSize <= step.to
)
))
) {
return getDecorations({
doc: transaction.doc,
name,
lowlight,
defaultLanguage,
});
}
return decorationSet.map(transaction.mapping, transaction.doc);
},
},
props: {
decorations(state) {
return lowlightPlugin.getState(state);
},
},
});
return lowlightPlugin;
}

View File

@ -0,0 +1,9 @@
import { Extension } from "@tiptap/core";
import codemark from "prosemirror-codemark";
export const CustomCodeMarkPlugin = Extension.create({
name: "codemarkPlugin",
addProseMirrorPlugins() {
return codemark({ markType: this.editor.schema.marks.code });
},
});
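
A minimal sketch (import paths assumed) of how this plugin is meant to be registered next to the inline code mark it decorates:

import { CustomCodeMarkPlugin } from "./custom-code-mark-plugin"; // path assumed
import { CustomCodeInlineExtension } from "./code-inline"; // path assumed

// codemark only handles cursor movement around inline code marks, so it ships together
// with the inline code mark in the editor's extension list.
export const inlineCodeExtensions = [CustomCodeInlineExtension, CustomCodeMarkPlugin];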

View File

@ -0,0 +1,363 @@
import { EditorState } from "@tiptap/pm/state";
import { Editor, getNodeType, getNodeAtPosition, isAtEndOfNode, isAtStartOfNode, isNodeActive } from "@tiptap/core";
import { Node, NodeType } from "@tiptap/pm/model";
const findListItemPos = (typeOrName: string | NodeType, state: EditorState) => {
const { $from } = state.selection;
const nodeType = getNodeType(typeOrName, state.schema);
let currentNode = null;
let currentDepth = $from.depth;
let currentPos = $from.pos;
let targetDepth: number | null = null;
while (currentDepth > 0 && targetDepth === null) {
currentNode = $from.node(currentDepth);
if (currentNode.type === nodeType) {
targetDepth = currentDepth;
} else {
currentDepth -= 1;
currentPos -= 1;
}
}
if (targetDepth === null) {
return null;
}
return { $pos: state.doc.resolve(currentPos), depth: targetDepth };
};
const nextListIsDeeper = (typeOrName: string, state: EditorState) => {
const listDepth = getNextListDepth(typeOrName, state);
const listItemPos = findListItemPos(typeOrName, state);
if (!listItemPos || !listDepth) {
return false;
}
if (listDepth > listItemPos.depth) {
return true;
}
return false;
};
const getNextListDepth = (typeOrName: string, state: EditorState) => {
const listItemPos = findListItemPos(typeOrName, state);
if (!listItemPos) {
return false;
}
const [, depth] = getNodeAtPosition(state, typeOrName, listItemPos.$pos.pos + 4);
return depth;
};
const getPrevListDepth = (typeOrName: string, state: EditorState) => {
const listItemPos = findListItemPos(typeOrName, state);
if (!listItemPos) {
return false;
}
let depth = 0;
const pos = listItemPos.$pos;
// Adjust the position to ensure we're within the list item, especially for edge cases
const resolvedPos = state.doc.resolve(Math.max(pos.pos - 1, 0));
// Traverse up the document structure from the adjusted position
for (let d = resolvedPos.depth; d > 0; d--) {
const node = resolvedPos.node(d);
if (node.type.name === "bulletList" || node.type.name === "orderedList") {
// Increment depth for each list ancestor found
depth++;
}
}
// Subtract 1 from the calculated depth to get the parent list's depth
// This adjustment is necessary because the depth calculation includes the current list
// By subtracting 1, we aim to get the depth of the parent list, which helps in identifying if the current list is a sublist
depth = depth > 0 ? depth - 1 : 0;
// Double the depth value to get results as 2, 4, 6, 8, etc.
depth = depth * 2;
return depth;
};
export const handleBackspace = (editor: Editor, name: string, parentListTypes: string[]) => {
// this is required so that undoing an input rule (via undoInputRule) still works
if (editor.commands.undoInputRule()) {
return true;
}
// Check if a node range is selected, and if so, fall back to default backspace functionality
const { from, to } = editor.state.selection;
if (from !== to) {
// A range is selected, not just a cursor position; fall back to default behavior
return false; // Let the editor handle backspace by default
}
// if the current item is NOT inside a list item &
// the previous item is a list (orderedList or bulletList)
// move the cursor into the list and delete the current item
if (!isNodeActive(editor.state, name) && hasListBefore(editor.state, name, parentListTypes)) {
const { $anchor } = editor.state.selection;
const $listPos = editor.state.doc.resolve($anchor.before() - 1);
const listDescendants: Array<{ node: Node; pos: number }> = [];
$listPos.node().descendants((node, pos) => {
if (node.type.name === name) {
listDescendants.push({ node, pos });
}
});
const lastItem = listDescendants.at(-1);
if (!lastItem) {
return false;
}
const $lastItemPos = editor.state.doc.resolve($listPos.start() + lastItem.pos + 1);
// Check if positions are within the valid range
const startPos = $anchor.start() - 1;
const endPos = $anchor.end() + 1;
if (startPos < 0 || endPos > editor.state.doc.content.size) {
return false; // Invalid position, abort operation
}
return editor.chain().cut({ from: startPos, to: endPos }, $lastItemPos.end()).joinForward().run();
}
// if the cursor is not inside the current node type
// do nothing and proceed
if (!isNodeActive(editor.state, name)) {
return false;
}
// if the cursor is not at the start of a node
// do nothing and proceed
if (!isAtStartOfNode(editor.state)) {
return false;
}
const isParaSibling = isCurrentParagraphASibling(editor.state);
const isCurrentListItemSublist = prevListIsHigher(name, editor.state);
const listItemPos = findListItemPos(name, editor.state);
const nextListItemIsSibling = nextListIsSibling(name, editor.state);
if (!listItemPos) {
return false;
}
const currentNode = listItemPos.$pos.node(listItemPos.depth);
const currentListItemHasSubList = listItemHasSubList(name, editor.state, currentNode);
if (currentListItemHasSubList && isCurrentListItemSublist && isParaSibling) {
return false;
}
if (currentListItemHasSubList && isCurrentListItemSublist) {
editor.chain().liftListItem(name).run();
return editor.commands.joinItemBackward();
}
if (isCurrentListItemSublist && nextListItemIsSibling) {
return false;
}
if (isCurrentListItemSublist) {
return false;
}
if (currentListItemHasSubList) {
return false;
}
if (hasListItemBefore(name, editor.state)) {
return editor.chain().liftListItem(name).run();
}
if (!currentListItemHasSubList) {
return false;
}
// otherwise in the end, a backspace should
// always just lift the list item if
// joining / merging is not possible
return editor.chain().liftListItem(name).run();
};
export const handleDelete = (editor: Editor, name: string) => {
// if the cursor is not inside the current node type
// do nothing and proceed
if (!isNodeActive(editor.state, name)) {
return false;
}
// if the cursor is not at the end of a node
// do nothing and proceed
if (!isAtEndOfNode(editor.state, name)) {
return false;
}
// check if the next node is a list with a deeper depth
if (nextListIsDeeper(name, editor.state)) {
return editor
.chain()
.focus(editor.state.selection.from + 4)
.lift(name)
.joinBackward()
.run();
}
if (nextListIsHigher(name, editor.state)) {
return editor.chain().joinForward().joinBackward().run();
}
return editor.commands.joinItemForward();
};
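
As a usage sketch (not the extension shipped later in this diff, which also adds Tab handling, Mod- variants, and error handling), the two handlers above could be bound like this in a Tiptap extension:

import { Extension } from "@tiptap/core";

// Minimal sketch, assuming handleBackspace and handleDelete are imported from this file.
const MinimalListKeymap = Extension.create({
  name: "minimalListKeymap",
  addKeyboardShortcuts() {
    return {
      Backspace: ({ editor }) => handleBackspace(editor, "listItem", ["bulletList", "orderedList"]),
      Delete: ({ editor }) => handleDelete(editor, "listItem"),
    };
  },
});
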
const hasListBefore = (editorState: EditorState, name: string, parentListTypes: string[]) => {
const { $anchor } = editorState.selection;
const previousNodePos = Math.max(0, $anchor.pos - 2);
const previousNode = editorState.doc.resolve(previousNodePos).node();
if (!previousNode || !parentListTypes.includes(previousNode.type.name)) {
return false;
}
return true;
};
const prevListIsHigher = (typeOrName: string, state: EditorState) => {
const listDepth = getPrevListDepth(typeOrName, state);
const listItemPos = findListItemPos(typeOrName, state);
if (!listItemPos || !listDepth) {
return false;
}
if (listDepth < listItemPos.depth) {
return true;
}
return false;
};
const nextListIsSibling = (typeOrName: string, state: EditorState) => {
const listDepth = getNextListDepth(typeOrName, state);
const listItemPos = findListItemPos(typeOrName, state);
if (!listItemPos || !listDepth) {
return false;
}
if (listDepth === listItemPos.depth) {
return true;
}
return false;
};
export const nextListIsHigher = (typeOrName: string, state: EditorState) => {
const listDepth = getNextListDepth(typeOrName, state);
const listItemPos = findListItemPos(typeOrName, state);
if (!listItemPos || !listDepth) {
return false;
}
if (listDepth < listItemPos.depth) {
return true;
}
return false;
};
const listItemHasSubList = (typeOrName: string, state: EditorState, node?: Node) => {
if (!node) {
return false;
}
const nodeType = getNodeType(typeOrName, state.schema);
let hasSubList = false;
node.descendants((child) => {
if (child.type === nodeType) {
hasSubList = true;
}
});
return hasSubList;
};
const isCurrentParagraphASibling = (state: EditorState): boolean => {
const { $from } = state.selection;
const listItemNode = $from.node(-1); // Get the parent node of the current selection, assuming it's a list item.
const currentParagraphNode = $from.parent; // Get the current node where the selection is.
// Ensure we're in a paragraph and the parent is a list item.
if (currentParagraphNode.type.name === "paragraph" && listItemNode.type.name === "listItem") {
let paragraphNodesCount = 0;
listItemNode.forEach((child) => {
if (child.type.name === "paragraph") {
paragraphNodesCount++;
}
});
// If there are more than one paragraph nodes, the current paragraph is a sibling.
return paragraphNodesCount > 1;
}
return false;
};
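
To illustrate the shape isCurrentParagraphASibling detects, here is a trimmed, hypothetical ProseMirror/Tiptap document fragment:

// Sketch only: a list item whose content holds two paragraphs; with the cursor in
// either paragraph, paragraphNodesCount is 2 and the function returns true.
const listItemWithSiblingParagraph = {
  type: "listItem",
  content: [
    { type: "paragraph", content: [{ type: "text", text: "first line" }] },
    { type: "paragraph", content: [{ type: "text", text: "second line" }] },
  ],
};
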
export function isCursorInSubList(editor: Editor) {
const { selection } = editor.state;
const { $from } = selection;
// Check if the current node is a list item
const listItem = editor.schema.nodes.listItem;
// Traverse up the document tree from the current position
for (let depth = $from.depth; depth > 0; depth--) {
const node = $from.node(depth);
if (node.type === listItem) {
// If the parent of the list item is also a list, it's a sub-list
const parent = $from.node(depth - 1);
if (
parent &&
(parent.type === editor.schema.nodes.bulletList || parent.type === editor.schema.nodes.orderedList)
) {
return true;
}
}
}
return false;
}
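
A hedged usage sketch for the exported isCursorInSubList; indentOnTab is a hypothetical helper, not part of this diff:

// Sketch only: indent with Tab only when the cursor is not already inside a sub-list.
const indentOnTab = (editor: Editor): boolean => {
  if (isCursorInSubList(editor)) {
    return false; // already nested; leave Tab to the default handling
  }
  return editor.commands.sinkListItem("listItem");
};
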
const hasListItemBefore = (typeOrName: string, state: EditorState): boolean => {
const { $anchor } = state.selection;
const $targetPos = state.doc.resolve($anchor.pos - 2);
if ($targetPos.index() === 0) {
return false;
}
if ($targetPos.nodeBefore?.type.name !== typeOrName) {
return false;
}
return true;
};

View File

@ -1,30 +0,0 @@
import { getNodeType } from "@tiptap/core";
import { NodeType } from "@tiptap/pm/model";
import { EditorState } from "@tiptap/pm/state";
export const findListItemPos = (typeOrName: string | NodeType, state: EditorState) => {
const { $from } = state.selection;
const nodeType = getNodeType(typeOrName, state.schema);
let currentNode = null;
let currentDepth = $from.depth;
let currentPos = $from.pos;
let targetDepth: number | null = null;
while (currentDepth > 0 && targetDepth === null) {
currentNode = $from.node(currentDepth);
if (currentNode.type === nodeType) {
targetDepth = currentDepth;
} else {
currentDepth -= 1;
currentPos -= 1;
}
}
if (targetDepth === null) {
return null;
}
return { $pos: state.doc.resolve(currentPos), depth: targetDepth };
};

View File

@ -1,16 +0,0 @@
import { getNodeAtPosition } from "@tiptap/core";
import { EditorState } from "@tiptap/pm/state";
import { findListItemPos } from "src/ui/extensions/custom-list-keymap/list-helpers/find-list-item-pos";
export const getNextListDepth = (typeOrName: string, state: EditorState) => {
const listItemPos = findListItemPos(typeOrName, state);
if (!listItemPos) {
return false;
}
const [, depth] = getNodeAtPosition(state, typeOrName, listItemPos.$pos.pos + 4);
return depth;
};

View File

@ -1,66 +0,0 @@
import { Editor, isAtStartOfNode, isNodeActive } from "@tiptap/core";
import { Node } from "@tiptap/pm/model";
import { findListItemPos } from "src/ui/extensions/custom-list-keymap/list-helpers/find-list-item-pos";
import { hasListBefore } from "src/ui/extensions/custom-list-keymap/list-helpers/has-list-before";
export const handleBackspace = (editor: Editor, name: string, parentListTypes: string[]) => {
// this is required to still handle the undo handling
if (editor.commands.undoInputRule()) {
return true;
}
// if the cursor is not at the start of a node
// do nothing and proceed
if (!isAtStartOfNode(editor.state)) {
return false;
}
// if the current item is NOT inside a list item &
// the previous item is a list (orderedList or bulletList)
// move the cursor into the list and delete the current item
if (!isNodeActive(editor.state, name) && hasListBefore(editor.state, name, parentListTypes)) {
const { $anchor } = editor.state.selection;
const $listPos = editor.state.doc.resolve($anchor.before() - 1);
const listDescendants: Array<{ node: Node; pos: number }> = [];
$listPos.node().descendants((node, pos) => {
if (node.type.name === name) {
listDescendants.push({ node, pos });
}
});
const lastItem = listDescendants.at(-1);
if (!lastItem) {
return false;
}
const $lastItemPos = editor.state.doc.resolve($listPos.start() + lastItem.pos + 1);
return editor
.chain()
.cut({ from: $anchor.start() - 1, to: $anchor.end() + 1 }, $lastItemPos.end())
.joinForward()
.run();
}
// if the cursor is not inside the current node type
// do nothing and proceed
if (!isNodeActive(editor.state, name)) {
return false;
}
const listItemPos = findListItemPos(name, editor.state);
if (!listItemPos) {
return false;
}
// if current node is a list item and cursor it at start of a list node,
// simply lift the list item i.e. remove it as a list item (task/bullet/ordered)
// irrespective of above node being a list or not
return editor.chain().liftListItem(name).run();
};

View File

@ -1,34 +0,0 @@
import { Editor, isAtEndOfNode, isNodeActive } from "@tiptap/core";
import { nextListIsDeeper } from "src/ui/extensions/custom-list-keymap/list-helpers/next-list-is-deeper";
import { nextListIsHigher } from "src/ui/extensions/custom-list-keymap/list-helpers/next-list-is-higher";
export const handleDelete = (editor: Editor, name: string) => {
// if the cursor is not inside the current node type
// do nothing and proceed
if (!isNodeActive(editor.state, name)) {
return false;
}
// if the cursor is not at the end of a node
// do nothing and proceed
if (!isAtEndOfNode(editor.state, name)) {
return false;
}
// check if the next node is a list with a deeper depth
if (nextListIsDeeper(name, editor.state)) {
return editor
.chain()
.focus(editor.state.selection.from + 4)
.lift(name)
.joinBackward()
.run();
}
if (nextListIsHigher(name, editor.state)) {
return editor.chain().joinForward().joinBackward().run();
}
return editor.commands.joinItemForward();
};

View File

@ -1,15 +0,0 @@
import { EditorState } from "@tiptap/pm/state";
export const hasListBefore = (editorState: EditorState, name: string, parentListTypes: string[]) => {
const { $anchor } = editorState.selection;
const previousNodePos = Math.max(0, $anchor.pos - 2);
const previousNode = editorState.doc.resolve(previousNodePos).node();
if (!previousNode || !parentListTypes.includes(previousNode.type.name)) {
return false;
}
return true;
};

View File

@ -1,17 +0,0 @@
import { EditorState } from "@tiptap/pm/state";
export const hasListItemAfter = (typeOrName: string, state: EditorState): boolean => {
const { $anchor } = state.selection;
const $targetPos = state.doc.resolve($anchor.pos - $anchor.parentOffset - 2);
if ($targetPos.index() === $targetPos.parent.childCount - 1) {
return false;
}
if ($targetPos.nodeAfter?.type.name !== typeOrName) {
return false;
}
return true;
};

View File

@ -1,17 +0,0 @@
import { EditorState } from "@tiptap/pm/state";
export const hasListItemBefore = (typeOrName: string, state: EditorState): boolean => {
const { $anchor } = state.selection;
const $targetPos = state.doc.resolve($anchor.pos - 2);
if ($targetPos.index() === 0) {
return false;
}
if ($targetPos.nodeBefore?.type.name !== typeOrName) {
return false;
}
return true;
};

View File

@ -1,9 +0,0 @@
export * from "./find-list-item-pos";
export * from "./get-next-list-depth";
export * from "./handle-backspace";
export * from "./handle-delete";
export * from "./has-list-before";
export * from "./has-list-item-after";
export * from "./has-list-item-before";
export * from "./next-list-is-deeper";
export * from "./next-list-is-higher";

View File

@ -1,19 +0,0 @@
import { EditorState } from "@tiptap/pm/state";
import { findListItemPos } from "src/ui/extensions/custom-list-keymap/list-helpers/find-list-item-pos";
import { getNextListDepth } from "src/ui/extensions/custom-list-keymap/list-helpers/get-next-list-depth";
export const nextListIsDeeper = (typeOrName: string, state: EditorState) => {
const listDepth = getNextListDepth(typeOrName, state);
const listItemPos = findListItemPos(typeOrName, state);
if (!listItemPos || !listDepth) {
return false;
}
if (listDepth > listItemPos.depth) {
return true;
}
return false;
};

View File

@ -1,19 +0,0 @@
import { EditorState } from "@tiptap/pm/state";
import { findListItemPos } from "src/ui/extensions/custom-list-keymap/list-helpers/find-list-item-pos";
import { getNextListDepth } from "src/ui/extensions/custom-list-keymap/list-helpers/get-next-list-depth";
export const nextListIsHigher = (typeOrName: string, state: EditorState) => {
const listDepth = getNextListDepth(typeOrName, state);
const listItemPos = findListItemPos(typeOrName, state);
if (!listItemPos || !listDepth) {
return false;
}
if (listDepth < listItemPos.depth) {
return true;
}
return false;
};

View File

@ -9,86 +9,120 @@ export type ListKeymapOptions = {
  }>;
};

export const ListKeymap = ({ tabIndex }: { tabIndex?: number }) =>
  Extension.create<ListKeymapOptions>({
    name: "listKeymap",

    addOptions() {
      return {
        listTypes: [
          {
            itemName: "listItem",
            wrapperNames: ["bulletList", "orderedList"],
          },
          {
            itemName: "taskItem",
            wrapperNames: ["taskList"],
          },
        ],
      };
    },

    addKeyboardShortcuts() {
      return {
        Tab: () => {
          if (this.editor.isActive("listItem") || this.editor.isActive("taskItem")) {
            if (this.editor.commands.sinkListItem("listItem")) {
              return true;
            } else if (this.editor.commands.sinkListItem("taskItem")) {
              return true;
            }
            return true;
          }
          // if tabIndex is set, we don't want to handle Tab key
          if (tabIndex !== undefined && tabIndex !== null) {
            return false;
          }
          return true;
        },
        "Shift-Tab": () => {
          if (this.editor.commands.liftListItem("listItem")) {
            return true;
          } else if (this.editor.commands.liftListItem("taskItem")) {
            return true;
          }
          return true;
        },
        Delete: ({ editor }) => {
          try {
            let handled = false;
            this.options.listTypes.forEach(({ itemName }) => {
              if (editor.state.schema.nodes[itemName] === undefined) {
                return;
              }
              if (handleDelete(editor, itemName)) {
                handled = true;
              }
            });
            return handled;
          } catch (e) {
            console.log("error in handling Delete:", e);
            return false;
          }
        },
        "Mod-Delete": ({ editor }) => {
          let handled = false;
          this.options.listTypes.forEach(({ itemName }) => {
            if (editor.state.schema.nodes[itemName] === undefined) {
              return;
            }
            if (handleDelete(editor, itemName)) {
              handled = true;
            }
          });
          return handled;
        },
        Backspace: ({ editor }) => {
          try {
            let handled = false;
            this.options.listTypes.forEach(({ itemName, wrapperNames }) => {
              if (editor.state.schema.nodes[itemName] === undefined) {
                return;
              }
              if (handleBackspace(editor, itemName, wrapperNames)) {
                handled = true;
              }
            });
            return handled;
          } catch (e) {
            console.log("error in handling Backspace:", e);
            return false;
          }
        },
        "Mod-Backspace": ({ editor }) => {
          let handled = false;
          this.options.listTypes.forEach(({ itemName, wrapperNames }) => {
            if (editor.state.schema.nodes[itemName] === undefined) {
              return;
            }
            if (handleBackspace(editor, itemName, wrapperNames)) {
              handled = true;
            }
          });
          return handled;
        },
      };
    },
  });
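
For context, a sketch of how this factory-style extension could be registered with a Tiptap editor; the actual extension list used by the app is not part of this hunk:

import { Editor } from "@tiptap/core";

// Sketch only: per the Tab handler above, passing a tabIndex makes the extension
// return false for Tab outside lists, letting the browser move focus instead.
const editor = new Editor({
  extensions: [
    // ...the document's other extensions
    ListKeymap({ tabIndex: 1 }),
  ],
});
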

View File

@ -0,0 +1,45 @@
import { Extension } from "@tiptap/core";
import { Plugin, PluginKey } from "prosemirror-state";
import { UploadImage } from "src/types/upload-image";
import { startImageUpload } from "../plugins/upload-image";
export const DropHandlerExtension = (uploadFile: UploadImage) =>
Extension.create({
name: "dropHandler",
priority: 1000,
addProseMirrorPlugins() {
return [
new Plugin({
key: new PluginKey("drop-handler-plugin"),
props: {
handlePaste: (view, event) => {
if (event.clipboardData && event.clipboardData.files && event.clipboardData.files[0]) {
event.preventDefault();
const file = event.clipboardData.files[0];
const pos = view.state.selection.from;
startImageUpload(this.editor, file, view, pos, uploadFile);
return true;
}
return false;
},
handleDrop: (view, event, _slice, moved) => {
if (!moved && event.dataTransfer && event.dataTransfer.files && event.dataTransfer.files[0]) {
event.preventDefault();
const file = event.dataTransfer.files[0];
const coordinates = view.posAtCoords({
left: event.clientX,
top: event.clientY,
});
if (coordinates) {
startImageUpload(this.editor, file, view, coordinates.pos - 1, uploadFile);
}
return true;
}
return false;
},
},
}),
];
},
});
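
A hedged wiring sketch for the new extension; uploadToStorage and its endpoint are placeholders, and UploadImage is assumed to be roughly (file: File) => Promise<string>:

// Sketch only: a stand-in UploadImage implementation for illustration.
const uploadToStorage = async (file: File): Promise<string> => {
  const body = new FormData();
  body.append("asset", file);
  const response = await fetch("/api/assets/", { method: "POST", body });
  const data = await response.json();
  return data.url;
};

// The extension is then given the upload callback when building the extension list:
const dropHandler = DropHandlerExtension(uploadToStorage);
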

View File

@ -28,11 +28,22 @@ export const CustomHorizontalRule = Node.create<HorizontalRuleOptions>({
  group: "block",

  parseHTML() {
    return [
      {
        tag: `div[data-type="${this.name}"]`,
      },
      { tag: "hr" },
    ];
  },

  renderHTML({ HTMLAttributes }) {
    return [
      "div",
      mergeAttributes(this.options.HTMLAttributes, HTMLAttributes, {
        "data-type": this.name,
      }),
      ["div", {}],
    ];
  },

  addCommands() {

Some files were not shown because too many files have changed in this diff.