diff --git a/.env.example b/.env.example index 66237fd6b..1d95c56a0 100644 --- a/.env.example +++ b/.env.example @@ -21,6 +21,8 @@ NEXT_PUBLIC_TRACK_EVENTS=0 NEXT_PUBLIC_SLACK_CLIENT_ID="" # For Telemetry, set it to "app.plane.so" NEXT_PUBLIC_PLAUSIBLE_DOMAIN="" +# public boards deploy url +NEXT_PUBLIC_DEPLOY_URL="" # Backend # Debug value for api server use it as 0 for production use diff --git a/.gitignore b/.gitignore index 921881df4..1e99e102a 100644 --- a/.gitignore +++ b/.gitignore @@ -70,4 +70,6 @@ package-lock.json # lock files package-lock.json pnpm-lock.yaml -pnpm-workspace.yaml \ No newline at end of file +pnpm-workspace.yaml + +.npmrc diff --git a/Dockerfile b/Dockerfile index 5cff15dc5..388c5a4ef 100644 --- a/Dockerfile +++ b/Dockerfile @@ -5,9 +5,11 @@ WORKDIR /app ENV NEXT_PUBLIC_API_BASE_URL=http://NEXT_PUBLIC_API_BASE_URL_PLACEHOLDER RUN yarn global add turbo +RUN apk add tree COPY . . -RUN turbo prune --scope=app --docker +RUN turbo prune --scope=app --scope=plane-deploy --docker +CMD tree -I node_modules/ # Add lockfile and package.json's of isolated subworkspace FROM node:18-alpine AS installer @@ -21,14 +23,14 @@ COPY --from=builder /app/out/json/ . COPY --from=builder /app/out/yarn.lock ./yarn.lock RUN yarn install -# Build the project +# # Build the project COPY --from=builder /app/out/full/ . COPY turbo.json turbo.json COPY replace-env-vars.sh /usr/local/bin/ USER root RUN chmod +x /usr/local/bin/replace-env-vars.sh -RUN yarn turbo run build --filter=app +RUN yarn turbo run build ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \ BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL @@ -96,11 +98,16 @@ RUN adduser --system --uid 1001 captain COPY --from=installer /app/apps/app/next.config.js . COPY --from=installer /app/apps/app/package.json . +COPY --from=installer /app/apps/space/next.config.js . +COPY --from=installer /app/apps/space/package.json . COPY --from=installer --chown=captain:plane /app/apps/app/.next/standalone ./ COPY --from=installer --chown=captain:plane /app/apps/app/.next/static ./apps/app/.next/static +COPY --from=installer --chown=captain:plane /app/apps/space/.next/standalone ./ +COPY --from=installer --chown=captain:plane /app/apps/space/.next ./apps/space/.next + ENV NEXT_TELEMETRY_DISABLED 1 # RUN rm /etc/nginx/conf.d/default.conf diff --git a/README.md b/README.md index 20e34b673..2bc2764f3 100644 --- a/README.md +++ b/README.md @@ -61,6 +61,16 @@ chmod +x setup.sh > If running in a cloud env replace localhost with public facing IP address of the VM +- Setup Tiptap Pro + + Visit [Tiptap Pro](https://collab.tiptap.dev/pro-extensions) and signup (it is free). + + Create a **`.npmrc`** file, copy the following and replace your registry token generated from Tiptap Pro. 
+ +``` +@tiptap-pro:registry=https://registry.tiptap.dev/ +//registry.tiptap.dev/:_authToken=YOUR_REGISTRY_TOKEN +``` - Run Docker compose up ```bash diff --git a/apiserver/bin/user_script.py b/apiserver/bin/user_script.py index b554d2c40..e115b20b8 100644 --- a/apiserver/bin/user_script.py +++ b/apiserver/bin/user_script.py @@ -1,4 +1,4 @@ -import os, sys +import os, sys, random, string import uuid sys.path.append("/code") @@ -19,9 +19,9 @@ def populate(): user = User.objects.create(email=default_email, username=uuid.uuid4().hex) user.set_password(default_password) user.save() - print("User created") - - print("Success") + print(f"User created with an email: {default_email}") + else: + print(f"User already exists with the default email: {default_email}") if __name__ == "__main__": diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py index 2d38b1139..5855f0413 100644 --- a/apiserver/plane/api/serializers/__init__.py +++ b/apiserver/plane/api/serializers/__init__.py @@ -1,10 +1,5 @@ from .base import BaseSerializer -from .people import ( - ChangePasswordSerializer, - ResetPasswordSerializer, - TokenSerializer, -) -from .user import UserSerializer, UserLiteSerializer +from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer from .workspace import ( WorkSpaceSerializer, WorkSpaceMemberSerializer, @@ -12,6 +7,7 @@ from .workspace import ( WorkSpaceMemberInviteSerializer, WorkspaceLiteSerializer, WorkspaceThemeSerializer, + WorkspaceMemberAdminSerializer, ) from .project import ( ProjectSerializer, @@ -22,6 +18,8 @@ from .project import ( ProjectFavoriteSerializer, ProjectLiteSerializer, ProjectMemberLiteSerializer, + ProjectDeployBoardSerializer, + ProjectMemberAdminSerializer, ) from .state import StateSerializer, StateLiteSerializer from .view import IssueViewSerializer, IssueViewFavoriteSerializer @@ -45,6 +43,7 @@ from .issue import ( IssueSubscriberSerializer, IssueReactionSerializer, CommentReactionSerializer, + IssueVoteSerializer, ) from .module import ( @@ -82,3 +81,5 @@ from .inbox import InboxSerializer, InboxIssueSerializer, IssueStateInboxSeriali from .analytic import AnalyticViewSerializer from .notification import NotificationSerializer + +from .exporter import ExporterHistorySerializer diff --git a/apiserver/plane/api/serializers/cycle.py b/apiserver/plane/api/serializers/cycle.py index 5b7bb7598..1abd63b7f 100644 --- a/apiserver/plane/api/serializers/cycle.py +++ b/apiserver/plane/api/serializers/cycle.py @@ -41,6 +41,7 @@ class CycleSerializer(BaseSerializer): { "avatar": assignee.avatar, "first_name": assignee.first_name, + "display_name": assignee.display_name, "id": assignee.id, } for issue_cycle in obj.issue_cycle.all() diff --git a/apiserver/plane/api/serializers/exporter.py b/apiserver/plane/api/serializers/exporter.py new file mode 100644 index 000000000..5c78cfa69 --- /dev/null +++ b/apiserver/plane/api/serializers/exporter.py @@ -0,0 +1,26 @@ +# Module imports +from .base import BaseSerializer +from plane.db.models import ExporterHistory +from .user import UserLiteSerializer + + +class ExporterHistorySerializer(BaseSerializer): + initiated_by_detail = UserLiteSerializer(source="initiated_by", read_only=True) + + class Meta: + model = ExporterHistory + fields = [ + "id", + "created_at", + "updated_at", + "project", + "provider", + "status", + "url", + "initiated_by", + "initiated_by_detail", + "token", + "created_by", + "updated_by", + ] + 
read_only_fields = fields diff --git a/apiserver/plane/api/serializers/issue.py b/apiserver/plane/api/serializers/issue.py index 770880ef0..64ee2b8f7 100644 --- a/apiserver/plane/api/serializers/issue.py +++ b/apiserver/plane/api/serializers/issue.py @@ -31,6 +31,7 @@ from plane.db.models import ( IssueAttachment, IssueReaction, CommentReaction, + IssueVote, ) @@ -111,6 +112,11 @@ class IssueCreateSerializer(BaseSerializer): "updated_at", ] + def validate(self, data): + if data.get("start_date", None) is not None and data.get("target_date", None) is not None and data.get("start_date", None) > data.get("target_date", None): + raise serializers.ValidationError("Start date cannot exceed target date") + return data + def create(self, validated_data): blockers = validated_data.pop("blockers_list", None) assignees = validated_data.pop("assignees_list", None) @@ -549,6 +555,14 @@ class CommentReactionSerializer(BaseSerializer): +class IssueVoteSerializer(BaseSerializer): + + class Meta: + model = IssueVote + fields = ["issue", "vote", "workspace_id", "project_id", "actor"] + read_only_fields = fields + + class IssueCommentSerializer(BaseSerializer): actor_detail = UserLiteSerializer(read_only=True, source="actor") issue_detail = IssueFlatSerializer(read_only=True, source="issue") @@ -568,6 +582,7 @@ class IssueCommentSerializer(BaseSerializer): "updated_by", "created_at", "updated_at", + "access", ] diff --git a/apiserver/plane/api/serializers/people.py b/apiserver/plane/api/serializers/people.py deleted file mode 100644 index b8b59416c..000000000 --- a/apiserver/plane/api/serializers/people.py +++ /dev/null @@ -1,57 +0,0 @@ -from rest_framework.serializers import ( - ModelSerializer, - Serializer, - CharField, - SerializerMethodField, -) -from rest_framework.authtoken.models import Token -from rest_framework_simplejwt.tokens import RefreshToken - - -from plane.db.models import User - - -class UserSerializer(ModelSerializer): - class Meta: - model = User - fields = "__all__" - extra_kwargs = {"password": {"write_only": True}} - - -class ChangePasswordSerializer(Serializer): - model = User - - """ - Serializer for password change endpoint. - """ - old_password = CharField(required=True) - new_password = CharField(required=True) - - -class ResetPasswordSerializer(Serializer): - model = User - - """ - Serializer for password change endpoint. 
- """ - new_password = CharField(required=True) - confirm_password = CharField(required=True) - - -class TokenSerializer(ModelSerializer): - - user = UserSerializer() - access_token = SerializerMethodField() - refresh_token = SerializerMethodField() - - def get_access_token(self, obj): - refresh_token = RefreshToken.for_user(obj.user) - return str(refresh_token.access_token) - - def get_refresh_token(self, obj): - refresh_token = RefreshToken.for_user(obj.user) - return str(refresh_token) - - class Meta: - model = Token - fields = "__all__" diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py index fa97c5a6d..1f7a973c1 100644 --- a/apiserver/plane/api/serializers/project.py +++ b/apiserver/plane/api/serializers/project.py @@ -7,13 +7,14 @@ from rest_framework import serializers # Module imports from .base import BaseSerializer from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer -from plane.api.serializers.user import UserLiteSerializer +from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer from plane.db.models import ( Project, ProjectMember, ProjectMemberInvite, ProjectIdentifier, ProjectFavorite, + ProjectDeployBoard, ) @@ -80,7 +81,15 @@ class ProjectSerializer(BaseSerializer): class ProjectLiteSerializer(BaseSerializer): class Meta: model = Project - fields = ["id", "identifier", "name"] + fields = [ + "id", + "identifier", + "name", + "cover_image", + "icon_prop", + "emoji", + "description", + ] read_only_fields = fields @@ -94,6 +103,8 @@ class ProjectDetailSerializer(BaseSerializer): total_modules = serializers.IntegerField(read_only=True) is_member = serializers.BooleanField(read_only=True) sort_order = serializers.FloatField(read_only=True) + member_role = serializers.IntegerField(read_only=True) + is_deployed = serializers.BooleanField(read_only=True) class Meta: model = Project @@ -110,6 +121,16 @@ class ProjectMemberSerializer(BaseSerializer): fields = "__all__" +class ProjectMemberAdminSerializer(BaseSerializer): + workspace = WorkspaceLiteSerializer(read_only=True) + project = ProjectLiteSerializer(read_only=True) + member = UserAdminLiteSerializer(read_only=True) + + class Meta: + model = ProjectMember + fields = "__all__" + + class ProjectMemberInviteSerializer(BaseSerializer): project = ProjectLiteSerializer(read_only=True) workspace = WorkspaceLiteSerializer(read_only=True) @@ -137,8 +158,6 @@ class ProjectFavoriteSerializer(BaseSerializer): ] - - class ProjectMemberLiteSerializer(BaseSerializer): member = UserLiteSerializer(read_only=True) is_subscribed = serializers.BooleanField(read_only=True) @@ -147,3 +166,16 @@ class ProjectMemberLiteSerializer(BaseSerializer): model = ProjectMember fields = ["member", "id", "is_subscribed"] read_only_fields = fields + + +class ProjectDeployBoardSerializer(BaseSerializer): + project_details = ProjectLiteSerializer(read_only=True, source="project") + workspace_detail = WorkspaceLiteSerializer(read_only=True, source="workspace") + + class Meta: + model = ProjectDeployBoard + fields = "__all__" + read_only_fields = [ + "workspace", + "project" "anchor", + ] diff --git a/apiserver/plane/api/serializers/user.py b/apiserver/plane/api/serializers/user.py index d8978479e..dcb00c6cb 100644 --- a/apiserver/plane/api/serializers/user.py +++ b/apiserver/plane/api/serializers/user.py @@ -1,3 +1,6 @@ +# Third party imports +from rest_framework import serializers + # Module import from .base import BaseSerializer from plane.db.models 
import User @@ -37,11 +40,50 @@ class UserLiteSerializer(BaseSerializer): "id", "first_name", "last_name", - "email", "avatar", "is_bot", + "display_name", ] read_only_fields = [ "id", "is_bot", ] + + +class UserAdminLiteSerializer(BaseSerializer): + + class Meta: + model = User + fields = [ + "id", + "first_name", + "last_name", + "avatar", + "is_bot", + "display_name", + "email", + ] + read_only_fields = [ + "id", + "is_bot", + ] + + +class ChangePasswordSerializer(serializers.Serializer): + model = User + + """ + Serializer for password change endpoint. + """ + old_password = serializers.CharField(required=True) + new_password = serializers.CharField(required=True) + + +class ResetPasswordSerializer(serializers.Serializer): + model = User + + """ + Serializer for password change endpoint. + """ + new_password = serializers.CharField(required=True) + confirm_password = serializers.CharField(required=True) diff --git a/apiserver/plane/api/serializers/workspace.py b/apiserver/plane/api/serializers/workspace.py index 4d83d6262..d27b66481 100644 --- a/apiserver/plane/api/serializers/workspace.py +++ b/apiserver/plane/api/serializers/workspace.py @@ -3,7 +3,7 @@ from rest_framework import serializers # Module imports from .base import BaseSerializer -from .user import UserLiteSerializer +from .user import UserLiteSerializer, UserAdminLiteSerializer from plane.db.models import ( User, @@ -33,10 +33,30 @@ class WorkSpaceSerializer(BaseSerializer): "owner", ] +class WorkspaceLiteSerializer(BaseSerializer): + class Meta: + model = Workspace + fields = [ + "name", + "slug", + "id", + ] + read_only_fields = fields + + class WorkSpaceMemberSerializer(BaseSerializer): member = UserLiteSerializer(read_only=True) - workspace = WorkSpaceSerializer(read_only=True) + workspace = WorkspaceLiteSerializer(read_only=True) + + class Meta: + model = WorkspaceMember + fields = "__all__" + + +class WorkspaceMemberAdminSerializer(BaseSerializer): + member = UserAdminLiteSerializer(read_only=True) + workspace = WorkspaceLiteSerializer(read_only=True) class Meta: model = WorkspaceMember @@ -101,17 +121,6 @@ class TeamSerializer(BaseSerializer): return super().update(instance, validated_data) -class WorkspaceLiteSerializer(BaseSerializer): - class Meta: - model = Workspace - fields = [ - "name", - "slug", - "id", - ] - read_only_fields = fields - - class WorkspaceThemeSerializer(BaseSerializer): class Meta: model = WorkspaceTheme diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls.py index c8b5e7b5e..b8743476e 100644 --- a/apiserver/plane/api/urls.py +++ b/apiserver/plane/api/urls.py @@ -32,6 +32,7 @@ from plane.api.views import ( InviteWorkspaceEndpoint, JoinWorkspaceEndpoint, WorkSpaceMemberViewSet, + WorkspaceMembersEndpoint, WorkspaceInvitationsViewset, UserWorkspaceInvitationsEndpoint, WorkspaceMemberUserEndpoint, @@ -59,6 +60,7 @@ from plane.api.views import ( ProjectViewSet, InviteProjectEndpoint, ProjectMemberViewSet, + ProjectMemberEndpoint, ProjectMemberInvitationsViewset, ProjectMemberUserEndpoint, AddMemberToProjectEndpoint, @@ -84,8 +86,10 @@ from plane.api.views import ( IssueAttachmentEndpoint, IssueArchiveViewSet, IssueSubscriberViewSet, + IssueCommentPublicViewSet, IssueReactionViewSet, CommentReactionViewSet, + ExportIssuesEndpoint, ## End Issues # States StateViewSet, @@ -162,6 +166,15 @@ from plane.api.views import ( NotificationViewSet, UnreadNotificationEndpoint, ## End Notification + # Public Boards + ProjectDeployBoardViewSet, + ProjectDeployBoardIssuesPublicEndpoint, + 
ProjectDeployBoardPublicSettingsEndpoint, + IssueReactionPublicViewSet, + CommentReactionPublicViewSet, + InboxIssuePublicViewSet, + IssueVotePublicViewSet, + ## End Public Boards ) @@ -334,6 +347,11 @@ urlpatterns = [ ), name="workspace", ), + path( + "workspaces//workspace-members/", + WorkspaceMembersEndpoint.as_view(), + name="workspace-members", + ), path( "workspaces//teams/", TeamMemberViewSet.as_view( @@ -467,6 +485,11 @@ urlpatterns = [ ), name="project", ), + path( + "workspaces//projects//project-members/", + ProjectMemberEndpoint.as_view(), + name="project", + ), path( "workspaces//projects//members/add/", AddMemberToProjectEndpoint.as_view(), @@ -808,6 +831,11 @@ urlpatterns = [ IssueAttachmentEndpoint.as_view(), name="project-issue-attachments", ), + path( + "workspaces//export-issues/", + ExportIssuesEndpoint.as_view(), + name="export-issues", + ), ## End Issues ## Issue Activity path( @@ -1463,4 +1491,128 @@ urlpatterns = [ name="unread-notifications", ), ## End Notification + # Public Boards + path( + "workspaces//projects//project-deploy-boards/", + ProjectDeployBoardViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-deploy-board", + ), + path( + "workspaces//projects//project-deploy-boards//", + ProjectDeployBoardViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-deploy-board", + ), + path( + "public/workspaces//project-boards//settings/", + ProjectDeployBoardPublicSettingsEndpoint.as_view(), + name="project-deploy-board-settings", + ), + path( + "public/workspaces//project-boards//issues/", + ProjectDeployBoardIssuesPublicEndpoint.as_view(), + name="project-deploy-board", + ), + path( + "public/workspaces//project-boards//issues//comments/", + IssueCommentPublicViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="issue-comments-project-board", + ), + path( + "public/workspaces//project-boards//issues//comments//", + IssueCommentPublicViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="issue-comments-project-board", + ), + path( + "public/workspaces//project-boards//issues//reactions/", + IssueReactionPublicViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="issue-reactions-project-board", + ), + path( + "public/workspaces//project-boards//issues//reactions//", + IssueReactionPublicViewSet.as_view( + { + "delete": "destroy", + } + ), + name="issue-reactions-project-board", + ), + path( + "public/workspaces//project-boards//comments//reactions/", + CommentReactionPublicViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="comment-reactions-project-board", + ), + path( + "public/workspaces//project-boards//comments//reactions//", + CommentReactionPublicViewSet.as_view( + { + "delete": "destroy", + } + ), + name="comment-reactions-project-board", + ), + path( + "public/workspaces//project-boards//inboxes//inbox-issues/", + InboxIssuePublicViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="inbox-issue", + ), + path( + "public/workspaces//project-boards//inboxes//inbox-issues//", + InboxIssuePublicViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="inbox-issue", + ), + path( + "public/workspaces//project-boards//issues//votes/", + IssueVotePublicViewSet.as_view( + { + "get": "list", + "post": "create", + "delete": "destroy", + } + ), + name="issue-vote-project-board", + ), 
+ ## End Public Boards ] diff --git a/apiserver/plane/api/views/__init__.py b/apiserver/plane/api/views/__init__.py index 75509a16c..11223f90a 100644 --- a/apiserver/plane/api/views/__init__.py +++ b/apiserver/plane/api/views/__init__.py @@ -12,8 +12,12 @@ from .project import ( ProjectUserViewsEndpoint, ProjectMemberUserEndpoint, ProjectFavoritesViewSet, + ProjectDeployBoardIssuesPublicEndpoint, + ProjectDeployBoardViewSet, + ProjectDeployBoardPublicSettingsEndpoint, + ProjectMemberEndpoint, ) -from .people import ( +from .user import ( UserEndpoint, UpdateUserOnBoardedEndpoint, UpdateUserTourCompletedEndpoint, @@ -47,6 +51,7 @@ from .workspace import ( WorkspaceUserProfileEndpoint, WorkspaceUserProfileIssuesEndpoint, WorkspaceLabelsEndpoint, + WorkspaceMembersEndpoint, ) from .state import StateViewSet from .view import IssueViewViewSet, ViewIssuesEndpoint, IssueViewFavoriteViewSet @@ -73,8 +78,12 @@ from .issue import ( IssueAttachmentEndpoint, IssueArchiveViewSet, IssueSubscriberViewSet, + IssueCommentPublicViewSet, CommentReactionViewSet, IssueReactionViewSet, + IssueReactionPublicViewSet, + CommentReactionPublicViewSet, + IssueVotePublicViewSet, ) from .auth_extended import ( @@ -142,7 +151,7 @@ from .estimate import ( from .release import ReleaseNotesEndpoint -from .inbox import InboxViewSet, InboxIssueViewSet +from .inbox import InboxViewSet, InboxIssueViewSet, InboxIssuePublicViewSet from .analytic import ( AnalyticsEndpoint, @@ -152,4 +161,8 @@ from .analytic import ( DefaultAnalyticsEndpoint, ) -from .notification import NotificationViewSet, UnreadNotificationEndpoint \ No newline at end of file +from .notification import NotificationViewSet, UnreadNotificationEndpoint + +from .exporter import ( + ExportIssuesEndpoint, +) \ No newline at end of file diff --git a/apiserver/plane/api/views/analytic.py b/apiserver/plane/api/views/analytic.py index e537af84a..feb766b46 100644 --- a/apiserver/plane/api/views/analytic.py +++ b/apiserver/plane/api/views/analytic.py @@ -79,12 +79,12 @@ class AnalyticsEndpoint(BaseAPIView): ) assignee_details = {} - if x_axis in ["assignees__email"] or segment in ["assignees__email"]: + if x_axis in ["assignees__id"] or segment in ["assignees__id"]: assignee_details = ( Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False) .order_by("assignees__id") .distinct("assignees__id") - .values("assignees__avatar", "assignees__email", "assignees__first_name", "assignees__last_name") + .values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id") ) @@ -243,21 +243,21 @@ class DefaultAnalyticsEndpoint(BaseAPIView): ) most_issue_created_user = ( queryset.exclude(created_by=None) - .values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__email") + .values("created_by__first_name", "created_by__last_name", "created_by__avatar", "created_by__display_name", "created_by__id") .annotate(count=Count("id")) .order_by("-count") )[:5] most_issue_closed_user = ( queryset.filter(completed_at__isnull=False, assignees__isnull=False) - .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__email") + .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id") .annotate(count=Count("id")) .order_by("-count") )[:5] pending_issue_user = ( queryset.filter(completed_at__isnull=True) - .values("assignees__first_name", "assignees__last_name", 
"assignees__avatar", "assignees__email") + .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id") .annotate(count=Count("id")) .order_by("-count") ) diff --git a/apiserver/plane/api/views/auth_extended.py b/apiserver/plane/api/views/auth_extended.py index 56dc091f4..df3f3aaca 100644 --- a/apiserver/plane/api/views/auth_extended.py +++ b/apiserver/plane/api/views/auth_extended.py @@ -22,7 +22,7 @@ from sentry_sdk import capture_exception ## Module imports from . import BaseAPIView -from plane.api.serializers.people import ( +from plane.api.serializers import ( ChangePasswordSerializer, ResetPasswordSerializer, ) diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py index 268485b6e..a3d89fa81 100644 --- a/apiserver/plane/api/views/cycle.py +++ b/apiserver/plane/api/views/cycle.py @@ -165,6 +165,9 @@ class CycleViewSet(BaseViewSet): try: queryset = self.get_queryset() cycle_view = request.GET.get("cycle_view", "all") + order_by = request.GET.get("order_by", "sort_order") + + queryset = queryset.order_by(order_by) # All Cycles if cycle_view == "all": @@ -370,7 +373,8 @@ class CycleViewSet(BaseViewSet): .annotate(last_name=F("assignees__last_name")) .annotate(assignee_id=F("assignees__id")) .annotate(avatar=F("assignees__avatar")) - .values("first_name", "last_name", "assignee_id", "avatar") + .annotate(display_name=F("assignees__display_name")) + .values("first_name", "last_name", "assignee_id", "avatar", "display_name") .annotate(total_issues=Count("assignee_id")) .annotate( completed_issues=Count( diff --git a/apiserver/plane/api/views/exporter.py b/apiserver/plane/api/views/exporter.py new file mode 100644 index 000000000..7e14aa82f --- /dev/null +++ b/apiserver/plane/api/views/exporter.py @@ -0,0 +1,100 @@ +# Third Party imports +from rest_framework.response import Response +from rest_framework import status +from sentry_sdk import capture_exception + +# Module imports +from . 
import BaseAPIView +from plane.api.permissions import WorkSpaceAdminPermission +from plane.bgtasks.export_task import issue_export_task +from plane.db.models import Project, ExporterHistory, Workspace + +from plane.api.serializers import ExporterHistorySerializer + + +class ExportIssuesEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + model = ExporterHistory + serializer_class = ExporterHistorySerializer + + def post(self, request, slug): + try: + # Get the workspace + workspace = Workspace.objects.get(slug=slug) + + provider = request.data.get("provider", False) + multiple = request.data.get("multiple", False) + project_ids = request.data.get("project", []) + + if provider in ["csv", "xlsx", "json"]: + if not project_ids: + project_ids = Project.objects.filter( + workspace__slug=slug + ).values_list("id", flat=True) + project_ids = [str(project_id) for project_id in project_ids] + + exporter = ExporterHistory.objects.create( + workspace=workspace, + project=project_ids, + initiated_by=request.user, + provider=provider, + ) + + issue_export_task.delay( + provider=exporter.provider, + workspace_id=workspace.id, + project_ids=project_ids, + token_id=exporter.token, + multiple=multiple, + slug=slug, + ) + return Response( + { + "message": f"Once the export is ready you will be able to download it" + }, + status=status.HTTP_200_OK, + ) + else: + return Response( + {"error": f"Provider '{provider}' not found."}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Workspace.DoesNotExist: + return Response( + {"error": "Workspace does not exists"}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def get(self, request, slug): + try: + exporter_history = ExporterHistory.objects.filter( + workspace__slug=slug + ).select_related("workspace","initiated_by") + + if request.GET.get("per_page", False) and request.GET.get("cursor", False): + return self.paginate( + request=request, + queryset=exporter_history, + on_results=lambda exporter_history: ExporterHistorySerializer( + exporter_history, many=True + ).data, + ) + else: + return Response( + {"error": "per_page and cursor are required"}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) diff --git a/apiserver/plane/api/views/importer.py b/apiserver/plane/api/views/importer.py index 4fc7ad483..0a92b3850 100644 --- a/apiserver/plane/api/views/importer.py +++ b/apiserver/plane/api/views/importer.py @@ -458,7 +458,7 @@ class BulkImportIssuesEndpoint(BaseAPIView): actor=request.user, project_id=project_id, workspace_id=project.workspace_id, - comment=f"{request.user.email} importer the issue from {service}", + comment=f"imported the issue from {service}", verb="created", created_by=request.user, ) diff --git a/apiserver/plane/api/views/inbox.py b/apiserver/plane/api/views/inbox.py index ada76c9b3..4fbea5f87 100644 --- a/apiserver/plane/api/views/inbox.py +++ b/apiserver/plane/api/views/inbox.py @@ -15,7 +15,6 @@ from sentry_sdk import capture_exception from .base import BaseViewSet from plane.api.permissions import ProjectBasePermission, ProjectLitePermission from plane.db.models import ( - Project, Inbox, InboxIssue, Issue, @@ -23,6 +22,7 @@ from plane.db.models import ( IssueLink, IssueAttachment, 
ProjectMember, + ProjectDeployBoard, ) from plane.api.serializers import ( IssueSerializer, @@ -377,4 +377,269 @@ class InboxIssueViewSet(BaseViewSet): return Response( {"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST, - ) \ No newline at end of file + ) + + +class InboxIssuePublicViewSet(BaseViewSet): + serializer_class = InboxIssueSerializer + model = InboxIssue + + filterset_fields = [ + "status", + ] + + def get_queryset(self): + project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=self.kwargs.get("slug"), project_id=self.kwargs.get("project_id")) + if project_deploy_board is not None: + return self.filter_queryset( + super() + .get_queryset() + .filter( + Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), + project_id=self.kwargs.get("project_id"), + workspace__slug=self.kwargs.get("slug"), + inbox_id=self.kwargs.get("inbox_id"), + ) + .select_related("issue", "workspace", "project") + ) + else: + return InboxIssue.objects.none() + + def list(self, request, slug, project_id, inbox_id): + try: + project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + if project_deploy_board.inbox is None: + return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + + filters = issue_filters(request.query_params, "GET") + issues = ( + Issue.objects.filter( + issue_inbox__inbox_id=inbox_id, + workspace__slug=slug, + project_id=project_id, + ) + .filter(**filters) + .annotate(bridge_id=F("issue_inbox__id")) + .select_related("workspace", "project", "state", "parent") + .prefetch_related("assignees", "labels") + .order_by("issue_inbox__snoozed_till", "issue_inbox__status") + .annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .prefetch_related( + Prefetch( + "issue_inbox", + queryset=InboxIssue.objects.only( + "status", "duplicate_to", "snoozed_till", "source" + ), + ) + ) + ) + issues_data = IssueStateInboxSerializer(issues, many=True).data + return Response( + issues_data, + status=status.HTTP_200_OK, + ) + except ProjectDeployBoard.DoesNotExist: + return Response({"error": "Project Deploy Board does not exist"}, status=status.HTTP_400_BAD_REQUEST) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def create(self, request, slug, project_id, inbox_id): + try: + project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + if project_deploy_board.inbox is None: + return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + + if not request.data.get("issue", {}).get("name", False): + return Response( + {"error": "Name is required"}, status=status.HTTP_400_BAD_REQUEST + ) + + # Check for valid priority + if not request.data.get("issue", {}).get("priority", None) in [ + "low", + "medium", + "high", + "urgent", + None, + ]: + return Response( + {"error": "Invalid priority"}, 
status=status.HTTP_400_BAD_REQUEST + ) + + # Create or get state + state, _ = State.objects.get_or_create( + name="Triage", + group="backlog", + description="Default state for managing all Inbox Issues", + project_id=project_id, + color="#ff7700", + ) + + # create an issue + issue = Issue.objects.create( + name=request.data.get("issue", {}).get("name"), + description=request.data.get("issue", {}).get("description", {}), + description_html=request.data.get("issue", {}).get( + "description_html", "
<p></p>
" + ), + priority=request.data.get("issue", {}).get("priority", "low"), + project_id=project_id, + state=state, + ) + + # Create an Issue Activity + issue_activity.delay( + type="issue.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=None, + ) + # create an inbox issue + InboxIssue.objects.create( + inbox_id=inbox_id, + project_id=project_id, + issue=issue, + source=request.data.get("source", "in-app"), + ) + + serializer = IssueStateInboxSerializer(issue) + return Response(serializer.data, status=status.HTTP_200_OK) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def partial_update(self, request, slug, project_id, inbox_id, pk): + try: + project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + if project_deploy_board.inbox is None: + return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + # Get the project member + if str(inbox_issue.created_by_id) != str(request.user.id): + return Response({"error": "You cannot edit inbox issues"}, status=status.HTTP_400_BAD_REQUEST) + + # Get issue data + issue_data = request.data.pop("issue", False) + + + issue = Issue.objects.get( + pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id + ) + # viewers and guests since only viewers and guests + issue_data = { + "name": issue_data.get("name", issue.name), + "description_html": issue_data.get("description_html", issue.description_html), + "description": issue_data.get("description", issue.description) + } + + issue_serializer = IssueCreateSerializer( + issue, data=issue_data, partial=True + ) + + if issue_serializer.is_valid(): + current_instance = issue + # Log all the updates + requested_data = json.dumps(issue_data, cls=DjangoJSONEncoder) + if issue is not None: + issue_activity.delay( + type="issue.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue.id), + project_id=str(project_id), + current_instance=json.dumps( + IssueSerializer(current_instance).data, + cls=DjangoJSONEncoder, + ), + ) + issue_serializer.save() + return Response(issue_serializer.data, status=status.HTTP_200_OK) + return Response(issue_serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except InboxIssue.DoesNotExist: + return Response( + {"error": "Inbox Issue does not exist"}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def retrieve(self, request, slug, project_id, inbox_id, pk): + try: + project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + if project_deploy_board.inbox is None: + return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + issue = Issue.objects.get( + pk=inbox_issue.issue_id, workspace__slug=slug, project_id=project_id + ) + serializer = IssueStateInboxSerializer(issue) + return 
Response(serializer.data, status=status.HTTP_200_OK) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def destroy(self, request, slug, project_id, inbox_id, pk): + try: + project_deploy_board = ProjectDeployBoard.objects.get(workspace__slug=slug, project_id=project_id) + if project_deploy_board.inbox is None: + return Response({"error": "Inbox is not enabled for this Project Board"}, status=status.HTTP_400_BAD_REQUEST) + + inbox_issue = InboxIssue.objects.get( + pk=pk, workspace__slug=slug, project_id=project_id, inbox_id=inbox_id + ) + + if str(inbox_issue.created_by_id) != str(request.user.id): + return Response({"error": "You cannot delete inbox issue"}, status=status.HTTP_400_BAD_REQUEST) + + inbox_issue.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + except InboxIssue.DoesNotExist: + return Response({"error": "Inbox Issue does not exists"}, status=status.HTTP_400_BAD_REQUEST) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + diff --git a/apiserver/plane/api/views/integration/slack.py b/apiserver/plane/api/views/integration/slack.py index 06e2dfe39..498dd0607 100644 --- a/apiserver/plane/api/views/integration/slack.py +++ b/apiserver/plane/api/views/integration/slack.py @@ -20,6 +20,17 @@ class SlackProjectSyncViewSet(BaseViewSet): serializer_class = SlackProjectSyncSerializer model = SlackProjectSync + def get_queryset(self): + return ( + super() + .get_queryset() + .filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + .filter(project__project_projectmember__member=self.request.user) + ) + def create(self, request, slug, project_id, workspace_integration_id): try: serializer = SlackProjectSyncSerializer(data=request.data) @@ -45,7 +56,10 @@ class SlackProjectSyncViewSet(BaseViewSet): return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) except IntegrityError: - return Response({"error": "Slack is already enabled for the project"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "Slack is already enabled for the project"}, + status=status.HTTP_400_BAD_REQUEST, + ) except WorkspaceIntegration.DoesNotExist: return Response( {"error": "Workspace Integration does not exist"}, diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py index 9369ccf2b..6f0f1e6ae 100644 --- a/apiserver/plane/api/views/issue.py +++ b/apiserver/plane/api/views/issue.py @@ -48,6 +48,7 @@ from plane.api.serializers import ( ProjectMemberLiteSerializer, IssueReactionSerializer, CommentReactionSerializer, + IssueVoteSerializer, ) from plane.api.permissions import ( WorkspaceEntityPermission, @@ -70,6 +71,8 @@ from plane.db.models import ( ProjectMember, IssueReaction, CommentReaction, + ProjectDeployBoard, + IssueVote, ) from plane.bgtasks.issue_activites_task import issue_activity from plane.utils.grouper import group_results @@ -168,7 +171,6 @@ class IssueViewSet(BaseViewSet): def list(self, request, slug, project_id): try: filters = issue_filters(request.query_params, "GET") - print(filters) # Custom ordering for priority and state priority_order = ["urgent", "high", "medium", "low", None] @@ -361,8 +363,14 @@ class UserWorkSpaceIssues(BaseAPIView): .annotate(count=Func(F("id"), 
function="Count")) .values("count") ) + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) .filter(**filters) - ) + ).distinct() # Priority Ordering if order_by_param == "priority" or order_by_param == "-priority": @@ -743,21 +751,25 @@ class SubIssuesEndpoint(BaseAPIView): .annotate(count=Func(F("id"), function="Count")) .values("count") ) + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) ) state_distribution = ( - State.objects.filter(~Q(name="Triage"), workspace__slug=slug) - .annotate( - state_count=Count( - "state_issue", - filter=Q(state_issue__parent_id=issue_id), - ) + State.objects.filter( + workspace__slug=slug, state_issue__parent_id=issue_id ) - .order_by("group") - .values("group", "state_count") + .annotate(state_group=F("group")) + .values("state_group") + .annotate(state_count=Count("state_group")) + .order_by("state_group") ) - result = {item["group"]: item["state_count"] for item in state_distribution} + result = {item["state_group"]: item["state_count"] for item in state_distribution} serializer = IssueLiteSerializer( sub_issues, @@ -1445,3 +1457,398 @@ class CommentReactionViewSet(BaseViewSet): {"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST, ) + + +class IssueCommentPublicViewSet(BaseViewSet): + serializer_class = IssueCommentSerializer + model = IssueComment + + filterset_fields = [ + "issue__id", + "workspace__id", + ] + + def get_queryset(self): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + if project_deploy_board.comments: + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(issue_id=self.kwargs.get("issue_id")) + .select_related("project") + .select_related("workspace") + .select_related("issue") + .distinct() + ) + else: + return IssueComment.objects.none() + + def create(self, request, slug, project_id, issue_id): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.comments: + return Response( + {"error": "Comments are not enabled for this project"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + access = ( + "INTERNAL" + if ProjectMember.objects.filter( + project_id=project_id, member=request.user + ).exists() + else "EXTERNAL" + ) + + serializer = IssueCommentSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + actor=request.user, + access=access, + ) + issue_activity.delay( + type="comment.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=None, + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def partial_update(self, request, slug, project_id, issue_id, pk): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.comments: + return Response( + {"error": 
"Comments are not enabled for this project"}, + status=status.HTTP_400_BAD_REQUEST, + ) + comment = IssueComment.objects.get( + workspace__slug=slug, pk=pk, actor=request.user + ) + serializer = IssueCommentSerializer( + comment, data=request.data, partial=True + ) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="comment.activity.updated", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=json.dumps( + IssueCommentSerializer(comment).data, + cls=DjangoJSONEncoder, + ), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except (IssueComment.DoesNotExist, ProjectDeployBoard.DoesNotExist): + return Response( + {"error": "IssueComent Does not exists"}, + status=status.HTTP_400_BAD_REQUEST,) + + def destroy(self, request, slug, project_id, issue_id, pk): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.comments: + return Response( + {"error": "Comments are not enabled for this project"}, + status=status.HTTP_400_BAD_REQUEST, + ) + comment = IssueComment.objects.get( + workspace__slug=slug, pk=pk, project_id=project_id, actor=request.user + ) + issue_activity.delay( + type="comment.activity.deleted", + requested_data=json.dumps({"comment_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=json.dumps( + IssueCommentSerializer(comment).data, + cls=DjangoJSONEncoder, + ), + ) + comment.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + except (IssueComment.DoesNotExist, ProjectDeployBoard.DoesNotExist): + return Response( + {"error": "IssueComent Does not exists"}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +class IssueReactionPublicViewSet(BaseViewSet): + serializer_class = IssueReactionSerializer + model = IssueReaction + + def get_queryset(self): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + if project_deploy_board.reactions: + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .order_by("-created_at") + .distinct() + ) + else: + return IssueReaction.objects.none() + + def create(self, request, slug, project_id, issue_id): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.reactions: + return Response( + {"error": "Reactions are not enabled for this project board"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = IssueReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, issue_id=issue_id, actor=request.user + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except ProjectDeployBoard.DoesNotExist: + return Response( + {"error": "Project board does not exist"}, + status=status.HTTP_400_BAD_REQUEST, + ) + except 
Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def destroy(self, request, slug, project_id, issue_id, reaction_code): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.reactions: + return Response( + {"error": "Reactions are not enabled for this project board"}, + status=status.HTTP_400_BAD_REQUEST, + ) + issue_reaction = IssueReaction.objects.get( + workspace__slug=slug, + issue_id=issue_id, + reaction=reaction_code, + actor=request.user, + ) + issue_reaction.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + except IssueReaction.DoesNotExist: + return Response( + {"error": "Issue reaction does not exist"}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +class CommentReactionPublicViewSet(BaseViewSet): + serializer_class = CommentReactionSerializer + model = CommentReaction + + def get_queryset(self): + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + if project_deploy_board.reactions: + return ( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(comment_id=self.kwargs.get("comment_id")) + .order_by("-created_at") + .distinct() + ) + else: + return CommentReaction.objects.none() + + def create(self, request, slug, project_id, comment_id): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + if not project_deploy_board.reactions: + return Response( + {"error": "Reactions are not enabled for this board"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + serializer = CommentReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, comment_id=comment_id, actor=request.user + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except ProjectDeployBoard.DoesNotExist: + return Response( + {"error": "Project board does not exist"}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def destroy(self, request, slug, project_id, comment_id, reaction_code): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + if not project_deploy_board.reactions: + return Response( + {"error": "Reactions are not enabled for this board"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + comment_reaction = CommentReaction.objects.get( + project_id=project_id, + workspace__slug=slug, + comment_id=comment_id, + reaction=reaction_code, + actor=request.user, + ) + comment_reaction.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + except CommentReaction.DoesNotExist: + return Response( + {"error": "Comment reaction does not exist"}, + status=status.HTTP_400_BAD_REQUEST, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + 
status=status.HTTP_400_BAD_REQUEST, + ) + + +class IssueVotePublicViewSet(BaseViewSet): + model = IssueVote + serializer_class = IssueVoteSerializer + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter(issue_id=self.kwargs.get("issue_id")) + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + ) + + def create(self, request, slug, project_id, issue_id): + try: + issue_vote, _ = IssueVote.objects.get_or_create( + actor_id=request.user.id, + project_id=project_id, + issue_id=issue_id, + vote=request.data.get("vote", 1), + ) + serializer = IssueVoteSerializer(issue_vote) + return Response(serializer.data, status=status.HTTP_201_CREATED) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def destroy(self, request, slug, project_id, issue_id): + try: + issue_vote = IssueVote.objects.get( + workspace__slug=slug, + project_id=project_id, + issue_id=issue_id, + actor_id=request.user.id, + ) + issue_vote.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +class ExportIssuesEndpoint(BaseAPIView): + permission_classes = [ + WorkSpaceAdminPermission, + ] + + def post(self, request, slug): + try: + + issue_export_task.delay( + email=request.user.email, data=request.data, slug=slug ,exporter_name=request.user.first_name + ) + + return Response( + { + "message": f"Once the export is ready it will be emailed to you at {str(request.user.email)}" + }, + status=status.HTTP_200_OK, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) \ No newline at end of file diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py index 2a7532ecf..1cd741f84 100644 --- a/apiserver/plane/api/views/module.py +++ b/apiserver/plane/api/views/module.py @@ -53,6 +53,8 @@ class ModuleViewSet(BaseViewSet): ) def get_queryset(self): + order_by = self.request.GET.get("order_by", "sort_order") + subquery = ModuleFavorite.objects.filter( user=self.request.user, module_id=OuterRef("pk"), @@ -106,7 +108,7 @@ class ModuleViewSet(BaseViewSet): filter=Q(issue_module__issue__state__group="backlog"), ) ) - .order_by("-is_favorite", "name") + .order_by(order_by, "name") ) def perform_destroy(self, instance): @@ -173,8 +175,9 @@ class ModuleViewSet(BaseViewSet): .annotate(first_name=F("assignees__first_name")) .annotate(last_name=F("assignees__last_name")) .annotate(assignee_id=F("assignees__id")) + .annotate(display_name=F("assignees__display_name")) .annotate(avatar=F("assignees__avatar")) - .values("first_name", "last_name", "assignee_id", "avatar") + .values("first_name", "last_name", "assignee_id", "avatar", "display_name") .annotate(total_issues=Count("assignee_id")) .annotate( completed_issues=Count( diff --git a/apiserver/plane/api/views/page.py b/apiserver/plane/api/views/page.py index edca47ffe..d9fad9eaa 100644 --- a/apiserver/plane/api/views/page.py +++ b/apiserver/plane/api/views/page.py @@ -301,7 +301,7 @@ class CreateIssueFromPageBlockEndpoint(BaseAPIView): issue=issue, actor=request.user, project_id=project_id, - comment=f"{request.user.email} created the issue from {page_block.name} block", + 
comment=f"created the issue from {page_block.name} block", verb="created", ) diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py index 31741f10c..6adee0016 100644 --- a/apiserver/plane/api/views/project.py +++ b/apiserver/plane/api/views/project.py @@ -5,7 +5,21 @@ from datetime import datetime # Django imports from django.core.exceptions import ValidationError from django.db import IntegrityError -from django.db.models import Q, Exists, OuterRef, Func, F, Min, Subquery +from django.db.models import ( + Q, + Exists, + OuterRef, + Func, + F, + Max, + CharField, + Func, + Subquery, + Prefetch, + When, + Case, + Value, +) from django.core.validators import validate_email from django.conf import settings @@ -13,6 +27,7 @@ from django.conf import settings from rest_framework.response import Response from rest_framework import status from rest_framework import serializers +from rest_framework.permissions import AllowAny from sentry_sdk import capture_exception # Module imports @@ -23,9 +38,16 @@ from plane.api.serializers import ( ProjectDetailSerializer, ProjectMemberInviteSerializer, ProjectFavoriteSerializer, + IssueLiteSerializer, + ProjectDeployBoardSerializer, + ProjectMemberAdminSerializer, ) -from plane.api.permissions import ProjectBasePermission +from plane.api.permissions import ( + ProjectBasePermission, + ProjectEntityPermission, + ProjectMemberPermission, +) from plane.db.models import ( Project, @@ -48,9 +70,17 @@ from plane.db.models import ( IssueAssignee, ModuleMember, Inbox, + ProjectDeployBoard, + Issue, + IssueReaction, + IssueLink, + IssueAttachment, + Label, ) from plane.bgtasks.project_invitation_task import project_invitation +from plane.utils.grouper import group_results +from plane.utils.issue_filters import issue_filters class ProjectViewSet(BaseViewSet): @@ -92,7 +122,9 @@ class ProjectViewSet(BaseViewSet): ) ) .annotate( - total_members=ProjectMember.objects.filter(project_id=OuterRef("id")) + total_members=ProjectMember.objects.filter( + project_id=OuterRef("id"), member__is_bot=False + ) .order_by() .annotate(count=Func(F("id"), function="Count")) .values("count") @@ -109,6 +141,20 @@ class ProjectViewSet(BaseViewSet): .annotate(count=Func(F("id"), function="Count")) .values("count") ) + .annotate( + member_role=ProjectMember.objects.filter( + project_id=OuterRef("pk"), + member_id=self.request.user.id, + ).values("role") + ) + .annotate( + is_deployed=Exists( + ProjectDeployBoard.objects.filter( + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + ) + ) + ) .distinct() ) @@ -180,7 +226,9 @@ class ProjectViewSet(BaseViewSet): project_id=serializer.data["id"], member=request.user, role=20 ) - if serializer.data["project_lead"] is not None: + if serializer.data["project_lead"] is not None and str( + serializer.data["project_lead"] + ) != str(request.user.id): ProjectMember.objects.create( project_id=serializer.data["id"], member_id=serializer.data["project_lead"], @@ -347,7 +395,9 @@ class InviteProjectEndpoint(BaseAPIView): validate_email(email) # Check if user is already a member of workspace if ProjectMember.objects.filter( - project_id=project_id, member__email=email + project_id=project_id, + member__email=email, + member__is_bot=False, ).exists(): return Response( {"error": "User is already member of workspace"}, @@ -451,14 +501,14 @@ class UserProjectInvitationsViewset(BaseViewSet): class ProjectMemberViewSet(BaseViewSet): - serializer_class = ProjectMemberSerializer + serializer_class = 
ProjectMemberAdminSerializer model = ProjectMember permission_classes = [ ProjectBasePermission, ] search_fields = [ - "member__email", + "member__display_name", "member__first_name", ] @@ -984,3 +1034,255 @@ class ProjectFavoritesViewSet(BaseViewSet): {"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST, ) + + +class ProjectDeployBoardViewSet(BaseViewSet): + permission_classes = [ + ProjectMemberPermission, + ] + serializer_class = ProjectDeployBoardSerializer + model = ProjectDeployBoard + + def get_queryset(self): + return ( + super() + .get_queryset() + .filter( + workspace__slug=self.kwargs.get("slug"), + project_id=self.kwargs.get("project_id"), + ) + .select_related("project") + ) + + def create(self, request, slug, project_id): + try: + comments = request.data.get("comments", False) + reactions = request.data.get("reactions", False) + inbox = request.data.get("inbox", None) + votes = request.data.get("votes", False) + views = request.data.get( + "views", + { + "list": True, + "kanban": True, + "calendar": True, + "gantt": True, + "spreadsheet": True, + }, + ) + + project_deploy_board, _ = ProjectDeployBoard.objects.get_or_create( + anchor=f"{slug}/{project_id}", + project_id=project_id, + ) + project_deploy_board.comments = comments + project_deploy_board.reactions = reactions + project_deploy_board.inbox = inbox + project_deploy_board.votes = votes + project_deploy_board.views = views + + project_deploy_board.save() + + serializer = ProjectDeployBoardSerializer(project_deploy_board) + return Response(serializer.data, status=status.HTTP_200_OK) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +class ProjectMemberEndpoint(BaseAPIView): + permission_classes = [ + ProjectEntityPermission, + ] + + def get(self, request, slug, project_id): + try: + project_members = ProjectMember.objects.filter( + project_id=project_id, + workspace__slug=slug, + member__is_bot=False, + ).select_related("project", "member") + serializer = ProjectMemberSerializer(project_members, many=True) + return Response(serializer.data, status=status.HTTP_200_OK) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +class ProjectDeployBoardPublicSettingsEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def get(self, request, slug, project_id): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + serializer = ProjectDeployBoardSerializer(project_deploy_board) + return Response(serializer.data, status=status.HTTP_200_OK) + except ProjectDeployBoard.DoesNotExist: + return Response( + {"error": "Project Deploy Board does not exists"}, + status=status.HTTP_404_NOT_FOUND, + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + +class ProjectDeployBoardIssuesPublicEndpoint(BaseAPIView): + permission_classes = [ + AllowAny, + ] + + def get(self, request, slug, project_id): + try: + project_deploy_board = ProjectDeployBoard.objects.get( + workspace__slug=slug, project_id=project_id + ) + + filters = issue_filters(request.query_params, "GET") + + # Custom ordering for priority and state + priority_order = ["urgent", "high", "medium", "low", None] + 
state_order = ["backlog", "unstarted", "started", "completed", "cancelled"] + + order_by_param = request.GET.get("order_by", "-created_at") + + issue_queryset = ( + Issue.issue_objects.annotate( + sub_issues_count=Issue.issue_objects.filter(parent=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .filter(project_id=project_id) + .filter(workspace__slug=slug) + .select_related("project", "workspace", "state", "parent") + .prefetch_related("assignees", "labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) + .filter(**filters) + .annotate(cycle_id=F("issue_cycle__cycle_id")) + .annotate(module_id=F("issue_module__module_id")) + .annotate( + link_count=IssueLink.objects.filter(issue=OuterRef("id")) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + .annotate( + attachment_count=IssueAttachment.objects.filter( + issue=OuterRef("id") + ) + .order_by() + .annotate(count=Func(F("id"), function="Count")) + .values("count") + ) + ) + + # Priority Ordering + if order_by_param == "priority" or order_by_param == "-priority": + priority_order = ( + priority_order + if order_by_param == "priority" + else priority_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + priority_order=Case( + *[ + When(priority=p, then=Value(i)) + for i, p in enumerate(priority_order) + ], + output_field=CharField(), + ) + ).order_by("priority_order") + + # State Ordering + elif order_by_param in [ + "state__name", + "state__group", + "-state__name", + "-state__group", + ]: + state_order = ( + state_order + if order_by_param in ["state__name", "state__group"] + else state_order[::-1] + ) + issue_queryset = issue_queryset.annotate( + state_order=Case( + *[ + When(state__group=state_group, then=Value(i)) + for i, state_group in enumerate(state_order) + ], + default=Value(len(state_order)), + output_field=CharField(), + ) + ).order_by("state_order") + # assignee and label ordering + elif order_by_param in [ + "labels__name", + "-labels__name", + "assignees__first_name", + "-assignees__first_name", + ]: + issue_queryset = issue_queryset.annotate( + max_values=Max( + order_by_param[1::] + if order_by_param.startswith("-") + else order_by_param + ) + ).order_by( + "-max_values" if order_by_param.startswith("-") else "max_values" + ) + else: + issue_queryset = issue_queryset.order_by(order_by_param) + + issues = IssueLiteSerializer(issue_queryset, many=True).data + + states = State.objects.filter( + workspace__slug=slug, project_id=project_id + ).values("name", "group", "color", "id") + + labels = Label.objects.filter( + workspace__slug=slug, project_id=project_id + ).values("id", "name", "color", "parent") + + ## Grouping the results + group_by = request.GET.get("group_by", False) + if group_by: + issues = group_results(issues, group_by) + + return Response( + { + "issues": issues, + "states": states, + "labels": labels, + }, + status=status.HTTP_200_OK, + ) + except ProjectDeployBoard.DoesNotExist: + return Response( + {"error": "Board does not exists"}, status=status.HTTP_404_NOT_FOUND + ) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) diff --git a/apiserver/plane/api/views/people.py b/apiserver/plane/api/views/user.py similarity index 100% rename from apiserver/plane/api/views/people.py rename to apiserver/plane/api/views/user.py 
diff --git a/apiserver/plane/api/views/view.py b/apiserver/plane/api/views/view.py index 874bb94fb..32ba24c8b 100644 --- a/apiserver/plane/api/views/view.py +++ b/apiserver/plane/api/views/view.py @@ -19,6 +19,7 @@ from plane.db.models import ( IssueView, Issue, IssueViewFavorite, + IssueReaction, ) from plane.utils.issue_filters import issue_filters @@ -77,6 +78,12 @@ class ViewIssuesEndpoint(BaseAPIView): .select_related("parent") .prefetch_related("assignees") .prefetch_related("labels") + .prefetch_related( + Prefetch( + "issue_reactions", + queryset=IssueReaction.objects.select_related("actor"), + ) + ) ) serializer = IssueLiteSerializer(issues, many=True) diff --git a/apiserver/plane/api/views/workspace.py b/apiserver/plane/api/views/workspace.py index a862c0b4c..cfdd0dd9b 100644 --- a/apiserver/plane/api/views/workspace.py +++ b/apiserver/plane/api/views/workspace.py @@ -47,6 +47,7 @@ from plane.api.serializers import ( WorkspaceThemeSerializer, IssueActivitySerializer, IssueLiteSerializer, + WorkspaceMemberAdminSerializer, ) from plane.api.views.base import BaseAPIView from . import BaseViewSet @@ -106,7 +107,9 @@ class WorkSpaceViewSet(BaseViewSet): def get_queryset(self): member_count = ( - WorkspaceMember.objects.filter(workspace=OuterRef("id")) + WorkspaceMember.objects.filter( + workspace=OuterRef("id"), member__is_bot=False + ) .order_by() .annotate(count=Func(F("id"), function="Count")) .values("count") @@ -191,7 +194,9 @@ class UserWorkSpacesEndpoint(BaseAPIView): def get(self, request): try: member_count = ( - WorkspaceMember.objects.filter(workspace=OuterRef("id")) + WorkspaceMember.objects.filter( + workspace=OuterRef("id"), member__is_bot=False + ) .order_by() .annotate(count=Func(F("id"), function="Count")) .values("count") @@ -537,7 +542,7 @@ class UserWorkspaceInvitationsEndpoint(BaseViewSet): class WorkSpaceMemberViewSet(BaseViewSet): - serializer_class = WorkSpaceMemberSerializer + serializer_class = WorkspaceMemberAdminSerializer model = WorkspaceMember permission_classes = [ @@ -545,7 +550,7 @@ class WorkSpaceMemberViewSet(BaseViewSet): ] search_fields = [ - "member__email", + "member__display_name", "member__first_name", ] @@ -624,7 +629,9 @@ class WorkSpaceMemberViewSet(BaseViewSet): if ( workspace_member.role == 20 and WorkspaceMember.objects.filter( - workspace__slug=slug, role=20 + workspace__slug=slug, + role=20, + member__is_bot=False, ).count() == 1 ): @@ -690,7 +697,7 @@ class TeamMemberViewSet(BaseViewSet): ] search_fields = [ - "member__email", + "member__display_name", "member__first_name", ] @@ -987,11 +994,11 @@ class UserWorkspaceDashboardEndpoint(BaseAPIView): upcoming_issues = Issue.issue_objects.filter( ~Q(state__group__in=["completed", "cancelled"]), - target_date__gte=timezone.now(), + start_date__gte=timezone.now(), workspace__slug=slug, assignees__in=[request.user], completed_at__isnull=True, - ).values("id", "name", "workspace__slug", "project_id", "target_date") + ).values("id", "name", "workspace__slug", "project_id", "start_date") return Response( { @@ -1048,7 +1055,6 @@ class WorkspaceThemeViewSet(BaseViewSet): class WorkspaceUserProfileStatsEndpoint(BaseAPIView): - def get(self, request, slug, user_id): try: filters = issue_filters(request.query_params, "GET") @@ -1077,6 +1083,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView): .filter(**filters) .values("priority") .annotate(priority_count=Count("priority")) + .filter(priority_count__gte=1) .annotate( priority_order=Case( *[ @@ -1146,14 +1153,18 @@ class 
WorkspaceUserProfileStatsEndpoint(BaseAPIView): upcoming_cycles = CycleIssue.objects.filter( workspace__slug=slug, cycle__start_date__gt=timezone.now().date(), - issue__assignees__in=[user_id,] + issue__assignees__in=[ + user_id, + ], ).values("cycle__name", "cycle__id", "cycle__project_id") present_cycle = CycleIssue.objects.filter( workspace__slug=slug, cycle__start_date__lt=timezone.now().date(), cycle__end_date__gt=timezone.now().date(), - issue__assignees__in=[user_id,] + issue__assignees__in=[ + user_id, + ], ).values("cycle__name", "cycle__id", "cycle__project_id") return Response( @@ -1166,7 +1177,7 @@ class WorkspaceUserProfileStatsEndpoint(BaseAPIView): "pending_issues": pending_issues_count, "subscribed_issues": subscribed_issues_count, "present_cycles": present_cycle, - "upcoming_cycles": upcoming_cycles, + "upcoming_cycles": upcoming_cycles, } ) except Exception as e: @@ -1184,7 +1195,6 @@ class WorkspaceUserActivityEndpoint(BaseAPIView): def get(self, request, slug, user_id): try: - projects = request.query_params.getlist("project", []) queryset = IssueActivity.objects.filter( @@ -1212,12 +1222,13 @@ class WorkspaceUserActivityEndpoint(BaseAPIView): class WorkspaceUserProfileEndpoint(BaseAPIView): - def get(self, request, slug, user_id): try: user_data = User.objects.get(pk=user_id) - requesting_workspace_member = WorkspaceMember.objects.get(workspace__slug=slug, member=request.user) + requesting_workspace_member = WorkspaceMember.objects.get( + workspace__slug=slug, member=request.user + ) projects = [] if requesting_workspace_member.role >= 10: projects = ( @@ -1227,7 +1238,8 @@ class WorkspaceUserProfileEndpoint(BaseAPIView): ) .annotate( created_issues=Count( - "project_issue", filter=Q(project_issue__created_by_id=user_id) + "project_issue", + filter=Q(project_issue__created_by_id=user_id), ) ) .annotate( @@ -1282,6 +1294,7 @@ class WorkspaceUserProfileEndpoint(BaseAPIView): "cover_image": user_data.cover_image, "date_joined": user_data.date_joined, "user_timezone": user_data.user_timezone, + "display_name": user_data.display_name, }, }, status=status.HTTP_200_OK, @@ -1439,3 +1452,24 @@ class WorkspaceLabelsEndpoint(BaseAPIView): {"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST, ) + + +class WorkspaceMembersEndpoint(BaseAPIView): + permission_classes = [ + WorkspaceEntityPermission, + ] + + def get(self, request, slug): + try: + workspace_members = WorkspaceMember.objects.filter( + workspace__slug=slug, + member__is_bot=False, + ).select_related("workspace", "member") + serialzier = WorkSpaceMemberSerializer(workspace_members, many=True) + return Response(serialzier.data, status=status.HTTP_200_OK) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) diff --git a/apiserver/plane/bgtasks/analytic_plot_export.py b/apiserver/plane/bgtasks/analytic_plot_export.py index 27b625445..492be8870 100644 --- a/apiserver/plane/bgtasks/analytic_plot_export.py +++ b/apiserver/plane/bgtasks/analytic_plot_export.py @@ -21,7 +21,7 @@ row_mapping = { "state__name": "State", "state__group": "State Group", "labels__name": "Label", - "assignees__email": "Assignee Name", + "assignees__display_name": "Assignee Name", "start_date": "Start Date", "target_date": "Due Date", "completed_at": "Completed At", @@ -51,12 +51,12 @@ def analytic_export_task(email, data, slug): segmented = segment assignee_details = {} - if x_axis in 
["assignees__email"] or segment in ["assignees__email"]: + if x_axis in ["assignees__id"] or segment in ["assignees__id"]: assignee_details = ( Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False) .order_by("assignees__id") .distinct("assignees__id") - .values("assignees__avatar", "assignees__email", "assignees__first_name", "assignees__last_name") + .values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id") ) if segment: @@ -93,19 +93,19 @@ def analytic_export_task(email, data, slug): else: generated_row.append("0") # x-axis replacement for names - if x_axis in ["assignees__email"]: - assignee = [user for user in assignee_details if str(user.get("assignees__email")) == str(item)] + if x_axis in ["assignees__id"]: + assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)] if len(assignee): generated_row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) rows.append(tuple(generated_row)) - # If segment is ["assignees__email"] then replace segment_zero rows with first and last names - if segmented in ["assignees__email"]: + # If segment is ["assignees__display_name"] then replace segment_zero rows with first and last names + if segmented in ["assignees__id"]: for index, segm in enumerate(row_zero[2:]): # find the name of the user - assignee = [user for user in assignee_details if str(user.get("assignees__email")) == str(segm)] + assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(segm)] if len(assignee): - row_zero[index] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) + row_zero[index + 2] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) rows = [tuple(row_zero)] + rows csv_buffer = io.StringIO() @@ -141,8 +141,8 @@ def analytic_export_task(email, data, slug): else distribution.get(item)[0].get("estimate "), ] # x-axis replacement to names - if x_axis in ["assignees__email"]: - assignee = [user for user in assignee_details if str(user.get("assignees__email")) == str(item)] + if x_axis in ["assignees__id"]: + assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)] if len(assignee): row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name")) diff --git a/apiserver/plane/bgtasks/export_task.py b/apiserver/plane/bgtasks/export_task.py new file mode 100644 index 000000000..d8cb9f0cd --- /dev/null +++ b/apiserver/plane/bgtasks/export_task.py @@ -0,0 +1,346 @@ +# Python imports +import csv +import io +import json +import boto3 +import zipfile + +# Django imports +from django.conf import settings +from django.utils import timezone + +# Third party imports +from celery import shared_task +from sentry_sdk import capture_exception +from botocore.client import Config +from openpyxl import Workbook + +# Module imports +from plane.db.models import Issue, ExporterHistory + + +def dateTimeConverter(time): + if time: + return time.strftime("%a, %d %b %Y %I:%M:%S %Z%z") + +def dateConverter(time): + if time: + return time.strftime("%a, %d %b %Y") + +def create_csv_file(data): + csv_buffer = io.StringIO() + csv_writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL) + + for row in data: + csv_writer.writerow(row) + + csv_buffer.seek(0) + return csv_buffer.getvalue() + + +def 
create_json_file(data): + return json.dumps(data) + + +def create_xlsx_file(data): + workbook = Workbook() + sheet = workbook.active + + for row in data: + sheet.append(row) + + xlsx_buffer = io.BytesIO() + workbook.save(xlsx_buffer) + xlsx_buffer.seek(0) + return xlsx_buffer.getvalue() + + +def create_zip_file(files): + zip_buffer = io.BytesIO() + with zipfile.ZipFile(zip_buffer, "w", zipfile.ZIP_DEFLATED) as zipf: + for filename, file_content in files: + zipf.writestr(filename, file_content) + + zip_buffer.seek(0) + return zip_buffer + + +def upload_to_s3(zip_file, workspace_id, token_id, slug): + s3 = boto3.client( + "s3", + region_name="ap-south-1", + aws_access_key_id=settings.AWS_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + config=Config(signature_version="s3v4"), + ) + file_name = f"{workspace_id}/export-{slug}-{token_id[:6]}-{timezone.now()}.zip" + + s3.upload_fileobj( + zip_file, + settings.AWS_S3_BUCKET_NAME, + file_name, + ExtraArgs={"ACL": "public-read", "ContentType": "application/zip"}, + ) + + expires_in = 7 * 24 * 60 * 60 + presigned_url = s3.generate_presigned_url( + "get_object", + Params={"Bucket": settings.AWS_S3_BUCKET_NAME, "Key": file_name}, + ExpiresIn=expires_in, + ) + + exporter_instance = ExporterHistory.objects.get(token=token_id) + + if presigned_url: + exporter_instance.url = presigned_url + exporter_instance.status = "completed" + exporter_instance.key = file_name + else: + exporter_instance.status = "failed" + + exporter_instance.save(update_fields=["status", "url","key"]) + + +def generate_table_row(issue): + return [ + f"""{issue["project__identifier"]}-{issue["sequence_id"]}""", + issue["project__name"], + issue["name"], + issue["description_stripped"], + issue["state__name"], + issue["priority"], + f"{issue['created_by__first_name']} {issue['created_by__last_name']}" + if issue["created_by__first_name"] and issue["created_by__last_name"] + else "", + f"{issue['assignees__first_name']} {issue['assignees__last_name']}" + if issue["assignees__first_name"] and issue["assignees__last_name"] + else "", + issue["labels__name"], + issue["issue_cycle__cycle__name"], + dateConverter(issue["issue_cycle__cycle__start_date"]), + dateConverter(issue["issue_cycle__cycle__end_date"]), + issue["issue_module__module__name"], + dateConverter(issue["issue_module__module__start_date"]), + dateConverter(issue["issue_module__module__target_date"]), + dateTimeConverter(issue["created_at"]), + dateTimeConverter(issue["updated_at"]), + dateTimeConverter(issue["completed_at"]), + dateTimeConverter(issue["archived_at"]), + ] + + +def generate_json_row(issue): + return { + "ID": f"""{issue["project__identifier"]}-{issue["sequence_id"]}""", + "Project": issue["project__name"], + "Name": issue["name"], + "Description": issue["description_stripped"], + "State": issue["state__name"], + "Priority": issue["priority"], + "Created By": f"{issue['created_by__first_name']} {issue['created_by__last_name']}" + if issue["created_by__first_name"] and issue["created_by__last_name"] + else "", + "Assignee": f"{issue['assignees__first_name']} {issue['assignees__last_name']}" + if issue["assignees__first_name"] and issue["assignees__last_name"] + else "", + "Labels": issue["labels__name"], + "Cycle Name": issue["issue_cycle__cycle__name"], + "Cycle Start Date": dateConverter(issue["issue_cycle__cycle__start_date"]), + "Cycle End Date": dateConverter(issue["issue_cycle__cycle__end_date"]), + "Module Name": issue["issue_module__module__name"], + "Module Start Date": 
dateConverter(issue["issue_module__module__start_date"]), + "Module Target Date": dateConverter(issue["issue_module__module__target_date"]), + "Created At": dateTimeConverter(issue["created_at"]), + "Updated At": dateTimeConverter(issue["updated_at"]), + "Completed At": dateTimeConverter(issue["completed_at"]), + "Archived At": dateTimeConverter(issue["archived_at"]), + } + + +def update_json_row(rows, row): + matched_index = next( + ( + index + for index, existing_row in enumerate(rows) + if existing_row["ID"] == row["ID"] + ), + None, + ) + + if matched_index is not None: + existing_assignees, existing_labels = ( + rows[matched_index]["Assignee"], + rows[matched_index]["Labels"], + ) + assignee, label = row["Assignee"], row["Labels"] + + if assignee is not None and assignee not in existing_assignees: + rows[matched_index]["Assignee"] += f", {assignee}" + if label is not None and label not in existing_labels: + rows[matched_index]["Labels"] += f", {label}" + else: + rows.append(row) + + +def update_table_row(rows, row): + matched_index = next( + (index for index, existing_row in enumerate(rows) if existing_row[0] == row[0]), + None, + ) + + if matched_index is not None: + existing_assignees, existing_labels = rows[matched_index][7:9] + assignee, label = row[7:9] + + if assignee is not None and assignee not in existing_assignees: + rows[matched_index][7] += f", {assignee}" + if label is not None and label not in existing_labels: + rows[matched_index][8] += f", {label}" + else: + rows.append(row) + + +def generate_csv(header, project_id, issues, files): + """ + Generate CSV export for all the passed issues. + """ + rows = [ + header, + ] + for issue in issues: + row = generate_table_row(issue) + update_table_row(rows, row) + csv_file = create_csv_file(rows) + files.append((f"{project_id}.csv", csv_file)) + + +def generate_json(header, project_id, issues, files): + rows = [] + for issue in issues: + row = generate_json_row(issue) + update_json_row(rows, row) + json_file = create_json_file(rows) + files.append((f"{project_id}.json", json_file)) + + +def generate_xlsx(header, project_id, issues, files): + rows = [header] + for issue in issues: + row = generate_table_row(issue) + update_table_row(rows, row) + xlsx_file = create_xlsx_file(rows) + files.append((f"{project_id}.xlsx", xlsx_file)) + + +@shared_task +def issue_export_task(provider, workspace_id, project_ids, token_id, multiple, slug): + try: + exporter_instance = ExporterHistory.objects.get(token=token_id) + exporter_instance.status = "processing" + exporter_instance.save(update_fields=["status"]) + + workspace_issues = ( + ( + Issue.objects.filter( + workspace__id=workspace_id, project_id__in=project_ids + ) + .select_related("project", "workspace", "state", "parent", "created_by") + .prefetch_related( + "assignees", "labels", "issue_cycle__cycle", "issue_module__module" + ) + .values( + "id", + "project__identifier", + "project__name", + "project__id", + "sequence_id", + "name", + "description_stripped", + "priority", + "state__name", + "created_at", + "updated_at", + "completed_at", + "archived_at", + "issue_cycle__cycle__name", + "issue_cycle__cycle__start_date", + "issue_cycle__cycle__end_date", + "issue_module__module__name", + "issue_module__module__start_date", + "issue_module__module__target_date", + "created_by__first_name", + "created_by__last_name", + "assignees__first_name", + "assignees__last_name", + "labels__name", + ) + ) + .order_by("project__identifier","sequence_id") + .distinct() + ) + # CSV header + header = [ + 
"ID", + "Project", + "Name", + "Description", + "State", + "Priority", + "Created By", + "Assignee", + "Labels", + "Cycle Name", + "Cycle Start Date", + "Cycle End Date", + "Module Name", + "Module Start Date", + "Module Target Date", + "Created At", + "Updated At", + "Completed At", + "Archived At", + ] + + EXPORTER_MAPPER = { + "csv": generate_csv, + "json": generate_json, + "xlsx": generate_xlsx, + } + + files = [] + if multiple: + for project_id in project_ids: + issues = workspace_issues.filter(project__id=project_id) + exporter = EXPORTER_MAPPER.get(provider) + if exporter is not None: + exporter( + header, + project_id, + issues, + files, + ) + + else: + exporter = EXPORTER_MAPPER.get(provider) + if exporter is not None: + exporter( + header, + workspace_id, + workspace_issues, + files, + ) + + zip_buffer = create_zip_file(files) + upload_to_s3(zip_buffer, workspace_id, token_id, slug) + + except Exception as e: + exporter_instance = ExporterHistory.objects.get(token=token_id) + exporter_instance.status = "failed" + exporter_instance.reason = str(e) + exporter_instance.save(update_fields=["status", "reason"]) + + # Print logs if in DEBUG mode + if settings.DEBUG: + print(e) + capture_exception(e) + return diff --git a/apiserver/plane/bgtasks/exporter_expired_task.py b/apiserver/plane/bgtasks/exporter_expired_task.py new file mode 100644 index 000000000..799904347 --- /dev/null +++ b/apiserver/plane/bgtasks/exporter_expired_task.py @@ -0,0 +1,38 @@ +# Python imports +import boto3 +from datetime import timedelta + +# Django imports +from django.conf import settings +from django.utils import timezone +from django.db.models import Q + +# Third party imports +from celery import shared_task +from botocore.client import Config + +# Module imports +from plane.db.models import ExporterHistory + + +@shared_task +def delete_old_s3_link(): + # Get a list of keys and IDs to process + expired_exporter_history = ExporterHistory.objects.filter( + Q(url__isnull=False) & Q(created_at__lte=timezone.now() - timedelta(days=8)) + ).values_list("key", "id") + + s3 = boto3.client( + "s3", + region_name="ap-south-1", + aws_access_key_id=settings.AWS_ACCESS_KEY_ID, + aws_secret_access_key=settings.AWS_SECRET_ACCESS_KEY, + config=Config(signature_version="s3v4"), + ) + + for file_name, exporter_id in expired_exporter_history: + # Delete object from S3 + if file_name: + s3.delete_object(Bucket=settings.AWS_S3_BUCKET_NAME, Key=file_name) + + ExporterHistory.objects.filter(id=exporter_id).update(url=None) diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py index 8f34daf52..1cc6c85cc 100644 --- a/apiserver/plane/bgtasks/issue_activites_task.py +++ b/apiserver/plane/bgtasks/issue_activites_task.py @@ -48,7 +48,7 @@ def track_name( field="name", project=project, workspace=project.workspace, - comment=f"{actor.email} updated the name to {requested_data.get('name')}", + comment=f"updated the name to {requested_data.get('name')}", ) ) @@ -75,7 +75,7 @@ def track_parent( field="parent", project=project, workspace=project.workspace, - comment=f"{actor.email} updated the parent issue to None", + comment=f"updated the parent issue to None", old_identifier=old_parent.id, new_identifier=None, ) @@ -95,7 +95,7 @@ def track_parent( field="parent", project=project, workspace=project.workspace, - comment=f"{actor.email} updated the parent issue to {new_parent.name}", + comment=f"updated the parent issue to {new_parent.name}", old_identifier=old_parent.id if old_parent 
is not None else None, new_identifier=new_parent.id, ) @@ -123,7 +123,7 @@ def track_priority( field="priority", project=project, workspace=project.workspace, - comment=f"{actor.email} updated the priority to None", + comment=f"updated the priority to None", ) ) else: @@ -137,7 +137,7 @@ def track_priority( field="priority", project=project, workspace=project.workspace, - comment=f"{actor.email} updated the priority to {requested_data.get('priority')}", + comment=f"updated the priority to {requested_data.get('priority')}", ) ) @@ -165,7 +165,7 @@ def track_state( field="state", project=project, workspace=project.workspace, - comment=f"{actor.email} updated the state to {new_state.name}", + comment=f"updated the state to {new_state.name}", old_identifier=old_state.id, new_identifier=new_state.id, ) @@ -184,19 +184,24 @@ def track_description( if current_instance.get("description_html") != requested_data.get( "description_html" ): - issue_activities.append( - IssueActivity( - issue_id=issue_id, - actor=actor, - verb="updated", - old_value=current_instance.get("description_html"), - new_value=requested_data.get("description_html"), - field="description", - project=project, - workspace=project.workspace, - comment=f"{actor.email} updated the description to {requested_data.get('description_html')}", - ) - ) + last_activity = IssueActivity.objects.filter(issue_id=issue_id).order_by("-created_at").first() + if(last_activity is not None and last_activity.field == "description" and actor.id == last_activity.actor_id): + last_activity.created_at = timezone.now() + last_activity.save(update_fields=["created_at"]) + else: + issue_activities.append( + IssueActivity( + issue_id=issue_id, + actor=actor, + verb="updated", + old_value=current_instance.get("description_html"), + new_value=requested_data.get("description_html"), + field="description", + project=project, + workspace=project.workspace, + comment=f"updated the description to {requested_data.get('description_html')}", + ) + ) # Track changes in issue target date @@ -220,7 +225,7 @@ def track_target_date( field="target_date", project=project, workspace=project.workspace, - comment=f"{actor.email} updated the target date to None", + comment=f"updated the target date to None", ) ) else: @@ -234,7 +239,7 @@ def track_target_date( field="target_date", project=project, workspace=project.workspace, - comment=f"{actor.email} updated the target date to {requested_data.get('target_date')}", + comment=f"updated the target date to {requested_data.get('target_date')}", ) ) @@ -260,7 +265,7 @@ def track_start_date( field="start_date", project=project, workspace=project.workspace, - comment=f"{actor.email} updated the start date to None", + comment=f"updated the start date to None", ) ) else: @@ -274,7 +279,7 @@ def track_start_date( field="start_date", project=project, workspace=project.workspace, - comment=f"{actor.email} updated the start date to {requested_data.get('start_date')}", + comment=f"updated the start date to {requested_data.get('start_date')}", ) ) @@ -303,7 +308,7 @@ def track_labels( field="labels", project=project, workspace=project.workspace, - comment=f"{actor.email} added label {label.name}", + comment=f"added label {label.name}", new_identifier=label.id, old_identifier=None, ) @@ -324,7 +329,7 @@ def track_labels( field="labels", project=project, workspace=project.workspace, - comment=f"{actor.email} removed label {label.name}", + comment=f"removed label {label.name}", old_identifier=label.id, new_identifier=None, ) @@ -353,12 +358,12 @@ 
def track_assignees( actor=actor, verb="updated", old_value="", - new_value=assignee.email, + new_value=assignee.display_name, field="assignees", project=project, workspace=project.workspace, - comment=f"{actor.email} added assignee {assignee.email}", - new_identifier=actor.id, + comment=f"added assignee {assignee.display_name}", + new_identifier=assignee.id, ) ) @@ -374,13 +379,13 @@ def track_assignees( issue_id=issue_id, actor=actor, verb="updated", - old_value=assignee.email, + old_value=assignee.display_name, new_value="", field="assignees", project=project, workspace=project.workspace, - comment=f"{actor.email} removed assignee {assignee.email}", - old_identifier=actor.id, + comment=f"removed assignee {assignee.display_name}", + old_identifier=assignee.id, ) ) @@ -419,7 +424,7 @@ def track_blocks( field="blocks", project=project, workspace=project.workspace, - comment=f"{actor.email} added blocking issue {issue.project.identifier}-{issue.sequence_id}", + comment=f"added blocking issue {project.identifier}-{issue.sequence_id}", new_identifier=issue.id, ) ) @@ -441,7 +446,7 @@ def track_blocks( field="blocks", project=project, workspace=project.workspace, - comment=f"{actor.email} removed blocking issue {issue.project.identifier}-{issue.sequence_id}", + comment=f"removed blocking issue {project.identifier}-{issue.sequence_id}", old_identifier=issue.id, ) ) @@ -481,7 +486,7 @@ def track_blockings( field="blocking", project=project, workspace=project.workspace, - comment=f"{actor.email} added blocked by issue {issue.project.identifier}-{issue.sequence_id}", + comment=f"added blocked by issue {project.identifier}-{issue.sequence_id}", new_identifier=issue.id, ) ) @@ -503,7 +508,7 @@ def track_blockings( field="blocking", project=project, workspace=project.workspace, - comment=f"{actor.email} removed blocked by issue {issue.project.identifier}-{issue.sequence_id}", + comment=f"removed blocked by issue {project.identifier}-{issue.sequence_id}", old_identifier=issue.id, ) ) @@ -517,7 +522,7 @@ def create_issue_activity( issue_id=issue_id, project=project, workspace=project.workspace, - comment=f"{actor.email} created the issue", + comment=f"created the issue", verb="created", actor=actor, ) @@ -539,7 +544,7 @@ def track_estimate_points( field="estimate_point", project=project, workspace=project.workspace, - comment=f"{actor.email} updated the estimate point to None", + comment=f"updated the estimate point to None", ) ) else: @@ -553,7 +558,7 @@ def track_estimate_points( field="estimate_point", project=project, workspace=project.workspace, - comment=f"{actor.email} updated the estimate point to {requested_data.get('estimate_point')}", + comment=f"updated the estimate point to {requested_data.get('estimate_point')}", ) ) @@ -567,7 +572,7 @@ def track_archive_at( issue_id=issue_id, project=project, workspace=project.workspace, - comment=f"{actor.email} has restored the issue", + comment=f"has restored the issue", verb="updated", actor=actor, field="archived_at", @@ -661,7 +666,7 @@ def delete_issue_activity( IssueActivity( project=project, workspace=project.workspace, - comment=f"{actor.email} deleted the issue", + comment=f"deleted the issue", verb="deleted", actor=actor, field="issue", @@ -682,7 +687,7 @@ def create_comment_activity( issue_id=issue_id, project=project, workspace=project.workspace, - comment=f"{actor.email} created a comment", + comment=f"created a comment", verb="created", actor=actor, field="comment", @@ -707,7 +712,7 @@ def update_comment_activity( issue_id=issue_id, 
project=project, workspace=project.workspace, - comment=f"{actor.email} updated a comment", + comment=f"updated a comment", verb="updated", actor=actor, field="comment", @@ -728,7 +733,7 @@ def delete_comment_activity( issue_id=issue_id, project=project, workspace=project.workspace, - comment=f"{actor.email} deleted the comment", + comment=f"deleted the comment", verb="deleted", actor=actor, field="comment", @@ -766,7 +771,7 @@ def create_cycle_issue_activity( field="cycles", project=project, workspace=project.workspace, - comment=f"{actor.email} updated cycle from {old_cycle.name} to {new_cycle.name}", + comment=f"updated cycle from {old_cycle.name} to {new_cycle.name}", old_identifier=old_cycle.id, new_identifier=new_cycle.id, ) @@ -787,7 +792,7 @@ def create_cycle_issue_activity( field="cycles", project=project, workspace=project.workspace, - comment=f"{actor.email} added cycle {cycle.name}", + comment=f"added cycle {cycle.name}", new_identifier=cycle.id, ) ) @@ -816,7 +821,7 @@ def delete_cycle_issue_activity( field="cycles", project=project, workspace=project.workspace, - comment=f"{actor.email} removed this issue from {cycle.name if cycle is not None else None}", + comment=f"removed this issue from {cycle.name if cycle is not None else None}", old_identifier=cycle.id if cycle is not None else None, ) ) @@ -852,7 +857,7 @@ def create_module_issue_activity( field="modules", project=project, workspace=project.workspace, - comment=f"{actor.email} updated module from {old_module.name} to {new_module.name}", + comment=f"updated module from {old_module.name} to {new_module.name}", old_identifier=old_module.id, new_identifier=new_module.id, ) @@ -872,7 +877,7 @@ def create_module_issue_activity( field="modules", project=project, workspace=project.workspace, - comment=f"{actor.email} added module {module.name}", + comment=f"added module {module.name}", new_identifier=module.id, ) ) @@ -901,7 +906,7 @@ def delete_module_issue_activity( field="modules", project=project, workspace=project.workspace, - comment=f"{actor.email} removed this issue from {module.name if module is not None else None}", + comment=f"removed this issue from {module.name if module is not None else None}", old_identifier=module.id if module is not None else None, ) ) @@ -920,7 +925,7 @@ def create_link_activity( issue_id=issue_id, project=project, workspace=project.workspace, - comment=f"{actor.email} created a link", + comment=f"created a link", verb="created", actor=actor, field="link", @@ -944,7 +949,7 @@ def update_link_activity( issue_id=issue_id, project=project, workspace=project.workspace, - comment=f"{actor.email} updated a link", + comment=f"updated a link", verb="updated", actor=actor, field="link", @@ -969,7 +974,7 @@ def delete_link_activity( issue_id=issue_id, project=project, workspace=project.workspace, - comment=f"{actor.email} deleted the link", + comment=f"deleted the link", verb="deleted", actor=actor, field="link", @@ -992,7 +997,7 @@ def create_attachment_activity( issue_id=issue_id, project=project, workspace=project.workspace, - comment=f"{actor.email} created an attachment", + comment=f"created an attachment", verb="created", actor=actor, field="attachment", @@ -1010,7 +1015,7 @@ def delete_attachment_activity( issue_id=issue_id, project=project, workspace=project.workspace, - comment=f"{actor.email} deleted the attachment", + comment=f"deleted the attachment", verb="deleted", actor=actor, field="attachment", diff --git a/apiserver/plane/celery.py b/apiserver/plane/celery.py index 
ed0dc419e..15fe8af52 100644 --- a/apiserver/plane/celery.py +++ b/apiserver/plane/celery.py @@ -20,6 +20,10 @@ app.conf.beat_schedule = { "task": "plane.bgtasks.issue_automation_task.archive_and_close_old_issues", "schedule": crontab(hour=0, minute=0), }, + "check-every-day-to-delete_exporter_history": { + "task": "plane.bgtasks.exporter_expired_task.delete_old_s3_link", + "schedule": crontab(hour=0, minute=0), + }, } # Load task modules from all registered Django app configs. diff --git a/apiserver/plane/db/migrations/0041_cycle_sort_order_issuecomment_access_and_more.py b/apiserver/plane/db/migrations/0041_cycle_sort_order_issuecomment_access_and_more.py new file mode 100644 index 000000000..07c302c76 --- /dev/null +++ b/apiserver/plane/db/migrations/0041_cycle_sort_order_issuecomment_access_and_more.py @@ -0,0 +1,243 @@ +# Generated by Django 4.2.3 on 2023-08-14 07:12 + +from django.conf import settings +import django.contrib.postgres.fields +from django.db import migrations, models +import django.db.models.deletion +import plane.db.models.exporter +import plane.db.models.project +import uuid +import random +import string + +def generate_display_name(apps, schema_editor): + UserModel = apps.get_model("db", "User") + updated_users = [] + for obj in UserModel.objects.all(): + obj.display_name = ( + obj.email.split("@")[0] + if len(obj.email.split("@")) + else "".join(random.choice(string.ascii_letters) for _ in range(6)) + ) + updated_users.append(obj) + UserModel.objects.bulk_update(updated_users, ["display_name"], batch_size=100) + + +def rectify_field_issue_activity(apps, schema_editor): + Model = apps.get_model("db", "IssueActivity") + updated_activity = [] + for obj in Model.objects.filter(field="assignee"): + obj.field = "assignees" + updated_activity.append(obj) + + Model.objects.bulk_update(updated_activity, ["field"], batch_size=100) + + +def update_assignee_issue_activity(apps, schema_editor): + Model = apps.get_model("db", "IssueActivity") + updated_activity = [] + + # Get all the users + User = apps.get_model("db", "User") + users = User.objects.values("id", "email", "display_name") + + for obj in Model.objects.filter(field="assignees"): + if bool(obj.new_value) and not bool(obj.old_value): + # Get user from list + assigned_user = [ + user for user in users if user.get("email") == obj.new_value + ] + if assigned_user: + obj.new_value = assigned_user[0].get("display_name") + obj.new_identifier = assigned_user[0].get("id") + # Update the comment + words = obj.comment.split() + words[-1] = assigned_user[0].get("display_name") + obj.comment = " ".join(words) + + if bool(obj.old_value) and not bool(obj.new_value): + # Get user from list + assigned_user = [ + user for user in users if user.get("email") == obj.old_value + ] + if assigned_user: + obj.old_value = assigned_user[0].get("display_name") + obj.old_identifier = assigned_user[0].get("id") + # Update the comment + words = obj.comment.split() + words[-1] = assigned_user[0].get("display_name") + obj.comment = " ".join(words) + + updated_activity.append(obj) + + Model.objects.bulk_update( + updated_activity, + ["old_value", "new_value", "old_identifier", "new_identifier", "comment"], + batch_size=200, + ) + + +def update_name_activity(apps, schema_editor): + Model = apps.get_model("db", "IssueActivity") + update_activity = [] + for obj in Model.objects.filter(field="name"): + obj.comment = obj.comment.replace("start date", "name") + update_activity.append(obj) + + Model.objects.bulk_update(update_activity, ["comment"], 
batch_size=1000) + + +def random_cycle_order(apps, schema_editor): + CycleModel = apps.get_model("db", "Cycle") + updated_cycles = [] + for obj in CycleModel.objects.all(): + obj.sort_order = random.randint(1, 65536) + updated_cycles.append(obj) + CycleModel.objects.bulk_update(updated_cycles, ["sort_order"], batch_size=100) + + +def random_module_order(apps, schema_editor): + ModuleModel = apps.get_model("db", "Module") + updated_modules = [] + for obj in ModuleModel.objects.all(): + obj.sort_order = random.randint(1, 65536) + updated_modules.append(obj) + ModuleModel.objects.bulk_update(updated_modules, ["sort_order"], batch_size=100) + + +def update_user_issue_properties(apps, schema_editor): + IssuePropertyModel = apps.get_model("db", "IssueProperty") + updated_issue_properties = [] + for obj in IssuePropertyModel.objects.all(): + obj.properties["start_date"] = True + updated_issue_properties.append(obj) + IssuePropertyModel.objects.bulk_update( + updated_issue_properties, ["properties"], batch_size=100 + ) + + +def workspace_member_properties(apps, schema_editor): + WorkspaceMemberModel = apps.get_model("db", "WorkspaceMember") + updated_workspace_members = [] + for obj in WorkspaceMemberModel.objects.all(): + obj.view_props["properties"]["start_date"] = True + obj.default_props["properties"]["start_date"] = True + updated_workspace_members.append(obj) + + WorkspaceMemberModel.objects.bulk_update( + updated_workspace_members, ["view_props", "default_props"], batch_size=100 + ) + +class Migration(migrations.Migration): + + dependencies = [ + ('db', '0040_projectmember_preferences_user_cover_image_and_more'), + ] + + operations = [ + migrations.AddField( + model_name='cycle', + name='sort_order', + field=models.FloatField(default=65535), + ), + migrations.AddField( + model_name='issuecomment', + name='access', + field=models.CharField(choices=[('INTERNAL', 'INTERNAL'), ('EXTERNAL', 'EXTERNAL')], default='INTERNAL', max_length=100), + ), + migrations.AddField( + model_name='module', + name='sort_order', + field=models.FloatField(default=65535), + ), + migrations.AddField( + model_name='user', + name='display_name', + field=models.CharField(default='', max_length=255), + ), + migrations.CreateModel( + name='ExporterHistory', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('project', django.contrib.postgres.fields.ArrayField(base_field=models.UUIDField(default=uuid.uuid4), blank=True, null=True, size=None)), + ('provider', models.CharField(choices=[('json', 'json'), ('csv', 'csv'), ('xlsx', 'xlsx')], max_length=50)), + ('status', models.CharField(choices=[('queued', 'Queued'), ('processing', 'Processing'), ('completed', 'Completed'), ('failed', 'Failed')], default='queued', max_length=50)), + ('reason', models.TextField(blank=True)), + ('key', models.TextField(blank=True)), + ('url', models.URLField(blank=True, max_length=800, null=True)), + ('token', models.CharField(default=plane.db.models.exporter.generate_token, max_length=255, unique=True)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('initiated_by', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
related_name='workspace_exporters', to=settings.AUTH_USER_MODEL)), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_exporters', to='db.workspace')), + ], + options={ + 'verbose_name': 'Exporter', + 'verbose_name_plural': 'Exporters', + 'db_table': 'exporters', + 'ordering': ('-created_at',), + }, + ), + migrations.CreateModel( + name='ProjectDeployBoard', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('anchor', models.CharField(db_index=True, default=plane.db.models.project.get_anchor, max_length=255, unique=True)), + ('comments', models.BooleanField(default=False)), + ('reactions', models.BooleanField(default=False)), + ('votes', models.BooleanField(default=False)), + ('views', models.JSONField(default=plane.db.models.project.get_default_views)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('inbox', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='bord_inbox', to='db.inbox')), + ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='workspace_%(class)s', to='db.workspace')), + ], + options={ + 'verbose_name': 'Project Deploy Board', + 'verbose_name_plural': 'Project Deploy Boards', + 'db_table': 'project_deploy_boards', + 'ordering': ('-created_at',), + 'unique_together': {('project', 'anchor')}, + }, + ), + migrations.CreateModel( + name='IssueVote', + fields=[ + ('created_at', models.DateTimeField(auto_now_add=True, verbose_name='Created At')), + ('updated_at', models.DateTimeField(auto_now=True, verbose_name='Last Modified At')), + ('id', models.UUIDField(db_index=True, default=uuid.uuid4, editable=False, primary_key=True, serialize=False, unique=True)), + ('vote', models.IntegerField(choices=[(-1, 'DOWNVOTE'), (1, 'UPVOTE')])), + ('actor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='votes', to=settings.AUTH_USER_MODEL)), + ('created_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_created_by', to=settings.AUTH_USER_MODEL, verbose_name='Created By')), + ('issue', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='votes', to='db.issue')), + ('project', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='project_%(class)s', to='db.project')), + ('updated_by', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, related_name='%(class)s_updated_by', to=settings.AUTH_USER_MODEL, verbose_name='Last Modified By')), + ('workspace', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, 
related_name='workspace_%(class)s', to='db.workspace')), + ], + options={ + 'verbose_name': 'Issue Vote', + 'verbose_name_plural': 'Issue Votes', + 'db_table': 'issue_votes', + 'ordering': ('-created_at',), + 'unique_together': {('issue', 'actor')}, + }, + ), + migrations.AlterField( + model_name='modulelink', + name='title', + field=models.CharField(blank=True, max_length=255, null=True), + ), + migrations.RunPython(generate_display_name), + migrations.RunPython(rectify_field_issue_activity), + migrations.RunPython(update_assignee_issue_activity), + migrations.RunPython(update_name_activity), + migrations.RunPython(random_cycle_order), + migrations.RunPython(random_module_order), + migrations.RunPython(update_user_issue_properties), + migrations.RunPython(workspace_member_properties), + ] diff --git a/apiserver/plane/db/models/__init__.py b/apiserver/plane/db/models/__init__.py index 959dea5f7..659eea3eb 100644 --- a/apiserver/plane/db/models/__init__.py +++ b/apiserver/plane/db/models/__init__.py @@ -18,6 +18,7 @@ from .project import ( ProjectMemberInvite, ProjectIdentifier, ProjectFavorite, + ProjectDeployBoard, ) from .issue import ( @@ -36,6 +37,7 @@ from .issue import ( IssueSubscriber, IssueReaction, CommentReaction, + IssueVote, ) from .asset import FileAsset @@ -72,4 +74,6 @@ from .inbox import Inbox, InboxIssue from .analytic import AnalyticView -from .notification import Notification \ No newline at end of file +from .notification import Notification + +from .exporter import ExporterHistory \ No newline at end of file diff --git a/apiserver/plane/db/models/cycle.py b/apiserver/plane/db/models/cycle.py index c8c43cef4..56301e3d3 100644 --- a/apiserver/plane/db/models/cycle.py +++ b/apiserver/plane/db/models/cycle.py @@ -17,6 +17,7 @@ class Cycle(ProjectBaseModel): related_name="owned_by_cycle", ) view_props = models.JSONField(default=dict) + sort_order = models.FloatField(default=65535) class Meta: verbose_name = "Cycle" @@ -24,6 +25,17 @@ class Cycle(ProjectBaseModel): db_table = "cycles" ordering = ("-created_at",) + def save(self, *args, **kwargs): + if self._state.adding: + smallest_sort_order = Cycle.objects.filter( + project=self.project + ).aggregate(smallest=models.Min("sort_order"))["smallest"] + + if smallest_sort_order is not None: + self.sort_order = smallest_sort_order - 10000 + + super(Cycle, self).save(*args, **kwargs) + def __str__(self): """Return name of the cycle""" return f"{self.name} <{self.project.name}>" diff --git a/apiserver/plane/db/models/exporter.py b/apiserver/plane/db/models/exporter.py new file mode 100644 index 000000000..fce31c8e7 --- /dev/null +++ b/apiserver/plane/db/models/exporter.py @@ -0,0 +1,56 @@ +import uuid + +# Python imports +from uuid import uuid4 + +# Django imports +from django.db import models +from django.conf import settings +from django.contrib.postgres.fields import ArrayField + +# Module imports +from . 
import BaseModel + +def generate_token(): + return uuid4().hex + +class ExporterHistory(BaseModel): + workspace = models.ForeignKey( + "db.WorkSpace", on_delete=models.CASCADE, related_name="workspace_exporters" + ) + project = ArrayField(models.UUIDField(default=uuid.uuid4), blank=True, null=True) + provider = models.CharField( + max_length=50, + choices=( + ("json", "json"), + ("csv", "csv"), + ("xlsx", "xlsx"), + ), + ) + status = models.CharField( + max_length=50, + choices=( + ("queued", "Queued"), + ("processing", "Processing"), + ("completed", "Completed"), + ("failed", "Failed"), + ), + default="queued", + ) + reason = models.TextField(blank=True) + key = models.TextField(blank=True) + url = models.URLField(max_length=800, blank=True, null=True) + token = models.CharField(max_length=255, default=generate_token, unique=True) + initiated_by = models.ForeignKey( + settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="workspace_exporters" + ) + + class Meta: + verbose_name = "Exporter" + verbose_name_plural = "Exporters" + db_table = "exporters" + ordering = ("-created_at",) + + def __str__(self): + """Return name of the service""" + return f"{self.provider} <{self.workspace.name}>" \ No newline at end of file diff --git a/apiserver/plane/db/models/issue.py b/apiserver/plane/db/models/issue.py index 2a4462942..7af9e6e14 100644 --- a/apiserver/plane/db/models/issue.py +++ b/apiserver/plane/db/models/issue.py @@ -108,11 +108,7 @@ class Issue(ProjectBaseModel): ~models.Q(name="Triage"), project=self.project ).first() self.state = random_state - if random_state.group == "started": - self.start_date = timezone.now().date() else: - if default_state.group == "started": - self.start_date = timezone.now().date() self.state = default_state except ImportError: pass @@ -127,8 +123,6 @@ class Issue(ProjectBaseModel): PageBlock.objects.filter(issue_id=self.id).filter().update( completed_at=timezone.now() ) - elif self.state.group == "started": - self.start_date = timezone.now().date() else: PageBlock.objects.filter(issue_id=self.id).filter().update( completed_at=None @@ -153,9 +147,6 @@ class Issue(ProjectBaseModel): if largest_sort_order is not None: self.sort_order = largest_sort_order + 10000 - # If adding it to started state - if self.state.group == "started": - self.start_date = timezone.now().date() # Strip the html tags using html parser self.description_stripped = ( None @@ -310,6 +301,14 @@ class IssueComment(ProjectBaseModel): related_name="comments", null=True, ) + access = models.CharField( + choices=( + ("INTERNAL", "INTERNAL"), + ("EXTERNAL", "EXTERNAL"), + ), + default="INTERNAL", + max_length=100, + ) def save(self, *args, **kwargs): self.comment_stripped = ( @@ -425,13 +424,14 @@ class IssueSubscriber(ProjectBaseModel): class IssueReaction(ProjectBaseModel): - actor = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="issue_reactions", ) - issue = models.ForeignKey(Issue, on_delete=models.CASCADE, related_name="issue_reactions") + issue = models.ForeignKey( + Issue, on_delete=models.CASCADE, related_name="issue_reactions" + ) reaction = models.CharField(max_length=20) class Meta: @@ -446,13 +446,14 @@ class IssueReaction(ProjectBaseModel): class CommentReaction(ProjectBaseModel): - actor = models.ForeignKey( settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="comment_reactions", ) - comment = models.ForeignKey(IssueComment, on_delete=models.CASCADE, related_name="comment_reactions") + comment = models.ForeignKey( + 
IssueComment, on_delete=models.CASCADE, related_name="comment_reactions" + ) reaction = models.CharField(max_length=20) class Meta: @@ -466,6 +467,27 @@ class CommentReaction(ProjectBaseModel): return f"{self.issue.name} {self.actor.email}" +class IssueVote(ProjectBaseModel): + issue = models.ForeignKey(Issue, on_delete=models.CASCADE, related_name="votes") + actor = models.ForeignKey( + settings.AUTH_USER_MODEL, on_delete=models.CASCADE, related_name="votes" + ) + vote = models.IntegerField( + choices=( + (-1, "DOWNVOTE"), + (1, "UPVOTE"), + ) + ) + class Meta: + unique_together = ["issue", "actor"] + verbose_name = "Issue Vote" + verbose_name_plural = "Issue Votes" + db_table = "issue_votes" + ordering = ("-created_at",) + + def __str__(self): + return f"{self.issue.name} {self.actor.email}" + # TODO: Find a better method to save the model @receiver(post_save, sender=Issue) diff --git a/apiserver/plane/db/models/module.py b/apiserver/plane/db/models/module.py index 8ad0ec838..e286d297a 100644 --- a/apiserver/plane/db/models/module.py +++ b/apiserver/plane/db/models/module.py @@ -40,6 +40,7 @@ class Module(ProjectBaseModel): through_fields=("module", "member"), ) view_props = models.JSONField(default=dict) + sort_order = models.FloatField(default=65535) class Meta: unique_together = ["name", "project"] @@ -48,6 +49,17 @@ class Module(ProjectBaseModel): db_table = "modules" ordering = ("-created_at",) + def save(self, *args, **kwargs): + if self._state.adding: + smallest_sort_order = Module.objects.filter( + project=self.project + ).aggregate(smallest=models.Min("sort_order"))["smallest"] + + if smallest_sort_order is not None: + self.sort_order = smallest_sort_order - 10000 + + super(Module, self).save(*args, **kwargs) + def __str__(self): return f"{self.name} {self.start_date} {self.target_date}" @@ -86,7 +98,7 @@ class ModuleIssue(ProjectBaseModel): class ModuleLink(ProjectBaseModel): - title = models.CharField(max_length=255, null=True) + title = models.CharField(max_length=255, blank=True, null=True) url = models.URLField() module = models.ForeignKey( Module, on_delete=models.CASCADE, related_name="link_module" diff --git a/apiserver/plane/db/models/project.py b/apiserver/plane/db/models/project.py index 2cbd70369..0c2b5cb96 100644 --- a/apiserver/plane/db/models/project.py +++ b/apiserver/plane/db/models/project.py @@ -1,3 +1,6 @@ +# Python imports +from uuid import uuid4 + # Django imports from django.db import models from django.conf import settings @@ -31,12 +34,9 @@ def get_default_props(): "showEmptyGroups": True, } + def get_default_preferences(): - return { - "pages": { - "block_display": True - } - } + return {"pages": {"block_display": True}} class Project(BaseModel): @@ -157,7 +157,6 @@ class ProjectMember(ProjectBaseModel): preferences = models.JSONField(default=get_default_preferences) sort_order = models.FloatField(default=65535) - def save(self, *args, **kwargs): if self._state.adding: smallest_sort_order = ProjectMember.objects.filter( @@ -217,3 +216,41 @@ class ProjectFavorite(ProjectBaseModel): def __str__(self): """Return user of the project""" return f"{self.user.email} <{self.project.name}>" + + +def get_anchor(): + return uuid4().hex + + +def get_default_views(): + return { + "list": True, + "kanban": True, + "calendar": True, + "gantt": True, + "spreadsheet": True, + } + + +class ProjectDeployBoard(ProjectBaseModel): + anchor = models.CharField( + max_length=255, default=get_anchor, unique=True, db_index=True + ) + comments = models.BooleanField(default=False) + 
reactions = models.BooleanField(default=False) + inbox = models.ForeignKey( + "db.Inbox", related_name="bord_inbox", on_delete=models.SET_NULL, null=True + ) + votes = models.BooleanField(default=False) + views = models.JSONField(default=get_default_views) + + class Meta: + unique_together = ["project", "anchor"] + verbose_name = "Project Deploy Board" + verbose_name_plural = "Project Deploy Boards" + db_table = "project_deploy_boards" + ordering = ("-created_at",) + + def __str__(self): + """Return project and anchor""" + return f"{self.anchor} <{self.project.name}>" diff --git a/apiserver/plane/db/models/user.py b/apiserver/plane/db/models/user.py index 0b643271e..3975a3b93 100644 --- a/apiserver/plane/db/models/user.py +++ b/apiserver/plane/db/models/user.py @@ -1,6 +1,7 @@ # Python imports -from enum import unique import uuid +import string +import random # Django imports from django.db import models @@ -18,6 +19,7 @@ from sentry_sdk import capture_exception from slack_sdk import WebClient from slack_sdk.errors import SlackApiError + def get_default_onboarding(): return { "profile_complete": False, @@ -26,6 +28,7 @@ def get_default_onboarding(): "workspace_join": False, } + class User(AbstractBaseUser, PermissionsMixin): id = models.UUIDField( default=uuid.uuid4, unique=True, editable=False, db_index=True, primary_key=True @@ -81,6 +84,7 @@ class User(AbstractBaseUser, PermissionsMixin): role = models.CharField(max_length=300, null=True, blank=True) is_bot = models.BooleanField(default=False) theme = models.JSONField(default=dict) + display_name = models.CharField(max_length=255, default="") is_tour_completed = models.BooleanField(default=False) onboarding_step = models.JSONField(default=get_default_onboarding) @@ -107,6 +111,13 @@ class User(AbstractBaseUser, PermissionsMixin): self.token = uuid.uuid4().hex + uuid.uuid4().hex self.token_updated_at = timezone.now() + if not self.display_name: + self.display_name = ( + self.email.split("@")[0] + if len(self.email.split("@")) + else "".join(random.choice(string.ascii_letters) for _ in range(6)) + ) + if self.is_superuser: self.is_staff = True diff --git a/apiserver/plane/db/models/workspace.py b/apiserver/plane/db/models/workspace.py index 09db42002..48d8c9f2d 100644 --- a/apiserver/plane/db/models/workspace.py +++ b/apiserver/plane/db/models/workspace.py @@ -33,6 +33,7 @@ def get_default_props(): "estimate": True, "created_on": True, "updated_on": True, + "start_date": True, }, "showEmptyGroups": True, } diff --git a/apiserver/plane/settings/common.py b/apiserver/plane/settings/common.py index e3a918c18..59e0bd31b 100644 --- a/apiserver/plane/settings/common.py +++ b/apiserver/plane/settings/common.py @@ -214,4 +214,4 @@ SIMPLE_JWT = { CELERY_TIMEZONE = TIME_ZONE CELERY_TASK_SERIALIZER = 'json' CELERY_ACCEPT_CONTENT = ['application/json'] -CELERY_IMPORTS = ("plane.bgtasks.issue_automation_task",) +CELERY_IMPORTS = ("plane.bgtasks.issue_automation_task","plane.bgtasks.exporter_expired_task") diff --git a/apiserver/plane/utils/issue_filters.py b/apiserver/plane/utils/issue_filters.py index a7a946e60..34e1e8203 100644 --- a/apiserver/plane/utils/issue_filters.py +++ b/apiserver/plane/utils/issue_filters.py @@ -124,10 +124,11 @@ def filter_created_at(params, filter, method): else: if params.get("created_at", None) and len(params.get("created_at")): for query in params.get("created_at"): - if query.get("timeline", "after") == "after": - filter["created_at__date__gte"] = query.get("datetime") + created_at_query = query.split(";") + if 
len(created_at_query) == 2 and "after" in created_at_query: + filter["created_at__date__gte"] = created_at_query[0] else: - filter["created_at__date__lte"] = query.get("datetime") + filter["created_at__date__lte"] = created_at_query[0] return filter @@ -144,10 +145,11 @@ def filter_updated_at(params, filter, method): else: if params.get("updated_at", None) and len(params.get("updated_at")): for query in params.get("updated_at"): - if query.get("timeline", "after") == "after": - filter["updated_at__date__gte"] = query.get("datetime") + updated_at_query = query.split(";") + if len(updated_at_query) == 2 and "after" in updated_at_query: + filter["updated_at__date__gte"] = updated_at_query[0] else: - filter["updated_at__date__lte"] = query.get("datetime") + filter["updated_at__date__lte"] = updated_at_query[0] return filter @@ -164,10 +166,11 @@ def filter_start_date(params, filter, method): else: if params.get("start_date", None) and len(params.get("start_date")): for query in params.get("start_date"): - if query.get("timeline", "after") == "after": - filter["start_date__gte"] = query.get("datetime") + start_date_query = query.split(";") + if len(start_date_query) == 2 and "after" in start_date_query: + filter["start_date__gte"] = start_date_query[0] else: - filter["start_date__lte"] = query.get("datetime") + filter["start_date__lte"] = start_date_query[0] return filter @@ -184,10 +187,11 @@ def filter_target_date(params, filter, method): else: if params.get("target_date", None) and len(params.get("target_date")): for query in params.get("target_date"): - if query.get("timeline", "after") == "after": - filter["target_date__gt"] = query.get("datetime") + target_date_query = query.split(";") + if len(target_date_query) == 2 and "after" in target_date_query: + filter["target_date__gt"] = target_date_query[0] else: - filter["target_date__lt"] = query.get("datetime") + filter["target_date__lt"] = target_date_query[0] return filter @@ -205,10 +209,11 @@ def filter_completed_at(params, filter, method): else: if params.get("completed_at", None) and len(params.get("completed_at")): for query in params.get("completed_at"): - if query.get("timeline", "after") == "after": - filter["completed_at__date__gte"] = query.get("datetime") + completed_at_query = query.split(";") + if len(completed_at_query) == 2 and "after" in completed_at_query: + filter["completed_at__date__gte"] = completed_at_query[0] else: - filter["completed_at__lte"] = query.get("datetime") + filter["completed_at__lte"] = completed_at_query[0] return filter @@ -292,9 +297,16 @@ def filter_subscribed_issues(params, filter, method): return filter +def filter_start_target_date_issues(params, filter, method): + start_target_date = params.get("start_target_date", "false") + if start_target_date == "true": + filter["target_date__isnull"] = False + filter["start_date__isnull"] = False + return filter + + def issue_filters(query_params, method): filter = dict() - print(query_params) ISSUE_FILTER = { "state": filter_state, @@ -318,6 +330,7 @@ def issue_filters(query_params, method): "inbox_status": filter_inbox_status, "sub_issue": filter_sub_issue_toggle, "subscriber": filter_subscribed_issues, + "start_target_date": filter_start_target_date_issues, } for key, value in ISSUE_FILTER.items(): diff --git a/apiserver/requirements/base.txt b/apiserver/requirements/base.txt index 76c3dace9..ca9d881ef 100644 --- a/apiserver/requirements/base.txt +++ b/apiserver/requirements/base.txt @@ -32,4 +32,5 @@ celery==5.3.1 django_celery_beat==2.5.0 
psycopg-binary==3.1.9 psycopg-c==3.1.9 -scout-apm==2.26.1 \ No newline at end of file +scout-apm==2.26.1 +openpyxl==3.1.2 \ No newline at end of file diff --git a/apiserver/templates/emails/exports/issues.html b/apiserver/templates/emails/exports/issues.html new file mode 100644 index 000000000..a97432b9b --- /dev/null +++ b/apiserver/templates/emails/exports/issues.html @@ -0,0 +1,9 @@ + + + Dear {{username}},
+ The issue data you requested has been successfully exported from Plane. The export includes all relevant information about the issues from your selected projects.
+ Please find the CSV file attached. If you have any questions or need further assistance, please don't hesitate to contact our support team at engineering@plane.so. We're here to help!
+ Thank you for using Plane. We hope this export will aid you in effectively managing your projects.
+ Regards, + Team Plane + diff --git a/apps/app/Dockerfile.web b/apps/app/Dockerfile.web index e0b5f29c1..2b28e1fd1 100644 --- a/apps/app/Dockerfile.web +++ b/apps/app/Dockerfile.web @@ -33,8 +33,8 @@ RUN yarn turbo run build --filter=app ENV NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL \ BUILT_NEXT_PUBLIC_API_BASE_URL=$NEXT_PUBLIC_API_BASE_URL - -RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL} +ENV NEXT_PUBLIC_DEPLOY_URL=${NEXT_PUBLIC_DEPLOY_URL} +RUN /usr/local/bin/replace-env-vars.sh http://NEXT_PUBLIC_WEBAPP_URL_PLACEHOLDER ${NEXT_PUBLIC_API_BASE_URL} app FROM node:18-alpine AS runner WORKDIR /app diff --git a/apps/app/components/analytics/custom-analytics/graph/custom-tooltip.tsx b/apps/app/components/analytics/custom-analytics/graph/custom-tooltip.tsx index 573cda25c..e58d81666 100644 --- a/apps/app/components/analytics/custom-analytics/graph/custom-tooltip.tsx +++ b/apps/app/components/analytics/custom-analytics/graph/custom-tooltip.tsx @@ -14,17 +14,17 @@ type Props = { export const CustomTooltip: React.FC = ({ datum, analytics, params }) => { let tooltipValue: string | number = ""; + const renderAssigneeName = (assigneeId: string): string => { + const assignee = analytics.extras.assignee_details.find((a) => a.assignees__id === assigneeId); + + if (!assignee) return "No assignee"; + + return assignee.assignees__display_name || "No assignee"; + }; + if (params.segment) { if (DATE_KEYS.includes(params.segment)) tooltipValue = renderMonthAndYear(datum.id); - else if (params.segment === "assignees__email") { - const assignee = analytics.extras.assignee_details.find( - (a) => a.assignees__email === datum.id - ); - - if (assignee) - tooltipValue = assignee.assignees__first_name + " " + assignee.assignees__last_name; - else tooltipValue = "No assignees"; - } else tooltipValue = datum.id; + else tooltipValue = datum.id; } else { if (DATE_KEYS.includes(params.x_axis)) tooltipValue = datum.indexValue; else tooltipValue = datum.id === "count" ? "Issue count" : "Estimate"; @@ -49,7 +49,10 @@ export const CustomTooltip: React.FC = ({ datum, analytics, params }) => : "" }`} > - {tooltipValue}: + {params.segment === "assignees__id" + ? renderAssigneeName(tooltipValue.toString()) + : tooltipValue} + : {datum.value} diff --git a/apps/app/components/analytics/custom-analytics/graph/index.tsx b/apps/app/components/analytics/custom-analytics/graph/index.tsx index 3f70dddc5..349f9884d 100644 --- a/apps/app/components/analytics/custom-analytics/graph/index.tsx +++ b/apps/app/components/analytics/custom-analytics/graph/index.tsx @@ -29,6 +29,14 @@ export const AnalyticsGraph: React.FC = ({ yAxisKey, fullScreen, }) => { + const renderAssigneeName = (assigneeId: string): string => { + const assignee = analytics.extras.assignee_details.find((a) => a.assignees__id === assigneeId); + + if (!assignee) return "?"; + + return assignee.assignees__display_name || "?"; + }; + const generateYAxisTickValues = () => { if (!analytics) return []; @@ -70,17 +78,17 @@ export const AnalyticsGraph: React.FC = ({ height={fullScreen ? "400px" : "300px"} margin={{ right: 20, - bottom: params.x_axis === "assignees__email" ? 50 : longestXAxisLabel.length * 5 + 20, + bottom: params.x_axis === "assignees__id" ? 50 : longestXAxisLabel.length * 5 + 20, }} axisBottom={{ tickSize: 0, tickPadding: 10, tickRotation: barGraphData.data.length > 7 ? -45 : 0, renderTick: - params.x_axis === "assignees__email" + params.x_axis === "assignees__id" ? 
(datum) => { const avatar = analytics.extras.assignee_details?.find( - (a) => a?.assignees__email === datum?.value + (a) => a?.assignees__display_name === datum?.value )?.assignees__avatar; if (avatar && avatar !== "") @@ -101,7 +109,11 @@ export const AnalyticsGraph: React.FC = ({ - {datum.value && datum.value !== "None" + {params.x_axis === "assignees__id" + ? datum.value && datum.value !== "None" + ? renderAssigneeName(datum.value)[0].toUpperCase() + : "?" + : datum.value && datum.value !== "None" ? `${datum.value}`.toUpperCase()[0] : "?"} diff --git a/apps/app/components/analytics/custom-analytics/sidebar.tsx b/apps/app/components/analytics/custom-analytics/sidebar.tsx index d1a29da41..6189f325b 100644 --- a/apps/app/components/analytics/custom-analytics/sidebar.tsx +++ b/apps/app/components/analytics/custom-analytics/sidebar.tsx @@ -277,9 +277,7 @@ export const AnalyticsSidebar: React.FC = ({
Lead
- - {cycleDetails.owned_by?.first_name} {cycleDetails.owned_by?.last_name} - + {cycleDetails.owned_by?.display_name}
Start Date
@@ -305,10 +303,7 @@ export const AnalyticsSidebar: React.FC = ({
Lead
- - {moduleDetails.lead_detail?.first_name}{" "} - {moduleDetails.lead_detail?.last_name} - + {moduleDetails.lead_detail?.display_name}
Start Date
diff --git a/apps/app/components/analytics/custom-analytics/table.tsx b/apps/app/components/analytics/custom-analytics/table.tsx index 92eac5085..75d1d7d40 100644 --- a/apps/app/components/analytics/custom-analytics/table.tsx +++ b/apps/app/components/analytics/custom-analytics/table.tsx @@ -22,15 +22,12 @@ type Props = { }; export const AnalyticsTable: React.FC = ({ analytics, barGraphData, params, yAxisKey }) => { - const renderAssigneeName = (email: string): string => { - const assignee = analytics.extras.assignee_details.find((a) => a.assignees__email === email); + const renderAssigneeName = (assigneeId: string): string => { + const assignee = analytics.extras.assignee_details.find((a) => a.assignees__id === assigneeId); if (!assignee) return "No assignee"; - if (assignee.assignees__first_name !== "") - return assignee.assignees__first_name + " " + assignee.assignees__last_name; - - return email; + return assignee.assignees__display_name || "No assignee"; }; return ( @@ -65,10 +62,10 @@ export const AnalyticsTable: React.FC = ({ analytics, barGraphData, param }} /> )} - {DATE_KEYS.includes(params.segment ?? "") - ? renderMonthAndYear(key) - : params.segment === "assignees__email" + {params.segment === "assignees__id" ? renderAssigneeName(key) + : DATE_KEYS.includes(params.segment ?? "") + ? renderMonthAndYear(key) : key}
@@ -108,7 +105,7 @@ export const AnalyticsTable: React.FC = ({ analytics, barGraphData, param }} /> )} - {params.x_axis === "assignees__email" + {params.x_axis === "assignees__id" ? renderAssigneeName(`${item.name}`) : addSpaceIfCamelCase(`${item.name}`)} diff --git a/apps/app/components/analytics/scope-and-demand/leaderboard.tsx b/apps/app/components/analytics/scope-and-demand/leaderboard.tsx index 081fc6302..e52657c03 100644 --- a/apps/app/components/analytics/scope-and-demand/leaderboard.tsx +++ b/apps/app/components/analytics/scope-and-demand/leaderboard.tsx @@ -1,22 +1,38 @@ +// ui +import { ProfileEmptyState } from "components/ui"; +// image +import emptyUsers from "public/empty-state/empty_users.svg"; + type Props = { users: { avatar: string | null; - email: string | null; + display_name: string | null; firstName: string; lastName: string; count: number; + id: string; }[]; title: string; + emptyStateMessage: string; + workspaceSlug: string; }; -export const AnalyticsLeaderboard: React.FC = ({ users, title }) => ( +export const AnalyticsLeaderboard: React.FC = ({ + users, + title, + emptyStateMessage, + workspaceSlug, +}) => (
{title}
{users.length > 0 ? (
{users.map((user) => ( -
@@ -25,24 +41,26 @@ export const AnalyticsLeaderboard: React.FC = ({ users, title }) => ( {user.email
) : (
- {user.firstName !== "" ? user.firstName[0] : "?"} + {user.display_name !== "" ? user?.display_name?.[0] : "?"}
)} - {user.firstName !== "" ? `${user.firstName} ${user.lastName}` : "No assignee"} + {user.display_name !== "" ? `${user.display_name}` : "No assignee"}
{user.count} -
+ ))}
) : ( -
No matching data found.
+
+ +
)}
); diff --git a/apps/app/components/analytics/scope-and-demand/scope-and-demand.tsx b/apps/app/components/analytics/scope-and-demand/scope-and-demand.tsx index cfc315ac1..dc7e65515 100644 --- a/apps/app/components/analytics/scope-and-demand/scope-and-demand.tsx +++ b/apps/app/components/analytics/scope-and-demand/scope-and-demand.tsx @@ -56,22 +56,28 @@ export const ScopeAndDemand: React.FC = ({ fullScreen = true }) => { ({ avatar: user?.created_by__avatar, - email: user?.created_by__email, firstName: user?.created_by__first_name, lastName: user?.created_by__last_name, + display_name: user?.created_by__display_name, count: user?.count, + id: user?.created_by__id, }))} title="Most issues created" + emptyStateMessage="Co-workers and the number of issues created by them appear here." + workspaceSlug={workspaceSlug?.toString() ?? ""} /> ({ avatar: user?.assignees__avatar, - email: user?.assignees__email, firstName: user?.assignees__first_name, lastName: user?.assignees__last_name, + display_name: user?.assignees__display_name, count: user?.count, + id: user?.assignees__id, }))} title="Most issues closed" + emptyStateMessage="Co-workers and the number of issues closed by them appear here." + workspaceSlug={workspaceSlug?.toString() ?? ""} />
diff --git a/apps/app/components/analytics/scope-and-demand/scope.tsx b/apps/app/components/analytics/scope-and-demand/scope.tsx index ac605c8f9..b01354b93 100644 --- a/apps/app/components/analytics/scope-and-demand/scope.tsx +++ b/apps/app/components/analytics/scope-and-demand/scope.tsx @@ -1,5 +1,7 @@ // ui -import { BarGraph } from "components/ui"; +import { BarGraph, ProfileEmptyState } from "components/ui"; +// image +import emptyBarGraph from "public/empty-state/empty_bar_graph.svg"; // types import { IDefaultAnalyticsResponse } from "types"; @@ -16,23 +18,20 @@ export const AnalyticsScope: React.FC = ({ defaultAnalytics }) => ( {defaultAnalytics.pending_issue_user.length > 0 ? ( `#f97316`} customYAxisTickValues={defaultAnalytics.pending_issue_user.map((d) => d.count)} tooltip={(datum) => { const assignee = defaultAnalytics.pending_issue_user.find( - (a) => a.assignees__email === `${datum.indexValue}` + (a) => a.assignees__display_name === `${datum.indexValue}` ); return (
- {assignee - ? assignee.assignees__first_name + " " + assignee.assignees__last_name - : "No assignee"} - :{" "} + {assignee ? assignee.assignees__display_name : "No assignee"}:{" "} {datum.value}
@@ -73,8 +72,12 @@ export const AnalyticsScope: React.FC = ({ defaultAnalytics }) => ( }} /> ) : ( -
- No matching data found. +
+
)}
diff --git a/apps/app/components/analytics/scope-and-demand/year-wise-issues.tsx b/apps/app/components/analytics/scope-and-demand/year-wise-issues.tsx index 621706f0e..87127ed60 100644 --- a/apps/app/components/analytics/scope-and-demand/year-wise-issues.tsx +++ b/apps/app/components/analytics/scope-and-demand/year-wise-issues.tsx @@ -1,5 +1,7 @@ // ui -import { LineGraph } from "components/ui"; +import { LineGraph, ProfileEmptyState } from "components/ui"; +// image +import emptyGraph from "public/empty-state/empty_graph.svg"; // types import { IDefaultAnalyticsResponse } from "types"; // constants @@ -48,7 +50,13 @@ export const AnalyticsYearWiseIssues: React.FC = ({ defaultAnalytics }) = enableArea /> ) : ( -
No matching data found.
+
+ +
)}
); diff --git a/apps/app/components/command-palette/change-interface-theme.tsx b/apps/app/components/command-palette/change-interface-theme.tsx index b34212b7f..87d1289ae 100644 --- a/apps/app/components/command-palette/change-interface-theme.tsx +++ b/apps/app/components/command-palette/change-interface-theme.tsx @@ -7,12 +7,20 @@ import { useTheme } from "next-themes"; import { SettingIcon } from "components/icons"; import userService from "services/user.service"; import useUser from "hooks/use-user"; +// helper +import { unsetCustomCssVariables } from "helpers/theme.helper"; +// mobx react lite +import { observer } from "mobx-react-lite"; +// mobx store +import { useMobxStore } from "lib/mobx/store-provider"; type Props = { setIsPaletteOpen: Dispatch>; }; -export const ChangeInterfaceTheme: React.FC = ({ setIsPaletteOpen }) => { +export const ChangeInterfaceTheme: React.FC = observer(({ setIsPaletteOpen }) => { + const store: any = useMobxStore(); + const [mounted, setMounted] = useState(false); const { setTheme } = useTheme(); @@ -21,27 +29,11 @@ export const ChangeInterfaceTheme: React.FC = ({ setIsPaletteOpen }) => { const updateUserTheme = (newTheme: string) => { if (!user) return; - setTheme(newTheme); - - mutateUser((prevData) => { - if (!prevData) return prevData; - - return { - ...prevData, - theme: { - ...prevData.theme, - theme: newTheme, - }, - }; - }, false); - - userService.updateUser({ - theme: { - ...user.theme, - theme: newTheme, - }, - }); + return store.user + .updateCurrentUserSettings({ theme: { ...user.theme, theme: newTheme } }) + .then((response: any) => response) + .catch((error: any) => error); }; // useEffect only runs on the client, so now we can safely show the UI @@ -70,4 +62,4 @@ export const ChangeInterfaceTheme: React.FC = ({ setIsPaletteOpen }) => { ))} ); -}; +}); diff --git a/apps/app/components/command-palette/command-k.tsx b/apps/app/components/command-palette/command-k.tsx index 75d7e5bcc..a1525a348 100644 --- a/apps/app/components/command-palette/command-k.tsx +++ b/apps/app/components/command-palette/command-k.tsx @@ -354,8 +354,8 @@ export const CommandK: React.FC = ({ deleteIssue, isPaletteOpen, setIsPal { - router.push(currentSection.path(item)); setIsPaletteOpen(false); + router.push(currentSection.path(item)); }} value={`${key}-${item?.name}`} className="focus:outline-none" @@ -379,6 +379,7 @@ export const CommandK: React.FC = ({ deleteIssue, isPaletteOpen, setIsPal { + setIsPaletteOpen(false); setPlaceholder("Change state..."); setSearchTerm(""); setPages([...pages, "change-issue-state"]); @@ -460,6 +461,7 @@ export const CommandK: React.FC = ({ deleteIssue, isPaletteOpen, setIsPal { + setIsPaletteOpen(false); const e = new KeyboardEvent("keydown", { key: "c", }); @@ -479,6 +481,7 @@ export const CommandK: React.FC = ({ deleteIssue, isPaletteOpen, setIsPal { + setIsPaletteOpen(false); const e = new KeyboardEvent("keydown", { key: "p", }); @@ -500,6 +503,7 @@ export const CommandK: React.FC = ({ deleteIssue, isPaletteOpen, setIsPal { + setIsPaletteOpen(false); const e = new KeyboardEvent("keydown", { key: "q", }); @@ -517,6 +521,7 @@ export const CommandK: React.FC = ({ deleteIssue, isPaletteOpen, setIsPal { + setIsPaletteOpen(false); const e = new KeyboardEvent("keydown", { key: "m", }); @@ -534,6 +539,7 @@ export const CommandK: React.FC = ({ deleteIssue, isPaletteOpen, setIsPal { + setIsPaletteOpen(false); const e = new KeyboardEvent("keydown", { key: "v", }); @@ -551,6 +557,7 @@ export const CommandK: React.FC = ({ deleteIssue, 
isPaletteOpen, setIsPal { + setIsPaletteOpen(false); const e = new KeyboardEvent("keydown", { key: "d", }); @@ -568,11 +575,12 @@ export const CommandK: React.FC = ({ deleteIssue, isPaletteOpen, setIsPal {projectDetails && projectDetails.inbox_view && ( + onSelect={() => { + setIsPaletteOpen(false); redirect( `/${workspaceSlug}/projects/${projectId}/inbox/${inboxList?.[0]?.id}` - ) - } + ); + }} className="focus:outline-none" >
@@ -731,12 +739,21 @@ export const CommandK: React.FC = ({ deleteIssue, isPaletteOpen, setIsPal
redirect(`/${workspaceSlug}/settings/import-export`)} + onSelect={() => redirect(`/${workspaceSlug}/settings/imports`)} className="focus:outline-none" >
- Import/Export + Import +
+
+ redirect(`/${workspaceSlug}/settings/exports`)} + className="focus:outline-none" + > +
+ + Export
diff --git a/apps/app/components/command-palette/command-pallette.tsx b/apps/app/components/command-palette/command-pallette.tsx index 0b4c9577b..4dc29afec 100644 --- a/apps/app/components/command-palette/command-pallette.tsx +++ b/apps/app/components/command-palette/command-pallette.tsx @@ -1,11 +1,8 @@ import React, { useCallback, useEffect, useState } from "react"; import { useRouter } from "next/router"; - import useSWR from "swr"; - // hooks -import useTheme from "hooks/use-theme"; import useToast from "hooks/use-toast"; import useUser from "hooks/use-user"; // components @@ -24,8 +21,14 @@ import issuesService from "services/issues.service"; import inboxService from "services/inbox.service"; // fetch keys import { INBOX_LIST, ISSUE_DETAILS } from "constants/fetch-keys"; +// mobx store +import { useMobxStore } from "lib/mobx/store-provider"; +import { observable } from "mobx"; +import { observer } from "mobx-react-lite"; + +export const CommandPalette: React.FC = observer(() => { + const store: any = useMobxStore(); -export const CommandPalette: React.FC = () => { const [isPaletteOpen, setIsPaletteOpen] = useState(false); const [isIssueModalOpen, setIsIssueModalOpen] = useState(false); const [isProjectModalOpen, setIsProjectModalOpen] = useState(false); @@ -43,13 +46,12 @@ export const CommandPalette: React.FC = () => { const { user } = useUser(); const { setToastAlert } = useToast(); - const { toggleCollapsed } = useTheme(); const { data: issueDetails } = useSWR( workspaceSlug && projectId && issueId ? ISSUE_DETAILS(issueId as string) : null, workspaceSlug && projectId && issueId ? () => - issuesService.retrieve(workspaceSlug as string, projectId as string, issueId as string) + issuesService.retrieve(workspaceSlug as string, projectId as string, issueId as string) : null ); @@ -74,53 +76,52 @@ export const CommandPalette: React.FC = () => { const handleKeyDown = useCallback( (e: KeyboardEvent) => { - const singleShortcutKeys = ["p", "v", "d", "h", "q", "m"]; const { key, ctrlKey, metaKey, altKey, shiftKey } = e; if (!key) return; + const keyPressed = key.toLowerCase(); + const cmdClicked = ctrlKey || metaKey; + // if on input, textarea or editor, don't do anything if ( - !(e.target instanceof HTMLTextAreaElement) && - !(e.target instanceof HTMLInputElement) && - !(e.target as Element).classList?.contains("remirror-editor") - ) { - if ((ctrlKey || metaKey) && keyPressed === "k") { - e.preventDefault(); - setIsPaletteOpen(true); - } else if ((ctrlKey || metaKey) && keyPressed === "c") { - if (altKey) { + e.target instanceof HTMLTextAreaElement || + e.target instanceof HTMLInputElement || + (e.target as Element).classList?.contains("ProseMirror") + ) + return; + + if (cmdClicked) { + if (keyPressed === "k") { + e.preventDefault(); + setIsPaletteOpen(true); + } else if (keyPressed === "c" && altKey) { e.preventDefault(); copyIssueUrlToClipboard(); + } else if (keyPressed === "b") { + e.preventDefault(); + store.theme.setSidebarCollapsed(!store?.theme?.sidebarCollapsed); + } + } else { + if (keyPressed === "c") { + setIsIssueModalOpen(true); + } else if (keyPressed === "p") { + setIsProjectModalOpen(true); + } else if (keyPressed === "v") { + setIsCreateViewModalOpen(true); + } else if (keyPressed === "d") { + setIsCreateUpdatePageModalOpen(true); + } else if (keyPressed === "h") { + setIsShortcutsModalOpen(true); + } else if (keyPressed === "q") { + setIsCreateCycleModalOpen(true); + } else if (keyPressed === "m") { + setIsCreateModuleModalOpen(true); + } else if (keyPressed === 
"backspace" || keyPressed === "delete") { + e.preventDefault(); + setIsBulkDeleteIssuesModalOpen(true); } - } else if (keyPressed === "c") { - e.preventDefault(); - setIsIssueModalOpen(true); - } else if ((ctrlKey || metaKey) && keyPressed === "b") { - e.preventDefault(); - toggleCollapsed(); - } else if (key === "Delete") { - e.preventDefault(); - setIsBulkDeleteIssuesModalOpen(true); - } else if ( - singleShortcutKeys.includes(keyPressed) && - (ctrlKey || metaKey || altKey || shiftKey) - ) { - e.preventDefault(); - } else if (keyPressed === "p") { - setIsProjectModalOpen(true); - } else if (keyPressed === "v") { - setIsCreateViewModalOpen(true); - } else if (keyPressed === "d") { - setIsCreateUpdatePageModalOpen(true); - } else if (keyPressed === "h") { - setIsShortcutsModalOpen(true); - } else if (keyPressed === "q") { - setIsCreateCycleModalOpen(true); - } else if (keyPressed === "m") { - setIsCreateModuleModalOpen(true); } - } }, - [toggleCollapsed, copyIssueUrlToClipboard] + [copyIssueUrlToClipboard] ); useEffect(() => { @@ -195,4 +196,4 @@ export const CommandPalette: React.FC = () => { /> ); -}; +}) \ No newline at end of file diff --git a/apps/app/components/command-palette/issue/change-issue-assignee.tsx b/apps/app/components/command-palette/issue/change-issue-assignee.tsx index e272839bd..ad3d4dfdb 100644 --- a/apps/app/components/command-palette/issue/change-issue-assignee.tsx +++ b/apps/app/components/command-palette/issue/change-issue-assignee.tsx @@ -34,15 +34,12 @@ export const ChangeIssueAssignee: React.FC = ({ setIsPaletteOpen, issue, const options = members?.map(({ member }) => ({ value: member.id, - query: - (member.first_name && member.first_name !== "" ? member.first_name : member.email) + - " " + - member.last_name ?? "", + query: member.display_name, content: ( <>
- {member.first_name && member.first_name !== "" ? member.first_name : member.email} + {member.display_name}
{issue.assignees.includes(member.id) && (
diff --git a/apps/app/components/core/activity.tsx b/apps/app/components/core/activity.tsx index f7622baa6..7ddf9c33c 100644 --- a/apps/app/components/core/activity.tsx +++ b/apps/app/components/core/activity.tsx @@ -35,6 +35,22 @@ const IssueLink = ({ activity }: { activity: IIssueActivity }) => { ); }; +const UserLink = ({ activity }: { activity: IIssueActivity }) => { + const router = useRouter(); + const { workspaceSlug } = router.query; + + return ( + + {activity.new_value && activity.new_value !== "" ? activity.new_value : activity.old_value} + + ); +}; + const activityDetails: { [key: string]: { message: (activity: IIssueActivity, showIssue: boolean) => React.ReactNode; @@ -46,8 +62,7 @@ const activityDetails: { if (activity.old_value === "") return ( <> - added a new assignee{" "} - {activity.new_value} + added a new assignee {showIssue && ( <> {" "} @@ -60,8 +75,7 @@ const activityDetails: { else return ( <> - removed the assignee{" "} - {activity.old_value} + removed the assignee {showIssue && ( <> {" "} @@ -428,6 +442,40 @@ const activityDetails: { ), icon: