diff --git a/apiserver/plane/api/serializers/__init__.py b/apiserver/plane/api/serializers/__init__.py index dbf7ca049..f1a7de3b8 100644 --- a/apiserver/plane/api/serializers/__init__.py +++ b/apiserver/plane/api/serializers/__init__.py @@ -1,5 +1,13 @@ from .base import BaseSerializer -from .user import UserSerializer, UserLiteSerializer, ChangePasswordSerializer, ResetPasswordSerializer, UserAdminLiteSerializer +from .user import ( + UserSerializer, + UserLiteSerializer, + ChangePasswordSerializer, + ResetPasswordSerializer, + UserAdminLiteSerializer, + UserMeSerializer, + UserMeSettingsSerializer, +) from .workspace import ( WorkSpaceSerializer, WorkSpaceMemberSerializer, @@ -8,9 +16,11 @@ from .workspace import ( WorkspaceLiteSerializer, WorkspaceThemeSerializer, WorkspaceMemberAdminSerializer, + WorkspaceMemberMeSerializer, ) from .project import ( ProjectSerializer, + ProjectListSerializer, ProjectDetailSerializer, ProjectMemberSerializer, ProjectMemberInviteSerializer, @@ -20,11 +30,16 @@ from .project import ( ProjectMemberLiteSerializer, ProjectDeployBoardSerializer, ProjectMemberAdminSerializer, - ProjectPublicMemberSerializer + ProjectPublicMemberSerializer, ) from .state import StateSerializer, StateLiteSerializer from .view import GlobalViewSerializer, IssueViewSerializer, IssueViewFavoriteSerializer -from .cycle import CycleSerializer, CycleIssueSerializer, CycleFavoriteSerializer, CycleWriteSerializer +from .cycle import ( + CycleSerializer, + CycleIssueSerializer, + CycleFavoriteSerializer, + CycleWriteSerializer, +) from .asset import FileAssetSerializer from .issue import ( IssueCreateSerializer, diff --git a/apiserver/plane/api/serializers/base.py b/apiserver/plane/api/serializers/base.py index 0c6bba468..89c9725d9 100644 --- a/apiserver/plane/api/serializers/base.py +++ b/apiserver/plane/api/serializers/base.py @@ -3,3 +3,56 @@ from rest_framework import serializers class BaseSerializer(serializers.ModelSerializer): id = serializers.PrimaryKeyRelatedField(read_only=True) + +class DynamicBaseSerializer(BaseSerializer): + + def __init__(self, *args, **kwargs): + # If 'fields' is provided in the arguments, remove it and store it separately. + # This is done so as not to pass this custom argument up to the superclass. + fields = kwargs.pop("fields", None) + + # Call the initialization of the superclass. + super().__init__(*args, **kwargs) + + # If 'fields' was provided, filter the fields of the serializer accordingly. + if fields is not None: + self.fields = self._filter_fields(fields) + + def _filter_fields(self, fields): + """ + Adjust the serializer's fields based on the provided 'fields' list. + + :param fields: List or dictionary specifying which fields to include in the serializer. + :return: The updated fields for the serializer. + """ + # Check each field_name in the provided fields. + for field_name in fields: + # If the field is a dictionary (indicating nested fields), + # loop through its keys and values. + if isinstance(field_name, dict): + for key, value in field_name.items(): + # If the value of this nested field is a list, + # perform a recursive filter on it. + if isinstance(value, list): + self._filter_fields(self.fields[key], value) + + # Create a list to store allowed fields. + allowed = [] + for item in fields: + # If the item is a string, it directly represents a field's name. + if isinstance(item, str): + allowed.append(item) + # If the item is a dictionary, it represents a nested field. + # Add the key of this dictionary to the allowed list. 
+ elif isinstance(item, dict): + allowed.append(list(item.keys())[0]) + + # Convert the current serializer's fields and the allowed fields to sets. + existing = set(self.fields) + allowed = set(allowed) + + # Remove fields from the serializer that aren't in the 'allowed' list. + for field_name in (existing - allowed): + self.fields.pop(field_name) + + return self.fields diff --git a/apiserver/plane/api/serializers/cycle.py b/apiserver/plane/api/serializers/cycle.py index ad214c52a..54aa4fd0c 100644 --- a/apiserver/plane/api/serializers/cycle.py +++ b/apiserver/plane/api/serializers/cycle.py @@ -12,10 +12,14 @@ from .workspace import WorkspaceLiteSerializer from .project import ProjectLiteSerializer from plane.db.models import Cycle, CycleIssue, CycleFavorite -class CycleWriteSerializer(BaseSerializer): +class CycleWriteSerializer(BaseSerializer): def validate(self, data): - if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None): + if ( + data.get("start_date", None) is not None + and data.get("end_date", None) is not None + and data.get("start_date", None) > data.get("end_date", None) + ): raise serializers.ValidationError("Start date cannot exceed end date") return data @@ -41,10 +45,14 @@ class CycleSerializer(BaseSerializer): project_detail = ProjectLiteSerializer(read_only=True, source="project") def validate(self, data): - if data.get("start_date", None) is not None and data.get("end_date", None) is not None and data.get("start_date", None) > data.get("end_date", None): + if ( + data.get("start_date", None) is not None + and data.get("end_date", None) is not None + and data.get("start_date", None) > data.get("end_date", None) + ): raise serializers.ValidationError("Start date cannot exceed end date") return data - + def get_assignees(self, obj): members = [ { @@ -52,7 +60,9 @@ class CycleSerializer(BaseSerializer): "display_name": assignee.display_name, "id": assignee.id, } - for issue_cycle in obj.issue_cycle.prefetch_related("issue__assignees").all() + for issue_cycle in obj.issue_cycle.prefetch_related( + "issue__assignees" + ).all() for assignee in issue_cycle.issue.assignees.all() ] # Use a set comprehension to return only the unique objects diff --git a/apiserver/plane/api/serializers/project.py b/apiserver/plane/api/serializers/project.py index 49d986cae..169b0c319 100644 --- a/apiserver/plane/api/serializers/project.py +++ b/apiserver/plane/api/serializers/project.py @@ -5,7 +5,7 @@ from django.db import IntegrityError from rest_framework import serializers # Module imports -from .base import BaseSerializer +from .base import BaseSerializer, DynamicBaseSerializer from plane.api.serializers.workspace import WorkSpaceSerializer, WorkspaceLiteSerializer from plane.api.serializers.user import UserLiteSerializer, UserAdminLiteSerializer from plane.db.models import ( @@ -94,8 +94,33 @@ class ProjectLiteSerializer(BaseSerializer): read_only_fields = fields +class ProjectListSerializer(DynamicBaseSerializer): + is_favorite = serializers.BooleanField(read_only=True) + total_members = serializers.IntegerField(read_only=True) + total_cycles = serializers.IntegerField(read_only=True) + total_modules = serializers.IntegerField(read_only=True) + is_member = serializers.BooleanField(read_only=True) + sort_order = serializers.FloatField(read_only=True) + member_role = serializers.IntegerField(read_only=True) + is_deployed = serializers.BooleanField(read_only=True) + members = 
serializers.SerializerMethodField() + + def get_members(self, obj): + project_members = ProjectMember.objects.filter(project_id=obj.id).values( + "id", + "member_id", + "member__display_name", + "member__avatar", + ) + return project_members + + class Meta: + model = Project + fields = "__all__" + + class ProjectDetailSerializer(BaseSerializer): - workspace = WorkSpaceSerializer(read_only=True) + # workspace = WorkSpaceSerializer(read_only=True) default_assignee = UserLiteSerializer(read_only=True) project_lead = UserLiteSerializer(read_only=True) is_favorite = serializers.BooleanField(read_only=True) @@ -148,8 +173,6 @@ class ProjectIdentifierSerializer(BaseSerializer): class ProjectFavoriteSerializer(BaseSerializer): - project_detail = ProjectLiteSerializer(source="project", read_only=True) - class Meta: model = ProjectFavorite fields = "__all__" @@ -178,12 +201,12 @@ class ProjectDeployBoardSerializer(BaseSerializer): fields = "__all__" read_only_fields = [ "workspace", - "project", "anchor", + "project", + "anchor", ] class ProjectPublicMemberSerializer(BaseSerializer): - class Meta: model = ProjectPublicMember fields = "__all__" diff --git a/apiserver/plane/api/serializers/user.py b/apiserver/plane/api/serializers/user.py index dcb00c6cb..ab28d0174 100644 --- a/apiserver/plane/api/serializers/user.py +++ b/apiserver/plane/api/serializers/user.py @@ -3,7 +3,7 @@ from rest_framework import serializers # Module import from .base import BaseSerializer -from plane.db.models import User +from plane.db.models import User, Workspace, WorkspaceMemberInvite class UserSerializer(BaseSerializer): @@ -33,6 +33,81 @@ class UserSerializer(BaseSerializer): return bool(obj.first_name) or bool(obj.last_name) +class UserMeSerializer(BaseSerializer): + class Meta: + model = User + fields = [ + "id", + "avatar", + "cover_image", + "date_joined", + "display_name", + "email", + "first_name", + "last_name", + "is_active", + "is_bot", + "is_email_verified", + "is_managed", + "is_onboarded", + "is_tour_completed", + "mobile_number", + "role", + "onboarding_step", + "user_timezone", + "username", + "theme", + "last_workspace_id", + ] + read_only_fields = fields + + +class UserMeSettingsSerializer(BaseSerializer): + workspace = serializers.SerializerMethodField() + + class Meta: + model = User + fields = [ + "id", + "email", + "workspace", + ] + read_only_fields = fields + + def get_workspace(self, obj): + workspace_invites = WorkspaceMemberInvite.objects.filter( + email=obj.email + ).count() + if obj.last_workspace_id is not None: + workspace = Workspace.objects.get( + pk=obj.last_workspace_id, workspace_member__member=obj.id + ) + return { + "last_workspace_id": obj.last_workspace_id, + "last_workspace_slug": workspace.slug, + "fallback_workspace_id": obj.last_workspace_id, + "fallback_workspace_slug": workspace.slug, + "invites": workspace_invites, + } + else: + fallback_workspace = ( + Workspace.objects.filter(workspace_member__member_id=obj.id) + .order_by("created_at") + .first() + ) + return { + "last_workspace_id": None, + "last_workspace_slug": None, + "fallback_workspace_id": fallback_workspace.id + if fallback_workspace is not None + else None, + "fallback_workspace_slug": fallback_workspace.slug + if fallback_workspace is not None + else None, + "invites": workspace_invites, + } + + class UserLiteSerializer(BaseSerializer): class Meta: model = User @@ -51,7 +126,6 @@ class UserLiteSerializer(BaseSerializer): class UserAdminLiteSerializer(BaseSerializer): - class Meta: model = User fields = [ diff 
--git a/apiserver/plane/api/serializers/workspace.py b/apiserver/plane/api/serializers/workspace.py index d27b66481..8c718a18e 100644 --- a/apiserver/plane/api/serializers/workspace.py +++ b/apiserver/plane/api/serializers/workspace.py @@ -54,6 +54,13 @@ class WorkSpaceMemberSerializer(BaseSerializer): fields = "__all__" +class WorkspaceMemberMeSerializer(BaseSerializer): + + class Meta: + model = WorkspaceMember + fields = "__all__" + + class WorkspaceMemberAdminSerializer(BaseSerializer): member = UserAdminLiteSerializer(read_only=True) workspace = WorkspaceLiteSerializer(read_only=True) diff --git a/apiserver/plane/api/urls/__init__.py b/apiserver/plane/api/urls/__init__.py new file mode 100644 index 000000000..49c2b772e --- /dev/null +++ b/apiserver/plane/api/urls/__init__.py @@ -0,0 +1,50 @@ +from .analytic import urlpatterns as analytic_urls +from .asset import urlpatterns as asset_urls +from .authentication import urlpatterns as authentication_urls +from .configuration import urlpatterns as configuration_urls +from .cycle import urlpatterns as cycle_urls +from .estimate import urlpatterns as estimate_urls +from .gpt import urlpatterns as gpt_urls +from .importer import urlpatterns as importer_urls +from .inbox import urlpatterns as inbox_urls +from .integration import urlpatterns as integration_urls +from .issue import urlpatterns as issue_urls +from .module import urlpatterns as module_urls +from .notification import urlpatterns as notification_urls +from .page import urlpatterns as page_urls +from .project import urlpatterns as project_urls +from .public_board import urlpatterns as public_board_urls +from .release_note import urlpatterns as release_note_urls +from .search import urlpatterns as search_urls +from .state import urlpatterns as state_urls +from .unsplash import urlpatterns as unsplash_urls +from .user import urlpatterns as user_urls +from .views import urlpatterns as view_urls +from .workspace import urlpatterns as workspace_urls + + +urlpatterns = [ + *analytic_urls, + *asset_urls, + *authentication_urls, + *configuration_urls, + *cycle_urls, + *estimate_urls, + *gpt_urls, + *importer_urls, + *inbox_urls, + *integration_urls, + *issue_urls, + *module_urls, + *notification_urls, + *page_urls, + *project_urls, + *public_board_urls, + *release_note_urls, + *search_urls, + *state_urls, + *unsplash_urls, + *user_urls, + *view_urls, + *workspace_urls, +] diff --git a/apiserver/plane/api/urls/analytic.py b/apiserver/plane/api/urls/analytic.py new file mode 100644 index 000000000..cb6155e32 --- /dev/null +++ b/apiserver/plane/api/urls/analytic.py @@ -0,0 +1,46 @@ +from django.urls import path + + +from plane.api.views import ( + AnalyticsEndpoint, + AnalyticViewViewset, + SavedAnalyticEndpoint, + ExportAnalyticsEndpoint, + DefaultAnalyticsEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//analytics/", + AnalyticsEndpoint.as_view(), + name="plane-analytics", + ), + path( + "workspaces//analytic-view/", + AnalyticViewViewset.as_view({"get": "list", "post": "create"}), + name="analytic-view", + ), + path( + "workspaces//analytic-view//", + AnalyticViewViewset.as_view( + {"get": "retrieve", "patch": "partial_update", "delete": "destroy"} + ), + name="analytic-view", + ), + path( + "workspaces//saved-analytic-view//", + SavedAnalyticEndpoint.as_view(), + name="saved-analytic-view", + ), + path( + "workspaces//export-analytics/", + ExportAnalyticsEndpoint.as_view(), + name="export-analytics", + ), + path( + "workspaces//default-analytics/", + 
DefaultAnalyticsEndpoint.as_view(),
+        name="default-analytics",
+    ),
+]
diff --git a/apiserver/plane/api/urls/asset.py b/apiserver/plane/api/urls/asset.py
new file mode 100644
index 000000000..b6ae9f42c
--- /dev/null
+++ b/apiserver/plane/api/urls/asset.py
@@ -0,0 +1,31 @@
+from django.urls import path
+
+
+from plane.api.views import (
+    FileAssetEndpoint,
+    UserAssetsEndpoint,
+)
+
+
+urlpatterns = [
+    path(
+        "workspaces//file-assets/",
+        FileAssetEndpoint.as_view(),
+        name="file-assets",
+    ),
+    path(
+        "workspaces/file-assets///",
+        FileAssetEndpoint.as_view(),
+        name="file-assets",
+    ),
+    path(
+        "users/file-assets/",
+        UserAssetsEndpoint.as_view(),
+        name="user-file-assets",
+    ),
+    path(
+        "users/file-assets//",
+        UserAssetsEndpoint.as_view(),
+        name="user-file-assets",
+    ),
+]
diff --git a/apiserver/plane/api/urls/authentication.py b/apiserver/plane/api/urls/authentication.py
new file mode 100644
index 000000000..44b7000ea
--- /dev/null
+++ b/apiserver/plane/api/urls/authentication.py
@@ -0,0 +1,68 @@
+from django.urls import path
+
+from rest_framework_simplejwt.views import TokenRefreshView
+
+
+from plane.api.views import (
+    # Authentication
+    SignUpEndpoint,
+    SignInEndpoint,
+    SignOutEndpoint,
+    MagicSignInEndpoint,
+    MagicSignInGenerateEndpoint,
+    OauthEndpoint,
+    ## End Authentication
+    # Auth Extended
+    ForgotPasswordEndpoint,
+    VerifyEmailEndpoint,
+    ResetPasswordEndpoint,
+    RequestEmailVerificationEndpoint,
+    ChangePasswordEndpoint,
+    ## End Auth Extended
+    # API Tokens
+    ApiTokenEndpoint,
+    ## End API Tokens
+)
+
+
+urlpatterns = [
+    # Social Auth
+    path("social-auth/", OauthEndpoint.as_view(), name="oauth"),
+    # Auth
+    path("sign-up/", SignUpEndpoint.as_view(), name="sign-up"),
+    path("sign-in/", SignInEndpoint.as_view(), name="sign-in"),
+    path("sign-out/", SignOutEndpoint.as_view(), name="sign-out"),
+    # Magic Sign In/Up
+    path(
+        "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate"
+    ),
+    path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"),
+    path("token/refresh/", TokenRefreshView.as_view(), name="token_refresh"),
+    # Email verification
+    path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"),
+    path(
+        "request-email-verify/",
+        RequestEmailVerificationEndpoint.as_view(),
+        name="request-reset-email",
+    ),
+    # Password Manipulation
+    path(
+        "users/me/change-password/",
+        ChangePasswordEndpoint.as_view(),
+        name="change-password",
+    ),
+    path(
+        "reset-password///",
+        ResetPasswordEndpoint.as_view(),
+        name="password-reset",
+    ),
+    path(
+        "forgot-password/",
+        ForgotPasswordEndpoint.as_view(),
+        name="forgot-password",
+    ),
+    # API Tokens
+    path("api-tokens/", ApiTokenEndpoint.as_view(), name="api-tokens"),
+    path("api-tokens//", ApiTokenEndpoint.as_view(), name="api-tokens"),
+    ## End API Tokens
+]
diff --git a/apiserver/plane/api/urls/configuration.py b/apiserver/plane/api/urls/configuration.py
new file mode 100644
index 000000000..321a56200
--- /dev/null
+++ b/apiserver/plane/api/urls/configuration.py
@@ -0,0 +1,12 @@
+from django.urls import path
+
+
+from plane.api.views import ConfigurationEndpoint
+
+urlpatterns = [
+    path(
+        "configs/",
+        ConfigurationEndpoint.as_view(),
+        name="configuration",
+    ),
+]
\ No newline at end of file
diff --git a/apiserver/plane/api/urls/cycle.py b/apiserver/plane/api/urls/cycle.py
new file mode 100644
index 000000000..068276361
--- /dev/null
+++ b/apiserver/plane/api/urls/cycle.py
@@ -0,0 +1,87 @@
+from django.urls import path
+
+
+from plane.api.views
import ( + CycleViewSet, + CycleIssueViewSet, + CycleDateCheckEndpoint, + CycleFavoriteViewSet, + TransferCycleIssueEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//cycles/", + CycleViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-cycle", + ), + path( + "workspaces//projects//cycles//", + CycleViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-cycle", + ), + path( + "workspaces//projects//cycles//cycle-issues/", + CycleIssueViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-cycle", + ), + path( + "workspaces//projects//cycles//cycle-issues//", + CycleIssueViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-cycle", + ), + path( + "workspaces//projects//cycles/date-check/", + CycleDateCheckEndpoint.as_view(), + name="project-cycle-date", + ), + path( + "workspaces//projects//user-favorite-cycles/", + CycleFavoriteViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="user-favorite-cycle", + ), + path( + "workspaces//projects//user-favorite-cycles//", + CycleFavoriteViewSet.as_view( + { + "delete": "destroy", + } + ), + name="user-favorite-cycle", + ), + path( + "workspaces//projects//cycles//transfer-issues/", + TransferCycleIssueEndpoint.as_view(), + name="transfer-issues", + ), +] diff --git a/apiserver/plane/api/urls/estimate.py b/apiserver/plane/api/urls/estimate.py new file mode 100644 index 000000000..89363e849 --- /dev/null +++ b/apiserver/plane/api/urls/estimate.py @@ -0,0 +1,37 @@ +from django.urls import path + + +from plane.api.views import ( + ProjectEstimatePointEndpoint, + BulkEstimatePointEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//project-estimates/", + ProjectEstimatePointEndpoint.as_view(), + name="project-estimate-points", + ), + path( + "workspaces//projects//estimates/", + BulkEstimatePointEndpoint.as_view( + { + "get": "list", + "post": "create", + } + ), + name="bulk-create-estimate-points", + ), + path( + "workspaces//projects//estimates//", + BulkEstimatePointEndpoint.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="bulk-create-estimate-points", + ), +] diff --git a/apiserver/plane/api/urls/gpt.py b/apiserver/plane/api/urls/gpt.py new file mode 100644 index 000000000..f2b0362c7 --- /dev/null +++ b/apiserver/plane/api/urls/gpt.py @@ -0,0 +1,13 @@ +from django.urls import path + + +from plane.api.views import GPTIntegrationEndpoint + + +urlpatterns = [ + path( + "workspaces//projects//ai-assistant/", + GPTIntegrationEndpoint.as_view(), + name="importer", + ), +] diff --git a/apiserver/plane/api/urls/importer.py b/apiserver/plane/api/urls/importer.py new file mode 100644 index 000000000..c0a9aa5b5 --- /dev/null +++ b/apiserver/plane/api/urls/importer.py @@ -0,0 +1,37 @@ +from django.urls import path + + +from plane.api.views import ( + ServiceIssueImportSummaryEndpoint, + ImportServiceEndpoint, + UpdateServiceImportStatusEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//importers//", + ServiceIssueImportSummaryEndpoint.as_view(), + name="importer-summary", + ), + path( + "workspaces//projects/importers//", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//importers/", + ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//importers///", + 
ImportServiceEndpoint.as_view(), + name="importer", + ), + path( + "workspaces//projects//service//importers//", + UpdateServiceImportStatusEndpoint.as_view(), + name="importer-status", + ), +] diff --git a/apiserver/plane/api/urls/inbox.py b/apiserver/plane/api/urls/inbox.py new file mode 100644 index 000000000..315f30601 --- /dev/null +++ b/apiserver/plane/api/urls/inbox.py @@ -0,0 +1,53 @@ +from django.urls import path + + +from plane.api.views import ( + InboxViewSet, + InboxIssueViewSet, +) + + +urlpatterns = [ + path( + "workspaces//projects//inboxes/", + InboxViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="inbox", + ), + path( + "workspaces//projects//inboxes//", + InboxViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="inbox", + ), + path( + "workspaces//projects//inboxes//inbox-issues/", + InboxIssueViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="inbox-issue", + ), + path( + "workspaces//projects//inboxes//inbox-issues//", + InboxIssueViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="inbox-issue", + ), +] diff --git a/apiserver/plane/api/urls/integration.py b/apiserver/plane/api/urls/integration.py new file mode 100644 index 000000000..dd431b6c8 --- /dev/null +++ b/apiserver/plane/api/urls/integration.py @@ -0,0 +1,150 @@ +from django.urls import path + + +from plane.api.views import ( + IntegrationViewSet, + WorkspaceIntegrationViewSet, + GithubRepositoriesEndpoint, + GithubRepositorySyncViewSet, + GithubIssueSyncViewSet, + GithubCommentSyncViewSet, + BulkCreateGithubIssueSyncEndpoint, + SlackProjectSyncViewSet, +) + + +urlpatterns = [ + path( + "integrations/", + IntegrationViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="integrations", + ), + path( + "integrations//", + IntegrationViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="integrations", + ), + path( + "workspaces//workspace-integrations/", + WorkspaceIntegrationViewSet.as_view( + { + "get": "list", + } + ), + name="workspace-integrations", + ), + path( + "workspaces//workspace-integrations//", + WorkspaceIntegrationViewSet.as_view( + { + "post": "create", + } + ), + name="workspace-integrations", + ), + path( + "workspaces//workspace-integrations//provider/", + WorkspaceIntegrationViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + name="workspace-integrations", + ), + # Github Integrations + path( + "workspaces//workspace-integrations//github-repositories/", + GithubRepositoriesEndpoint.as_view(), + ), + path( + "workspaces//projects//workspace-integrations//github-repository-sync/", + GithubRepositorySyncViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + ), + path( + "workspaces//projects//workspace-integrations//github-repository-sync//", + GithubRepositorySyncViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync/", + GithubIssueSyncViewSet.as_view( + { + "post": "create", + "get": "list", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//bulk-create-github-issue-sync/", + BulkCreateGithubIssueSyncEndpoint.as_view(), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync//", + GithubIssueSyncViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + ), + 
path( + "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync/", + GithubCommentSyncViewSet.as_view( + { + "post": "create", + "get": "list", + } + ), + ), + path( + "workspaces//projects//github-repository-sync//github-issue-sync//github-comment-sync//", + GithubCommentSyncViewSet.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + ), + ## End Github Integrations + # Slack Integration + path( + "workspaces//projects//workspace-integrations//project-slack-sync/", + SlackProjectSyncViewSet.as_view( + { + "post": "create", + "get": "list", + } + ), + ), + path( + "workspaces//projects//workspace-integrations//project-slack-sync//", + SlackProjectSyncViewSet.as_view( + { + "delete": "destroy", + "get": "retrieve", + } + ), + ), + ## End Slack Integration +] diff --git a/apiserver/plane/api/urls/issue.py b/apiserver/plane/api/urls/issue.py new file mode 100644 index 000000000..b484fe113 --- /dev/null +++ b/apiserver/plane/api/urls/issue.py @@ -0,0 +1,332 @@ +from django.urls import path + + +from plane.api.views import ( + IssueViewSet, + LabelViewSet, + BulkCreateIssueLabelsEndpoint, + BulkDeleteIssuesEndpoint, + BulkImportIssuesEndpoint, + UserWorkSpaceIssues, + SubIssuesEndpoint, + IssueLinkViewSet, + IssueAttachmentEndpoint, + ExportIssuesEndpoint, + IssueActivityEndpoint, + IssueCommentViewSet, + IssueSubscriberViewSet, + IssueReactionViewSet, + CommentReactionViewSet, + IssuePropertyViewSet, + IssueArchiveViewSet, + IssueRelationViewSet, + IssueDraftViewSet, +) + + +urlpatterns = [ + path( + "workspaces//projects//issues/", + IssueViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue", + ), + path( + "workspaces//projects//issues//", + IssueViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue", + ), + path( + "workspaces//projects//issue-labels/", + LabelViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-labels", + ), + path( + "workspaces//projects//issue-labels//", + LabelViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-labels", + ), + path( + "workspaces//projects//bulk-create-labels/", + BulkCreateIssueLabelsEndpoint.as_view(), + name="project-bulk-labels", + ), + path( + "workspaces//projects//bulk-delete-issues/", + BulkDeleteIssuesEndpoint.as_view(), + name="project-issues-bulk", + ), + path( + "workspaces//projects//bulk-import-issues//", + BulkImportIssuesEndpoint.as_view(), + name="project-issues-bulk", + ), + path( + "workspaces//my-issues/", + UserWorkSpaceIssues.as_view(), + name="workspace-issues", + ), + path( + "workspaces//projects//issues//sub-issues/", + SubIssuesEndpoint.as_view(), + name="sub-issues", + ), + path( + "workspaces//projects//issues//issue-links/", + IssueLinkViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-links", + ), + path( + "workspaces//projects//issues//issue-links//", + IssueLinkViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-links", + ), + path( + "workspaces//projects//issues//issue-attachments/", + IssueAttachmentEndpoint.as_view(), + name="project-issue-attachments", + ), + path( + "workspaces//projects//issues//issue-attachments//", + IssueAttachmentEndpoint.as_view(), + name="project-issue-attachments", + 
),
+    path(
+        "workspaces//export-issues/",
+        ExportIssuesEndpoint.as_view(),
+        name="export-issues",
+    ),
+    ## End Issues
+    ## Issue Activity
+    path(
+        "workspaces//projects//issues//history/",
+        IssueActivityEndpoint.as_view(),
+        name="project-issue-history",
+    ),
+    ## End Issue Activity
+    ## IssueComments
+    path(
+        "workspaces//projects//issues//comments/",
+        IssueCommentViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="project-issue-comment",
+    ),
+    path(
+        "workspaces//projects//issues//comments//",
+        IssueCommentViewSet.as_view(
+            {
+                "get": "retrieve",
+                "put": "update",
+                "patch": "partial_update",
+                "delete": "destroy",
+            }
+        ),
+        name="project-issue-comment",
+    ),
+    ## End IssueComments
+    # Issue Subscribers
+    path(
+        "workspaces//projects//issues//issue-subscribers/",
+        IssueSubscriberViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="project-issue-subscribers",
+    ),
+    path(
+        "workspaces//projects//issues//issue-subscribers//",
+        IssueSubscriberViewSet.as_view({"delete": "destroy"}),
+        name="project-issue-subscribers",
+    ),
+    path(
+        "workspaces//projects//issues//subscribe/",
+        IssueSubscriberViewSet.as_view(
+            {
+                "get": "subscription_status",
+                "post": "subscribe",
+                "delete": "unsubscribe",
+            }
+        ),
+        name="project-issue-subscribers",
+    ),
+    ## End Issue Subscribers
+    # Issue Reactions
+    path(
+        "workspaces//projects//issues//reactions/",
+        IssueReactionViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="project-issue-reactions",
+    ),
+    path(
+        "workspaces//projects//issues//reactions//",
+        IssueReactionViewSet.as_view(
+            {
+                "delete": "destroy",
+            }
+        ),
+        name="project-issue-reactions",
+    ),
+    ## End Issue Reactions
+    # Comment Reactions
+    path(
+        "workspaces//projects//comments//reactions/",
+        CommentReactionViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="project-issue-comment-reactions",
+    ),
+    path(
+        "workspaces//projects//comments//reactions//",
+        CommentReactionViewSet.as_view(
+            {
+                "delete": "destroy",
+            }
+        ),
+        name="project-issue-comment-reactions",
+    ),
+    ## End Comment Reactions
+    ## IssueProperty
+    path(
+        "workspaces//projects//issue-properties/",
+        IssuePropertyViewSet.as_view(
+            {
+                "get": "list",
+                "post": "create",
+            }
+        ),
+        name="project-issue-roadmap",
+    ),
+    path(
+        "workspaces//projects//issue-properties//",
+        IssuePropertyViewSet.as_view(
+            {
+                "get": "retrieve",
+                "put": "update",
+                "patch": "partial_update",
+                "delete": "destroy",
+            }
+        ),
+        name="project-issue-roadmap",
+    ),
+    ## End IssueProperty
+    ## Issue Archives
+    path(
+        "workspaces//projects//archived-issues/",
+        IssueArchiveViewSet.as_view(
+            {
+                "get": "list",
+            }
+        ),
+        name="project-issue-archive",
+    ),
+    path(
+        "workspaces//projects//archived-issues//",
+        IssueArchiveViewSet.as_view(
+            {
+                "get": "retrieve",
+                "delete": "destroy",
+            }
+        ),
+        name="project-issue-archive",
+    ),
+    path(
+        "workspaces//projects//unarchive//",
+        IssueArchiveViewSet.as_view(
+            {
+                "post": "unarchive",
+            }
+        ),
+        name="project-issue-archive",
+    ),
+    ## End Issue Archives
+    ## Issue Relation
+    path(
+        "workspaces//projects//issues//issue-relation/",
+        IssueRelationViewSet.as_view(
+            {
+                "post": "create",
+            }
+        ),
+        name="issue-relation",
+    ),
+    path(
+        "workspaces//projects//issues//issue-relation//",
+        IssueRelationViewSet.as_view(
+            {
+                "delete": "destroy",
+            }
+        ),
+        name="issue-relation",
+    ),
+    ## End Issue Relation
+    ## Issue Drafts
+    path(
+        "workspaces//projects//issue-drafts/",
IssueDraftViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-draft", + ), + path( + "workspaces//projects//issue-drafts//", + IssueDraftViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-draft", + ), +] diff --git a/apiserver/plane/api/urls/module.py b/apiserver/plane/api/urls/module.py new file mode 100644 index 000000000..3239af1e4 --- /dev/null +++ b/apiserver/plane/api/urls/module.py @@ -0,0 +1,104 @@ +from django.urls import path + + +from plane.api.views import ( + ModuleViewSet, + ModuleIssueViewSet, + ModuleLinkViewSet, + ModuleFavoriteViewSet, + BulkImportModulesEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//modules/", + ModuleViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-modules", + ), + path( + "workspaces//projects//modules//", + ModuleViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-modules", + ), + path( + "workspaces//projects//modules//module-issues/", + ModuleIssueViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-module-issues", + ), + path( + "workspaces//projects//modules//module-issues//", + ModuleIssueViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-module-issues", + ), + path( + "workspaces//projects//modules//module-links/", + ModuleLinkViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-issue-module-links", + ), + path( + "workspaces//projects//modules//module-links//", + ModuleLinkViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-issue-module-links", + ), + path( + "workspaces//projects//user-favorite-modules/", + ModuleFavoriteViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="user-favorite-module", + ), + path( + "workspaces//projects//user-favorite-modules//", + ModuleFavoriteViewSet.as_view( + { + "delete": "destroy", + } + ), + name="user-favorite-module", + ), + path( + "workspaces//projects//bulk-import-modules//", + BulkImportModulesEndpoint.as_view(), + name="bulk-modules-create", + ), +] diff --git a/apiserver/plane/api/urls/notification.py b/apiserver/plane/api/urls/notification.py new file mode 100644 index 000000000..5e1936d01 --- /dev/null +++ b/apiserver/plane/api/urls/notification.py @@ -0,0 +1,66 @@ +from django.urls import path + + +from plane.api.views import ( + NotificationViewSet, + UnreadNotificationEndpoint, + MarkAllReadNotificationViewSet, +) + + +urlpatterns = [ + path( + "workspaces//users/notifications/", + NotificationViewSet.as_view( + { + "get": "list", + } + ), + name="notifications", + ), + path( + "workspaces//users/notifications//", + NotificationViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="notifications", + ), + path( + "workspaces//users/notifications//read/", + NotificationViewSet.as_view( + { + "post": "mark_read", + "delete": "mark_unread", + } + ), + name="notifications", + ), + path( + "workspaces//users/notifications//archive/", + NotificationViewSet.as_view( + { + "post": "archive", + "delete": "unarchive", + } + ), + name="notifications", + ), + path( + "workspaces//users/notifications/unread/", + UnreadNotificationEndpoint.as_view(), + 
name="unread-notifications", + ), + path( + "workspaces//users/notifications/mark-all-read/", + MarkAllReadNotificationViewSet.as_view( + { + "post": "create", + } + ), + name="mark-all-read-notifications", + ), +] diff --git a/apiserver/plane/api/urls/page.py b/apiserver/plane/api/urls/page.py new file mode 100644 index 000000000..648702283 --- /dev/null +++ b/apiserver/plane/api/urls/page.py @@ -0,0 +1,79 @@ +from django.urls import path + + +from plane.api.views import ( + PageViewSet, + PageBlockViewSet, + PageFavoriteViewSet, + CreateIssueFromPageBlockEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//pages/", + PageViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-pages", + ), + path( + "workspaces//projects//pages//", + PageViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-pages", + ), + path( + "workspaces//projects//pages//page-blocks/", + PageBlockViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-page-blocks", + ), + path( + "workspaces//projects//pages//page-blocks//", + PageBlockViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-page-blocks", + ), + path( + "workspaces//projects//user-favorite-pages/", + PageFavoriteViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="user-favorite-pages", + ), + path( + "workspaces//projects//user-favorite-pages//", + PageFavoriteViewSet.as_view( + { + "delete": "destroy", + } + ), + name="user-favorite-pages", + ), + path( + "workspaces//projects//pages//page-blocks//issues/", + CreateIssueFromPageBlockEndpoint.as_view(), + name="page-block-issues", + ), +] diff --git a/apiserver/plane/api/urls/project.py b/apiserver/plane/api/urls/project.py new file mode 100644 index 000000000..b2a3fbd60 --- /dev/null +++ b/apiserver/plane/api/urls/project.py @@ -0,0 +1,144 @@ +from django.urls import path + +from plane.api.views import ( + ProjectViewSet, + InviteProjectEndpoint, + ProjectMemberViewSet, + ProjectMemberEndpoint, + ProjectMemberInvitationsViewset, + ProjectMemberUserEndpoint, + AddMemberToProjectEndpoint, + ProjectJoinEndpoint, + AddTeamToProjectEndpoint, + ProjectUserViewsEndpoint, + ProjectIdentifierEndpoint, + ProjectFavoritesViewSet, + LeaveProjectEndpoint, + ProjectPublicCoverImagesEndpoint +) + + +urlpatterns = [ + path( + "workspaces//projects/", + ProjectViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project", + ), + path( + "workspaces//projects//", + ProjectViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project", + ), + path( + "workspaces//project-identifiers/", + ProjectIdentifierEndpoint.as_view(), + name="project-identifiers", + ), + path( + "workspaces//projects//invite/", + InviteProjectEndpoint.as_view(), + name="invite-project", + ), + path( + "workspaces//projects//members/", + ProjectMemberViewSet.as_view({"get": "list"}), + name="project-member", + ), + path( + "workspaces//projects//members//", + ProjectMemberViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-member", + ), + path( + "workspaces//projects//project-members/", + ProjectMemberEndpoint.as_view(), + name="project-member", + ), + path( + "workspaces//projects//members/add/", + AddMemberToProjectEndpoint.as_view(), + name="project", + ), + path( + 
"workspaces//projects/join/", + ProjectJoinEndpoint.as_view(), + name="project-join", + ), + path( + "workspaces//projects//team-invite/", + AddTeamToProjectEndpoint.as_view(), + name="projects", + ), + path( + "workspaces//projects//invitations/", + ProjectMemberInvitationsViewset.as_view({"get": "list"}), + name="project-member-invite", + ), + path( + "workspaces//projects//invitations//", + ProjectMemberInvitationsViewset.as_view( + { + "get": "retrieve", + "delete": "destroy", + } + ), + name="project-member-invite", + ), + path( + "workspaces//projects//project-views/", + ProjectUserViewsEndpoint.as_view(), + name="project-view", + ), + path( + "workspaces//projects//project-members/me/", + ProjectMemberUserEndpoint.as_view(), + name="project-member-view", + ), + path( + "workspaces//user-favorite-projects/", + ProjectFavoritesViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-favorite", + ), + path( + "workspaces//user-favorite-projects//", + ProjectFavoritesViewSet.as_view( + { + "delete": "destroy", + } + ), + name="project-favorite", + ), + path( + "workspaces//projects//members/leave/", + LeaveProjectEndpoint.as_view(), + name="leave-project", + ), + path( + "project-covers/", + ProjectPublicCoverImagesEndpoint.as_view(), + name="project-covers", + ), +] diff --git a/apiserver/plane/api/urls/public_board.py b/apiserver/plane/api/urls/public_board.py new file mode 100644 index 000000000..272d5961c --- /dev/null +++ b/apiserver/plane/api/urls/public_board.py @@ -0,0 +1,151 @@ +from django.urls import path + + +from plane.api.views import ( + ProjectDeployBoardViewSet, + ProjectDeployBoardPublicSettingsEndpoint, + ProjectIssuesPublicEndpoint, + IssueRetrievePublicEndpoint, + IssueCommentPublicViewSet, + IssueReactionPublicViewSet, + CommentReactionPublicViewSet, + InboxIssuePublicViewSet, + IssueVotePublicViewSet, + WorkspaceProjectDeployBoardEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//projects//project-deploy-boards/", + ProjectDeployBoardViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-deploy-board", + ), + path( + "workspaces//projects//project-deploy-boards//", + ProjectDeployBoardViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-deploy-board", + ), + path( + "public/workspaces//project-boards//settings/", + ProjectDeployBoardPublicSettingsEndpoint.as_view(), + name="project-deploy-board-settings", + ), + path( + "public/workspaces//project-boards//issues/", + ProjectIssuesPublicEndpoint.as_view(), + name="project-deploy-board", + ), + path( + "public/workspaces//project-boards//issues//", + IssueRetrievePublicEndpoint.as_view(), + name="workspace-project-boards", + ), + path( + "public/workspaces//project-boards//issues//comments/", + IssueCommentPublicViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="issue-comments-project-board", + ), + path( + "public/workspaces//project-boards//issues//comments//", + IssueCommentPublicViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="issue-comments-project-board", + ), + path( + "public/workspaces//project-boards//issues//reactions/", + IssueReactionPublicViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="issue-reactions-project-board", + ), + path( + "public/workspaces//project-boards//issues//reactions//", + IssueReactionPublicViewSet.as_view( + { + "delete": "destroy", + } + ), + 
name="issue-reactions-project-board", + ), + path( + "public/workspaces//project-boards//comments//reactions/", + CommentReactionPublicViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="comment-reactions-project-board", + ), + path( + "public/workspaces//project-boards//comments//reactions//", + CommentReactionPublicViewSet.as_view( + { + "delete": "destroy", + } + ), + name="comment-reactions-project-board", + ), + path( + "public/workspaces//project-boards//inboxes//inbox-issues/", + InboxIssuePublicViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="inbox-issue", + ), + path( + "public/workspaces//project-boards//inboxes//inbox-issues//", + InboxIssuePublicViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="inbox-issue", + ), + path( + "public/workspaces//project-boards//issues//votes/", + IssueVotePublicViewSet.as_view( + { + "get": "list", + "post": "create", + "delete": "destroy", + } + ), + name="issue-vote-project-board", + ), + path( + "public/workspaces//project-boards/", + WorkspaceProjectDeployBoardEndpoint.as_view(), + name="workspace-project-boards", + ), +] diff --git a/apiserver/plane/api/urls/release_note.py b/apiserver/plane/api/urls/release_note.py new file mode 100644 index 000000000..dfbd1ec66 --- /dev/null +++ b/apiserver/plane/api/urls/release_note.py @@ -0,0 +1,13 @@ +from django.urls import path + + +from plane.api.views import ReleaseNotesEndpoint + + +urlpatterns = [ + path( + "release-notes/", + ReleaseNotesEndpoint.as_view(), + name="release-notes", + ), +] diff --git a/apiserver/plane/api/urls/search.py b/apiserver/plane/api/urls/search.py new file mode 100644 index 000000000..282feb046 --- /dev/null +++ b/apiserver/plane/api/urls/search.py @@ -0,0 +1,21 @@ +from django.urls import path + + +from plane.api.views import ( + GlobalSearchEndpoint, + IssueSearchEndpoint, +) + + +urlpatterns = [ + path( + "workspaces//search/", + GlobalSearchEndpoint.as_view(), + name="global-search", + ), + path( + "workspaces//projects//search-issues/", + IssueSearchEndpoint.as_view(), + name="project-issue-search", + ), +] diff --git a/apiserver/plane/api/urls/state.py b/apiserver/plane/api/urls/state.py new file mode 100644 index 000000000..bcfd80cd7 --- /dev/null +++ b/apiserver/plane/api/urls/state.py @@ -0,0 +1,30 @@ +from django.urls import path + + +from plane.api.views import StateViewSet + + +urlpatterns = [ + path( + "workspaces//projects//states/", + StateViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-states", + ), + path( + "workspaces//projects//states//", + StateViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-state", + ), +] diff --git a/apiserver/plane/api/urls/unsplash.py b/apiserver/plane/api/urls/unsplash.py new file mode 100644 index 000000000..25fab4694 --- /dev/null +++ b/apiserver/plane/api/urls/unsplash.py @@ -0,0 +1,13 @@ +from django.urls import path + + +from plane.api.views import UnsplashEndpoint + + +urlpatterns = [ + path( + "unsplash/", + UnsplashEndpoint.as_view(), + name="unsplash", + ), +] diff --git a/apiserver/plane/api/urls/user.py b/apiserver/plane/api/urls/user.py new file mode 100644 index 000000000..5282a7cf6 --- /dev/null +++ b/apiserver/plane/api/urls/user.py @@ -0,0 +1,113 @@ +from django.urls import path + +from plane.api.views import ( + ## User + UserEndpoint, + UpdateUserOnBoardedEndpoint, + 
UpdateUserTourCompletedEndpoint, + UserActivityEndpoint, + ChangePasswordEndpoint, + ## End User + ## Workspaces + UserWorkspaceInvitationsEndpoint, + UserWorkSpacesEndpoint, + JoinWorkspaceEndpoint, + UserWorkspaceInvitationsEndpoint, + UserWorkspaceInvitationEndpoint, + UserActivityGraphEndpoint, + UserIssueCompletedGraphEndpoint, + UserWorkspaceDashboardEndpoint, + UserProjectInvitationsViewset, + ## End Workspaces +) + +urlpatterns = [ + # User Profile + path( + "users/me/", + UserEndpoint.as_view( + {"get": "retrieve", "patch": "partial_update", "delete": "destroy"} + ), + name="users", + ), + path( + "users/me/settings/", + UserEndpoint.as_view( + { + "get": "retrieve_user_settings", + } + ), + name="users", + ), + path( + "users/me/change-password/", + ChangePasswordEndpoint.as_view(), + name="change-password", + ), + path( + "users/me/onboard/", + UpdateUserOnBoardedEndpoint.as_view(), + name="user-onboard", + ), + path( + "users/me/tour-completed/", + UpdateUserTourCompletedEndpoint.as_view(), + name="user-tour", + ), + path( + "users/workspaces//activities/", + UserActivityEndpoint.as_view(), + name="user-activities", + ), + # user workspaces + path( + "users/me/workspaces/", + UserWorkSpacesEndpoint.as_view(), + name="user-workspace", + ), + # user workspace invitations + path( + "users/me/invitations/workspaces/", + UserWorkspaceInvitationsEndpoint.as_view({"get": "list", "post": "create"}), + name="user-workspace-invitations", + ), + # user workspace invitation + path( + "users/me/invitations//", + UserWorkspaceInvitationEndpoint.as_view( + { + "get": "retrieve", + } + ), + name="user-workspace-invitation", + ), + # user join workspace + # User Graphs + path( + "users/me/workspaces//activity-graph/", + UserActivityGraphEndpoint.as_view(), + name="user-activity-graph", + ), + path( + "users/me/workspaces//issues-completed-graph/", + UserIssueCompletedGraphEndpoint.as_view(), + name="completed-graph", + ), + path( + "users/me/workspaces//dashboard/", + UserWorkspaceDashboardEndpoint.as_view(), + name="user-workspace-dashboard", + ), + ## End User Graph + path( + "users/me/invitations/workspaces///join/", + JoinWorkspaceEndpoint.as_view(), + name="user-join-workspace", + ), + # user project invitations + path( + "users/me/invitations/projects/", + UserProjectInvitationsViewset.as_view({"get": "list", "post": "create"}), + name="user-project-invitations", + ), +] diff --git a/apiserver/plane/api/urls/views.py b/apiserver/plane/api/urls/views.py new file mode 100644 index 000000000..560855e80 --- /dev/null +++ b/apiserver/plane/api/urls/views.py @@ -0,0 +1,85 @@ +from django.urls import path + + +from plane.api.views import ( + IssueViewViewSet, + GlobalViewViewSet, + GlobalViewIssuesViewSet, + IssueViewFavoriteViewSet, +) + + +urlpatterns = [ + path( + "workspaces//projects//views/", + IssueViewViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="project-view", + ), + path( + "workspaces//projects//views//", + IssueViewViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="project-view", + ), + path( + "workspaces//views/", + GlobalViewViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="global-view", + ), + path( + "workspaces//views//", + GlobalViewViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="global-view", + ), + path( + "workspaces//issues/", + GlobalViewIssuesViewSet.as_view( 
+ { + "get": "list", + } + ), + name="global-view-issues", + ), + path( + "workspaces//projects//user-favorite-views/", + IssueViewFavoriteViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="user-favorite-view", + ), + path( + "workspaces//projects//user-favorite-views//", + IssueViewFavoriteViewSet.as_view( + { + "delete": "destroy", + } + ), + name="user-favorite-view", + ), +] diff --git a/apiserver/plane/api/urls/workspace.py b/apiserver/plane/api/urls/workspace.py new file mode 100644 index 000000000..7cfc8f27a --- /dev/null +++ b/apiserver/plane/api/urls/workspace.py @@ -0,0 +1,182 @@ +from django.urls import path + + +from plane.api.views import ( + WorkSpaceViewSet, + InviteWorkspaceEndpoint, + WorkSpaceMemberViewSet, + WorkspaceMembersEndpoint, + WorkspaceInvitationsViewset, + WorkspaceMemberUserEndpoint, + WorkspaceMemberUserViewsEndpoint, + WorkSpaceAvailabilityCheckEndpoint, + TeamMemberViewSet, + UserLastProjectWithWorkspaceEndpoint, + WorkspaceThemeViewSet, + WorkspaceUserProfileStatsEndpoint, + WorkspaceUserActivityEndpoint, + WorkspaceUserProfileEndpoint, + WorkspaceUserProfileIssuesEndpoint, + WorkspaceLabelsEndpoint, + LeaveWorkspaceEndpoint, +) + + +urlpatterns = [ + path( + "workspace-slug-check/", + WorkSpaceAvailabilityCheckEndpoint.as_view(), + name="workspace-availability", + ), + path( + "workspaces/", + WorkSpaceViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="workspace", + ), + path( + "workspaces//", + WorkSpaceViewSet.as_view( + { + "get": "retrieve", + "put": "update", + "patch": "partial_update", + "delete": "destroy", + } + ), + name="workspace", + ), + path( + "workspaces//invite/", + InviteWorkspaceEndpoint.as_view(), + name="invite-workspace", + ), + path( + "workspaces//invitations/", + WorkspaceInvitationsViewset.as_view({"get": "list"}), + name="workspace-invitations", + ), + path( + "workspaces//invitations//", + WorkspaceInvitationsViewset.as_view( + { + "delete": "destroy", + "get": "retrieve", + } + ), + name="workspace-invitations", + ), + path( + "workspaces//members/", + WorkSpaceMemberViewSet.as_view({"get": "list"}), + name="workspace-member", + ), + path( + "workspaces//members//", + WorkSpaceMemberViewSet.as_view( + { + "patch": "partial_update", + "delete": "destroy", + "get": "retrieve", + } + ), + name="workspace-member", + ), + path( + "workspaces//workspace-members/", + WorkspaceMembersEndpoint.as_view(), + name="workspace-members", + ), + path( + "workspaces//teams/", + TeamMemberViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="workspace-team-members", + ), + path( + "workspaces//teams//", + TeamMemberViewSet.as_view( + { + "put": "update", + "patch": "partial_update", + "delete": "destroy", + "get": "retrieve", + } + ), + name="workspace-team-members", + ), + path( + "users/last-visited-workspace/", + UserLastProjectWithWorkspaceEndpoint.as_view(), + name="workspace-project-details", + ), + path( + "workspaces//workspace-members/me/", + WorkspaceMemberUserEndpoint.as_view(), + name="workspace-member-details", + ), + path( + "workspaces//workspace-views/", + WorkspaceMemberUserViewsEndpoint.as_view(), + name="workspace-member-views-details", + ), + path( + "workspaces//workspace-themes/", + WorkspaceThemeViewSet.as_view( + { + "get": "list", + "post": "create", + } + ), + name="workspace-themes", + ), + path( + "workspaces//workspace-themes//", + WorkspaceThemeViewSet.as_view( + { + "get": "retrieve", + "patch": "partial_update", + "delete": "destroy", + } + ), 
+ name="workspace-themes", + ), + path( + "workspaces//user-stats//", + WorkspaceUserProfileStatsEndpoint.as_view(), + name="workspace-user-stats", + ), + path( + "workspaces//user-activity//", + WorkspaceUserActivityEndpoint.as_view(), + name="workspace-user-activity", + ), + path( + "workspaces//user-profile//", + WorkspaceUserProfileEndpoint.as_view(), + name="workspace-user-profile-page", + ), + path( + "workspaces//user-issues//", + WorkspaceUserProfileIssuesEndpoint.as_view(), + name="workspace-user-profile-issues", + ), + path( + "workspaces//labels/", + WorkspaceLabelsEndpoint.as_view(), + name="workspace-labels", + ), + path( + "workspaces//members/leave/", + LeaveWorkspaceEndpoint.as_view(), + name="leave-workspace-members", + ), +] diff --git a/apiserver/plane/api/urls.py b/apiserver/plane/api/urls_deprecated.py similarity index 99% rename from apiserver/plane/api/urls.py rename to apiserver/plane/api/urls_deprecated.py index 6e7a3821f..0dc1b3a08 100644 --- a/apiserver/plane/api/urls.py +++ b/apiserver/plane/api/urls_deprecated.py @@ -1,5 +1,6 @@ from django.urls import path +from rest_framework_simplejwt.views import TokenRefreshView # Create your urls here. @@ -191,6 +192,9 @@ from plane.api.views import ( ) +#TODO: Delete this file +# This url file has been deprecated use apiserver/plane/urls folder to create new urls + urlpatterns = [ # Social Auth path("social-auth/", OauthEndpoint.as_view(), name="oauth"), @@ -203,6 +207,7 @@ urlpatterns = [ "magic-generate/", MagicSignInGenerateEndpoint.as_view(), name="magic-generate" ), path("magic-sign-in/", MagicSignInEndpoint.as_view(), name="magic-sign-in"), + path('token/refresh/', TokenRefreshView.as_view(), name='token_refresh'), # Email verification path("email-verify/", VerifyEmailEndpoint.as_view(), name="email-verify"), path( @@ -229,6 +234,15 @@ urlpatterns = [ ), name="users", ), + path( + "users/me/settings/", + UserEndpoint.as_view( + { + "get": "retrieve_user_settings", + } + ), + name="users", + ), path( "users/me/change-password/", ChangePasswordEndpoint.as_view(), @@ -556,6 +570,7 @@ urlpatterns = [ "workspaces//user-favorite-projects/", ProjectFavoritesViewSet.as_view( { + "get": "list", "post": "create", } ), diff --git a/apiserver/plane/api/views/analytic.py b/apiserver/plane/api/views/analytic.py index feb766b46..ad8a15c4f 100644 --- a/apiserver/plane/api/views/analytic.py +++ b/apiserver/plane/api/views/analytic.py @@ -1,10 +1,5 @@ # Django imports -from django.db.models import ( - Count, - Sum, - F, - Q -) +from django.db.models import Count, Sum, F, Q from django.db.models.functions import ExtractMonth # Third party imports @@ -31,68 +26,152 @@ class AnalyticsEndpoint(BaseAPIView): try: x_axis = request.GET.get("x_axis", False) y_axis = request.GET.get("y_axis", False) + segment = request.GET.get("segment", False) - if not x_axis or not y_axis: + valid_xaxis_segment = [ + "state_id", + "state__group", + "labels__id", + "assignees__id", + "estimate_point", + "issue_cycle__cycle_id", + "issue_module__module_id", + "priority", + "start_date", + "target_date", + "created_at", + "completed_at", + ] + + valid_yaxis = [ + "issue_count", + "estimate", + ] + + # Check for x-axis and y-axis as thery are required parameters + if ( + not x_axis + or not y_axis + or not x_axis in valid_xaxis_segment + or not y_axis in valid_yaxis + ): return Response( - {"error": "x-axis and y-axis dimensions are required"}, + { + "error": "x-axis and y-axis dimensions are required and the values should be valid" + }, 
status=status.HTTP_400_BAD_REQUEST, ) - segment = request.GET.get("segment", False) + # If segment is present it cannot be same as x-axis + if segment and (segment not in valid_xaxis_segment or x_axis == segment): + return Response( + { + "error": "Both segment and x axis cannot be same and segment should be valid" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + # Additional filters that need to be applied filters = issue_filters(request.GET, "GET") + # Get the issues for the workspace with the additional filters applied queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters) + # Get the total issue count total_issues = queryset.count() + + # Build the graph payload distribution = build_graph_plot( queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment ) - colors = dict() - if x_axis in ["state__name", "state__group"] or segment in [ - "state__name", - "state__group", - ]: - if x_axis in ["state__name", "state__group"]: - key = "name" if x_axis == "state__name" else "group" - else: - key = "name" if segment == "state__name" else "group" - - colors = ( - State.objects.filter( - ~Q(name="Triage"), - workspace__slug=slug, project_id__in=filters.get("project__in") - ).values(key, "color") - if filters.get("project__in", False) - else State.objects.filter(~Q(name="Triage"), workspace__slug=slug).values(key, "color") + state_details = {} + if x_axis in ["state_id"] or segment in ["state_id"]: + state_details = ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + ) + .distinct("state_id") + .order_by("state_id") + .values("state_id", "state__name", "state__color") ) - if x_axis in ["labels__name"] or segment in ["labels__name"]: - colors = ( - Label.objects.filter( - workspace__slug=slug, project_id__in=filters.get("project__in") - ).values("name", "color") - if filters.get("project__in", False) - else Label.objects.filter(workspace__slug=slug).values( - "name", "color" + label_details = {} + if x_axis in ["labels__id"] or segment in ["labels__id"]: + label_details = ( + Issue.objects.filter( + workspace__slug=slug, **filters, labels__id__isnull=False ) + .distinct("labels__id") + .order_by("labels__id") + .values("labels__id", "labels__color", "labels__name") ) assignee_details = {} if x_axis in ["assignees__id"] or segment in ["assignees__id"]: assignee_details = ( - Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False) + Issue.issue_objects.filter( + workspace__slug=slug, **filters, assignees__avatar__isnull=False + ) .order_by("assignees__id") .distinct("assignees__id") - .values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id") + .values( + "assignees__avatar", + "assignees__display_name", + "assignees__first_name", + "assignees__last_name", + "assignees__id", + ) ) + cycle_details = {} + if x_axis in ["issue_cycle__cycle_id"] or segment in [ + "issue_cycle__cycle_id" + ]: + cycle_details = ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + issue_cycle__cycle_id__isnull=False, + ) + .distinct("issue_cycle__cycle_id") + .order_by("issue_cycle__cycle_id") + .values( + "issue_cycle__cycle_id", + "issue_cycle__cycle__name", + ) + ) + + module_details = {} + if x_axis in ["issue_module__module_id"] or segment in [ + "issue_module__module_id" + ]: + module_details = ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + issue_module__module_id__isnull=False, + ) + .distinct("issue_module__module_id") + 
.order_by("issue_module__module_id") + .values( + "issue_module__module_id", + "issue_module__module__name", + ) + ) return Response( { "total": total_issues, "distribution": distribution, - "extras": {"colors": colors, "assignee_details": assignee_details}, + "extras": { + "state_details": state_details, + "assignee_details": assignee_details, + "label_details": label_details, + "cycle_details": cycle_details, + "module_details": module_details, + }, }, status=status.HTTP_200_OK, ) @@ -177,13 +256,53 @@ class ExportAnalyticsEndpoint(BaseAPIView): try: x_axis = request.data.get("x_axis", False) y_axis = request.data.get("y_axis", False) + segment = request.data.get("segment", False) - if not x_axis or not y_axis: + + valid_xaxis_segment = [ + "state_id", + "state__group", + "labels__id", + "assignees__id", + "estimate_point", + "issue_cycle__cycle_id", + "issue_module__module_id", + "priority", + "start_date", + "target_date", + "created_at", + "completed_at", + ] + + valid_yaxis = [ + "issue_count", + "estimate", + ] + + # Check for x-axis and y-axis as thery are required parameters + if ( + not x_axis + or not y_axis + or not x_axis in valid_xaxis_segment + or not y_axis in valid_yaxis + ): return Response( - {"error": "x-axis and y-axis dimensions are required"}, + { + "error": "x-axis and y-axis dimensions are required and the values should be valid" + }, status=status.HTTP_400_BAD_REQUEST, ) + # If segment is present it cannot be same as x-axis + if segment and (segment not in valid_xaxis_segment or x_axis == segment): + return Response( + { + "error": "Both segment and x axis cannot be same and segment should be valid" + }, + status=status.HTTP_400_BAD_REQUEST, + ) + + analytic_export_task.delay( email=request.user.email, data=request.data, slug=slug ) @@ -210,68 +329,80 @@ class DefaultAnalyticsEndpoint(BaseAPIView): def get(self, request, slug): try: filters = issue_filters(request.GET, "GET") + base_issues = Issue.issue_objects.filter(workspace__slug=slug, **filters) - queryset = Issue.issue_objects.filter(workspace__slug=slug, **filters) + total_issues = base_issues.count() - total_issues = queryset.count() + state_groups = base_issues.annotate(state_group=F("state__group")) total_issues_classified = ( - queryset.annotate(state_group=F("state__group")) - .values("state_group") + state_groups.values("state_group") .annotate(state_count=Count("state_group")) .order_by("state_group") ) - open_issues = queryset.filter( - state__group__in=["backlog", "unstarted", "started"] - ).count() + open_issues_groups = ["backlog", "unstarted", "started"] + open_issues_queryset = state_groups.filter( + state__group__in=open_issues_groups + ) + open_issues = open_issues_queryset.count() open_issues_classified = ( - queryset.filter(state__group__in=["backlog", "unstarted", "started"]) - .annotate(state_group=F("state__group")) - .values("state_group") + open_issues_queryset.values("state_group") .annotate(state_count=Count("state_group")) .order_by("state_group") ) issue_completed_month_wise = ( - queryset.filter(completed_at__isnull=False) + base_issues.filter(completed_at__isnull=False) .annotate(month=ExtractMonth("completed_at")) .values("month") .annotate(count=Count("*")) .order_by("month") ) + + user_details = [ + "created_by__first_name", + "created_by__last_name", + "created_by__avatar", + "created_by__display_name", + "created_by__id", + ] + most_issue_created_user = ( - queryset.exclude(created_by=None) - .values("created_by__first_name", "created_by__last_name", "created_by__avatar", 
"created_by__display_name", "created_by__id") + base_issues.exclude(created_by=None) + .values(*user_details) .annotate(count=Count("id")) - .order_by("-count") - )[:5] + .order_by("-count")[:5] + ) + + user_assignee_details = [ + "assignees__first_name", + "assignees__last_name", + "assignees__avatar", + "assignees__display_name", + "assignees__id", + ] most_issue_closed_user = ( - queryset.filter(completed_at__isnull=False, assignees__isnull=False) - .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id") + base_issues.filter(completed_at__isnull=False) + .exclude(assignees=None) + .values(*user_assignee_details) .annotate(count=Count("id")) - .order_by("-count") - )[:5] + .order_by("-count")[:5] + ) pending_issue_user = ( - queryset.filter(completed_at__isnull=True) - .values("assignees__first_name", "assignees__last_name", "assignees__avatar", "assignees__display_name", "assignees__id") + base_issues.filter(completed_at__isnull=True) + .values(*user_assignee_details) .annotate(count=Count("id")) .order_by("-count") ) - open_estimate_sum = ( - queryset.filter( - state__group__in=["backlog", "unstarted", "started"] - ).aggregate(open_estimate_sum=Sum("estimate_point")) - )["open_estimate_sum"] - print(open_estimate_sum) - - total_estimate_sum = queryset.aggregate( - total_estimate_sum=Sum("estimate_point") - )["total_estimate_sum"] + open_estimate_sum = open_issues_queryset.aggregate( + sum=Sum("estimate_point") + )["sum"] + total_estimate_sum = base_issues.aggregate(sum=Sum("estimate_point"))["sum"] return Response( { @@ -292,6 +423,6 @@ class DefaultAnalyticsEndpoint(BaseAPIView): except Exception as e: capture_exception(e) return Response( - {"error": "Something went wrong please try again later"}, + {"error": "Something went wrong. 
Please try again later."}, status=status.HTTP_400_BAD_REQUEST, ) diff --git a/apiserver/plane/api/views/auth_extended.py b/apiserver/plane/api/views/auth_extended.py index df3f3aaca..161314294 100644 --- a/apiserver/plane/api/views/auth_extended.py +++ b/apiserver/plane/api/views/auth_extended.py @@ -9,7 +9,6 @@ from django.utils.encoding import ( DjangoUnicodeDecodeError, ) from django.utils.http import urlsafe_base64_decode, urlsafe_base64_encode -from django.contrib.sites.shortcuts import get_current_site from django.conf import settings ## Third Party Imports diff --git a/apiserver/plane/api/views/authentication.py b/apiserver/plane/api/views/authentication.py index aa8ff4511..19466a8ff 100644 --- a/apiserver/plane/api/views/authentication.py +++ b/apiserver/plane/api/views/authentication.py @@ -87,14 +87,11 @@ class SignUpEndpoint(BaseAPIView): user.token_updated_at = timezone.now() user.save() - serialized_user = UserSerializer(user).data - access_token, refresh_token = get_tokens_for_user(user) data = { "access_token": access_token, "refresh_token": refresh_token, - "user": serialized_user, } # Send Analytics @@ -180,8 +177,6 @@ class SignInEndpoint(BaseAPIView): status=status.HTTP_403_FORBIDDEN, ) - serialized_user = UserSerializer(user).data - # settings last active for the user user.last_active = timezone.now() user.last_login_time = timezone.now() @@ -215,7 +210,6 @@ class SignInEndpoint(BaseAPIView): data = { "access_token": access_token, "refresh_token": refresh_token, - "user": serialized_user, } return Response(data, status=status.HTTP_200_OK) @@ -427,13 +421,11 @@ class MagicSignInEndpoint(BaseAPIView): user.last_login_uagent = request.META.get("HTTP_USER_AGENT") user.token_updated_at = timezone.now() user.save() - serialized_user = UserSerializer(user).data access_token, refresh_token = get_tokens_for_user(user) data = { "access_token": access_token, "refresh_token": refresh_token, - "user": serialized_user, } return Response(data, status=status.HTTP_200_OK) diff --git a/apiserver/plane/api/views/cycle.py b/apiserver/plane/api/views/cycle.py index e84b6dd0a..e59b59b5c 100644 --- a/apiserver/plane/api/views/cycle.py +++ b/apiserver/plane/api/views/cycle.py @@ -62,28 +62,6 @@ class CycleViewSet(BaseViewSet): project_id=self.kwargs.get("project_id"), owned_by=self.request.user ) - def perform_destroy(self, instance): - cycle_issues = list( - CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list( - "issue", flat=True - ) - ) - issue_activity.delay( - type="cycle.activity.deleted", - requested_data=json.dumps( - { - "cycle_id": str(self.kwargs.get("pk")), - "issues": [str(issue_id) for issue_id in cycle_issues], - } - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_destroy(instance) def get_queryset(self): subquery = CycleFavorite.objects.filter( @@ -206,12 +184,6 @@ class CycleViewSet(BaseViewSet): queryset = queryset.order_by(order_by) - # All Cycles - if cycle_view == "all": - return Response( - CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK - ) - # Current Cycle if cycle_view == "current": queryset = queryset.filter( @@ -348,8 +320,9 @@ class CycleViewSet(BaseViewSet): CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK ) + # If no matching view is found return all cycles return Response( - {"error": "No matching view found"}, 
status=status.HTTP_400_BAD_REQUEST + CycleSerializer(queryset, many=True).data, status=status.HTTP_200_OK ) except Exception as e: @@ -543,6 +516,40 @@ class CycleViewSet(BaseViewSet): status=status.HTTP_400_BAD_REQUEST, ) + def destroy(self, request, slug, project_id, pk): + try: + cycle_issues = list( + CycleIssue.objects.filter(cycle_id=self.kwargs.get("pk")).values_list( + "issue", flat=True + ) + ) + cycle = Cycle.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk + ) + # Delete the cycle + cycle.delete() + issue_activity.delay( + type="cycle.activity.deleted", + requested_data=json.dumps( + { + "cycle_id": str(pk), + "issues": [str(issue_id) for issue_id in cycle_issues], + } + ), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + class CycleIssueViewSet(BaseViewSet): serializer_class = CycleIssueSerializer @@ -563,23 +570,6 @@ class CycleIssueViewSet(BaseViewSet): cycle_id=self.kwargs.get("cycle_id"), ) - def perform_destroy(self, instance): - issue_activity.delay( - type="cycle.activity.deleted", - requested_data=json.dumps( - { - "cycle_id": str(self.kwargs.get("cycle_id")), - "issues": [str(instance.issue_id)], - } - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - def get_queryset(self): return self.filter_queryset( super() @@ -752,7 +742,7 @@ class CycleIssueViewSet(BaseViewSet): ), } ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) # Return all Cycle Issues @@ -772,6 +762,30 @@ class CycleIssueViewSet(BaseViewSet): status=status.HTTP_400_BAD_REQUEST, ) + def destroy(self, request, slug, project_id, cycle_id, pk): + try: + cycle_issue = CycleIssue.objects.get(pk=pk, workspace__slug=slug, project_id=project_id, cycle_id=cycle_id) + issue_id = cycle_issue.issue_id + cycle_issue.delete() + issue_activity.delay( + type="cycle.activity.deleted", + requested_data=json.dumps( + { + "cycle_id": str(self.kwargs.get("cycle_id")), + "issues": [str(issue_id)], + } + ), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("pk", None)), + project_id=str(self.kwargs.get("project_id", None)), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + except Exception as e: + capture_exception(e) + return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST) + class CycleDateCheckEndpoint(BaseAPIView): permission_classes = [ @@ -802,7 +816,7 @@ class CycleDateCheckEndpoint(BaseAPIView): if cycles.exists(): return Response( { - "error": "You have a cycle already on the given dates, if you want to create your draft cycle you can do that by removing dates", + "error": "You have a cycle already on the given dates, if you want to create a draft cycle you can do that by removing dates", "status": False, } ) diff --git a/apiserver/plane/api/views/issue.py b/apiserver/plane/api/views/issue.py index 2d13449fd..2ca1ec014 100644 --- a/apiserver/plane/api/views/issue.py +++ 
b/apiserver/plane/api/views/issue.py @@ -108,49 +108,6 @@ class IssueViewSet(BaseViewSet): "workspace__id", ] - def perform_create(self, serializer): - serializer.save(project_id=self.kwargs.get("project_id")) - - def perform_update(self, serializer): - requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="issue.activity.updated", - requested_data=requested_data, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueSerializer(current_instance).data, cls=DjangoJSONEncoder - ), - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_update(serializer) - - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="issue.activity.deleted", - requested_data=json.dumps( - {"issue_id": str(self.kwargs.get("pk", None))} - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueSerializer(current_instance).data, cls=DjangoJSONEncoder - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - def get_queryset(self): return ( Issue.issue_objects.annotate( @@ -278,7 +235,8 @@ class IssueViewSet(BaseViewSet): if group_by: return Response( - group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK + group_results(issues, group_by, sub_group_by), + status=status.HTTP_200_OK, ) return Response(issues, status=status.HTTP_200_OK) @@ -314,7 +272,7 @@ class IssueViewSet(BaseViewSet): issue_id=str(serializer.data.get("id", None)), project_id=str(project_id), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -331,15 +289,69 @@ class IssueViewSet(BaseViewSet): .order_by() .annotate(count=Func(F("id"), function="Count")) .values("count") - ).get( - workspace__slug=slug, project_id=project_id, pk=pk - ) + ).get(workspace__slug=slug, project_id=project_id, pk=pk) return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) except Issue.DoesNotExist: return Response( {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND ) + def partial_update(self, request, slug, project_id, pk=None): + try: + issue = Issue.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk + ) + current_instance = json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ) + requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) + serializer = IssueCreateSerializer(issue, data=request.data, partial=True) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="issue.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except Exception as e: + 
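The `partial_update` above serializes the row before applying the change, so the activity task can compare the old state against the requested data. A minimal sketch of that before/after contract, with invented field names and a plain list as the audit sink:

```python
# Sketch: capture the pre-update state so an audit task can diff it against
# the requested changes. The record dict and audit_log are illustrative.
import json

audit_log = []

def partial_update(issue, changes):
    current_instance = json.dumps(issue)   # snapshot before mutation
    requested_data = json.dumps(changes)
    issue.update(changes)                  # the "save"
    audit_log.append((requested_data, current_instance))

issue = {"name": "Old title", "priority": "low"}
partial_update(issue, {"name": "New title"})
old = json.loads(audit_log[0][1])
assert old["name"] == "Old title" and issue["name"] == "New title"
```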
capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def destroy(self, request, slug, project_id, pk=None): + try: + issue = Issue.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk + ) + current_instance = json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ) + issue.delete() + issue_activity.delay( + type="issue.activity.deleted", + requested_data=json.dumps({"issue_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + class UserWorkSpaceIssues(BaseAPIView): @method_decorator(gzip_page) @@ -465,10 +477,11 @@ class UserWorkSpaceIssues(BaseAPIView): {"error": "Group by and sub group by cannot be same"}, status=status.HTTP_400_BAD_REQUEST, ) - + if group_by: return Response( - group_results(issues, group_by, sub_group_by), status=status.HTTP_200_OK + group_results(issues, group_by, sub_group_by), + status=status.HTTP_200_OK, ) return Response(issues, status=status.HTTP_200_OK) @@ -560,64 +573,6 @@ class IssueCommentViewSet(BaseViewSet): "workspace__id", ] - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - issue_id=self.kwargs.get("issue_id"), - actor=self.request.user if self.request.user is not None else None, - ) - issue_activity.delay( - type="comment.activity.created", - requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id")), - project_id=str(self.kwargs.get("project_id")), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - def perform_update(self, serializer): - requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="comment.activity.updated", - requested_data=requested_data, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueCommentSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_update(serializer) - - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="comment.activity.deleted", - requested_data=json.dumps( - {"comment_id": str(self.kwargs.get("pk", None))} - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueCommentSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - def get_queryset(self): return self.filter_queryset( super() @@ -641,6 +596,93 @@ class IssueCommentViewSet(BaseViewSet): .distinct() ) + def create(self, request, slug, project_id, issue_id): + try: + serializer = 
IssueCommentSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + actor=request.user, + ) + issue_activity.delay( + type="comment.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id")), + project_id=str(self.kwargs.get("project_id")), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def partial_update(self, request, slug, project_id, issue_id, pk): + try: + issue_comment = IssueComment.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) + current_instance = json.dumps( + IssueCommentSerializer(issue_comment).data, + cls=DjangoJSONEncoder, + ) + serializer = IssueCommentSerializer( + issue_comment, data=request.data, partial=True + ) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="comment.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def destroy(self, request, slug, project_id, issue_id, pk): + try: + issue_comment = IssueComment.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + current_instance = json.dumps( + IssueCommentSerializer(issue_comment).data, + cls=DjangoJSONEncoder, + ) + issue_comment.delete() + issue_activity.delay( + type="comment.activity.deleted", + requested_data=json.dumps({"comment_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + class IssuePropertyViewSet(BaseViewSet): serializer_class = IssuePropertySerializer @@ -718,10 +760,16 @@ class LabelViewSet(BaseViewSet): return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) except IntegrityError: - return Response({"error": "Label with the same name already exists in the project"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "Label with the same name already exists in the project"}, + status=status.HTTP_400_BAD_REQUEST, + ) except Exception as e: capture_exception(e) - return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) def 
get_queryset(self): return self.filter_queryset( @@ -894,63 +942,6 @@ class IssueLinkViewSet(BaseViewSet): model = IssueLink serializer_class = IssueLinkSerializer - def perform_create(self, serializer): - serializer.save( - project_id=self.kwargs.get("project_id"), - issue_id=self.kwargs.get("issue_id"), - ) - issue_activity.delay( - type="link.activity.created", - requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id")), - project_id=str(self.kwargs.get("project_id")), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - def perform_update(self, serializer): - requested_data = json.dumps(self.request.data, cls=DjangoJSONEncoder) - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="link.activity.updated", - requested_data=requested_data, - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueLinkSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_update(serializer) - - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="link.activity.deleted", - requested_data=json.dumps( - {"link_id": str(self.kwargs.get("pk", None))} - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueLinkSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - def get_queryset(self): return ( super() @@ -963,6 +954,92 @@ class IssueLinkViewSet(BaseViewSet): .distinct() ) + def create(self, request, slug, project_id, issue_id): + try: + serializer = IssueLinkSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + issue_id=issue_id, + ) + issue_activity.delay( + type="link.activity.created", + requested_data=json.dumps(serializer.data, cls=DjangoJSONEncoder), + actor_id=str(self.request.user.id), + issue_id=str(self.kwargs.get("issue_id")), + project_id=str(self.kwargs.get("project_id")), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def partial_update(self, request, slug, project_id, issue_id, pk): + try: + issue_link = IssueLink.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + requested_data = json.dumps(request.data, cls=DjangoJSONEncoder) + current_instance = json.dumps( + IssueLinkSerializer(issue_link).data, + cls=DjangoJSONEncoder, + ) + serializer = IssueLinkSerializer( + issue_link, data=request.data, partial=True + ) + if serializer.is_valid(): + serializer.save() + issue_activity.delay( + type="link.activity.updated", + requested_data=requested_data, + actor_id=str(request.user.id), + 
issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_200_OK) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + + def destroy(self, request, slug, project_id, issue_id, pk): + try: + issue_link = IssueLink.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + current_instance = json.dumps( + IssueLinkSerializer(issue_link).data, + cls=DjangoJSONEncoder, + ) + issue_activity.delay( + type="link.activity.deleted", + requested_data=json.dumps({"link_id": str(pk)}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + issue_link.delete() + return Response(status=status.HTTP_204_NO_CONTENT) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + class BulkCreateIssueLabelsEndpoint(BaseAPIView): def post(self, request, slug, project_id): @@ -1026,7 +1103,7 @@ class IssueAttachmentEndpoint(BaseAPIView): serializer.data, cls=DjangoJSONEncoder, ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -1049,7 +1126,7 @@ class IssueAttachmentEndpoint(BaseAPIView): issue_id=str(self.kwargs.get("issue_id", None)), project_id=str(self.kwargs.get("project_id", None)), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) return Response(status=status.HTTP_204_NO_CONTENT) @@ -1252,7 +1329,7 @@ class IssueArchiveViewSet(BaseViewSet): issue_id=str(issue.id), project_id=str(project_id), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) return Response(IssueSerializer(issue).data, status=status.HTTP_200_OK) @@ -1444,21 +1521,32 @@ class IssueReactionViewSet(BaseViewSet): .distinct() ) - def perform_create(self, serializer): - serializer.save( - issue_id=self.kwargs.get("issue_id"), - project_id=self.kwargs.get("project_id"), - actor=self.request.user, - ) - issue_activity.delay( - type="issue_reaction.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) + def create(self, request, slug, project_id, issue_id): + try: + serializer = IssueReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + issue_id=issue_id, + project_id=project_id, + actor=request.user, + ) + issue_activity.delay( + type="issue_reaction.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, 
status=status.HTTP_400_BAD_REQUEST) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) def destroy(self, request, slug, project_id, issue_id, reaction_code): try: @@ -1481,7 +1569,7 @@ class IssueReactionViewSet(BaseViewSet): "identifier": str(issue_reaction.id), } ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) issue_reaction.delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -1517,21 +1605,32 @@ class CommentReactionViewSet(BaseViewSet): .distinct() ) - def perform_create(self, serializer): - serializer.save( - actor=self.request.user, - comment_id=self.kwargs.get("comment_id"), - project_id=self.kwargs.get("project_id"), - ) - issue_activity.delay( - type="comment_reaction.activity.created", - requested_data=json.dumps(self.request.data, cls=DjangoJSONEncoder), - actor_id=str(self.request.user.id), - issue_id=None, - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) + def create(self, request, slug, project_id, comment_id): + try: + serializer = CommentReactionSerializer(data=request.data) + if serializer.is_valid(): + serializer.save( + project_id=project_id, + actor_id=request.user.id, + comment_id=comment_id, + ) + issue_activity.delay( + type="comment_reaction.activity.created", + requested_data=json.dumps(request.data, cls=DjangoJSONEncoder), + actor_id=str(request.user.id), + issue_id=None, + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(serializer.data, status=status.HTTP_201_CREATED) + return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) def destroy(self, request, slug, project_id, comment_id, reaction_code): try: @@ -1555,7 +1654,7 @@ class CommentReactionViewSet(BaseViewSet): "comment_id": str(comment_id), } ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) comment_reaction.delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -1652,7 +1751,7 @@ class IssueCommentPublicViewSet(BaseViewSet): issue_id=str(issue_id), project_id=str(project_id), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) if not ProjectMember.objects.filter( project_id=project_id, @@ -1702,7 +1801,7 @@ class IssueCommentPublicViewSet(BaseViewSet): IssueCommentSerializer(comment).data, cls=DjangoJSONEncoder, ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -1736,7 +1835,7 @@ class IssueCommentPublicViewSet(BaseViewSet): IssueCommentSerializer(comment).data, cls=DjangoJSONEncoder, ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) comment.delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -1811,7 +1910,7 @@ class IssueReactionPublicViewSet(BaseViewSet): issue_id=str(self.kwargs.get("issue_id", None)), project_id=str(self.kwargs.get("project_id", None)), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) return Response(serializer.data, 
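Note that reactions are removed by a natural key rather than a primary key: the destroy signatures above take a `reaction_code` from the URL, and the row is resolved from the acting user plus that code. A toy sketch of that kind of lookup, with an in-memory store standing in for the queryset:

```python
# Illustrative delete-by-natural-key lookup: resolve a reaction from
# (actor, reaction_code) instead of a primary key.
reactions = {("user-1", "thumbs_up"): {"id": "r1"}, ("user-2", "heart"): {"id": "r2"}}

def destroy_reaction(actor_id, reaction_code):
    reaction = reactions.pop((actor_id, reaction_code), None)
    if reaction is None:
        raise LookupError("reaction does not exist")
    return reaction["id"]  # the id still feeds the activity payload

assert destroy_reaction("user-1", "thumbs_up") == "r1"
assert ("user-1", "thumbs_up") not in reactions
```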
status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -1856,7 +1955,7 @@ class IssueReactionPublicViewSet(BaseViewSet): "identifier": str(issue_reaction.id), } ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) issue_reaction.delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -1930,7 +2029,7 @@ class CommentReactionPublicViewSet(BaseViewSet): issue_id=None, project_id=str(self.kwargs.get("project_id", None)), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -1982,7 +2081,7 @@ class CommentReactionPublicViewSet(BaseViewSet): "comment_id": str(comment_id), } ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) comment_reaction.delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -2046,7 +2145,7 @@ class IssueVotePublicViewSet(BaseViewSet): issue_id=str(self.kwargs.get("issue_id", None)), project_id=str(self.kwargs.get("project_id", None)), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) serializer = IssueVoteSerializer(issue_vote) return Response(serializer.data, status=status.HTTP_201_CREATED) @@ -2081,7 +2180,7 @@ class IssueVotePublicViewSet(BaseViewSet): "identifier": str(issue_vote.id), } ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) issue_vote.delete() return Response(status=status.HTTP_204_NO_CONTENT) @@ -2100,24 +2199,19 @@ class IssueRelationViewSet(BaseViewSet): ProjectEntityPermission, ] - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() + def get_queryset(self): + return self.filter_queryset( + super() + .get_queryset() + .filter(workspace__slug=self.kwargs.get("slug")) + .filter(project_id=self.kwargs.get("project_id")) + .filter(issue_id=self.kwargs.get("issue_id")) + .filter(project__project_projectmember__member=self.request.user) + .select_related("project") + .select_related("workspace") + .select_related("issue") + .distinct() ) - if current_instance is not None: - issue_activity.delay( - type="issue_relation.activity.deleted", - requested_data=json.dumps({"related_list": None}), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("issue_id", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueRelationSerializer(current_instance).data, - cls=DjangoJSONEncoder, - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) def create(self, request, slug, project_id, issue_id): try: @@ -2149,9 +2243,9 @@ class IssueRelationViewSet(BaseViewSet): issue_id=str(issue_id), project_id=str(project_id), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) - + if relation == "blocking": return Response( RelatedIssueSerializer(issue_relation, many=True).data, @@ -2181,19 +2275,32 @@ class IssueRelationViewSet(BaseViewSet): status=status.HTTP_400_BAD_REQUEST, ) - def get_queryset(self): - return self.filter_queryset( - super() - .get_queryset() - .filter(workspace__slug=self.kwargs.get("slug")) - .filter(project_id=self.kwargs.get("project_id")) - .filter(issue_id=self.kwargs.get("issue_id")) - 
.filter(project__project_projectmember__member=self.request.user) - .select_related("project") - .select_related("workspace") - .select_related("issue") - .distinct() - ) + def destroy(self, request, slug, project_id, issue_id, pk): + try: + issue_relation = IssueRelation.objects.get( + workspace__slug=slug, project_id=project_id, issue_id=issue_id, pk=pk + ) + current_instance = json.dumps( + IssueRelationSerializer(issue_relation).data, + cls=DjangoJSONEncoder, + ) + issue_relation.delete() + issue_activity.delay( + type="issue_relation.activity.deleted", + requested_data=json.dumps({"related_list": None}), + actor_id=str(request.user.id), + issue_id=str(issue_id), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) class IssueRetrievePublicEndpoint(BaseAPIView): @@ -2404,28 +2511,6 @@ class IssueDraftViewSet(BaseViewSet): ] serializer_class = IssueFlatSerializer model = Issue - - - def perform_destroy(self, instance): - current_instance = ( - self.get_queryset().filter(pk=self.kwargs.get("pk", None)).first() - ) - if current_instance is not None: - issue_activity.delay( - type="issue_draft.activity.deleted", - requested_data=json.dumps( - {"issue_id": str(self.kwargs.get("pk", None))} - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=json.dumps( - IssueSerializer(current_instance).data, cls=DjangoJSONEncoder - ), - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) - def get_queryset(self): return ( @@ -2452,7 +2537,6 @@ class IssueDraftViewSet(BaseViewSet): ) ) - @method_decorator(gzip_page) def list(self, request, slug, project_id): try: @@ -2561,7 +2645,6 @@ class IssueDraftViewSet(BaseViewSet): status=status.HTTP_400_BAD_REQUEST, ) - def create(self, request, slug, project_id): try: project = Project.objects.get(pk=project_id) @@ -2586,7 +2669,7 @@ class IssueDraftViewSet(BaseViewSet): issue_id=str(serializer.data.get("id", None)), project_id=str(project_id), current_instance=None, - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) return Response(serializer.data, status=status.HTTP_201_CREATED) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -2596,19 +2679,20 @@ class IssueDraftViewSet(BaseViewSet): {"error": "Project was not found"}, status=status.HTTP_404_NOT_FOUND ) - def partial_update(self, request, slug, project_id, pk): try: issue = Issue.objects.get( workspace__slug=slug, project_id=project_id, pk=pk ) - serializer = IssueSerializer( - issue, data=request.data, partial=True - ) + serializer = IssueSerializer(issue, data=request.data, partial=True) if serializer.is_valid(): - if(request.data.get("is_draft") is not None and not request.data.get("is_draft")): - serializer.save(created_at=timezone.now(), updated_at=timezone.now()) + if request.data.get("is_draft") is not None and not request.data.get( + "is_draft" + ): + serializer.save( + created_at=timezone.now(), updated_at=timezone.now() + ) else: serializer.save() issue_activity.delay( @@ -2621,7 +2705,7 @@ class IssueDraftViewSet(BaseViewSet): IssueSerializer(issue).data, cls=DjangoJSONEncoder, ), - epoch=int(timezone.now().timestamp()) + 
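One subtlety in the draft `partial_update` above: when `is_draft` flips to false, the save also overwrites `created_at` and `updated_at`, so a promoted draft surfaces as a newly created issue. A simplified sketch of that branch (`datetime` stands in for `django.utils.timezone`; the record dict is illustrative):

```python
# Sketch of the draft-promotion branch: clearing is_draft also refreshes
# the timestamps so the promoted issue counts as newly created.
from datetime import datetime, timezone

def save_draft(issue, data):
    if data.get("is_draft") is not None and not data.get("is_draft"):
        issue["created_at"] = issue["updated_at"] = datetime.now(timezone.utc)
    issue.update(data)

issue = {"is_draft": True, "created_at": datetime(2023, 1, 1, tzinfo=timezone.utc)}
save_draft(issue, {"is_draft": False})
assert issue["is_draft"] is False
assert issue["created_at"] > datetime(2023, 1, 2, tzinfo=timezone.utc)
```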
epoch=int(timezone.now().timestamp()), ) return Response(serializer.data, status=status.HTTP_200_OK) return Response(serializer.errors, status=status.HTTP_400_BAD_REQUEST) @@ -2637,7 +2721,6 @@ class IssueDraftViewSet(BaseViewSet): status=status.HTTP_400_BAD_REQUEST, ) - def retrieve(self, request, slug, project_id, pk=None): try: issue = Issue.objects.get( @@ -2648,4 +2731,26 @@ class IssueDraftViewSet(BaseViewSet): return Response( {"error": "Issue Does not exist"}, status=status.HTTP_404_NOT_FOUND ) - + + def destroy(self, request, slug, project_id, pk=None): + try: + issue = Issue.objects.get(workspace__slug=slug, project_id=project_id, pk=pk) + current_instance = json.dumps( + IssueSerializer(issue).data, cls=DjangoJSONEncoder + ) + issue.delete() + issue_activity.delay( + type="issue_draft.activity.deleted", + requested_data=json.dumps( + {"issue_id": str(pk)} + ), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=current_instance, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + except Exception as e: + capture_exception(e) + return Response({"error": "Something went wrong please try again later"}, status=status.HTTP_400_BAD_REQUEST) diff --git a/apiserver/plane/api/views/module.py b/apiserver/plane/api/views/module.py index 1489edb2d..8dda63968 100644 --- a/apiserver/plane/api/views/module.py +++ b/apiserver/plane/api/views/module.py @@ -141,29 +141,6 @@ class ModuleViewSet(BaseViewSet): .order_by(order_by, "name") ) - def perform_destroy(self, instance): - module_issues = list( - ModuleIssue.objects.filter(module_id=self.kwargs.get("pk")).values_list( - "issue", flat=True - ) - ) - issue_activity.delay( - type="module.activity.deleted", - requested_data=json.dumps( - { - "module_id": str(self.kwargs.get("pk")), - "issues": [str(issue_id) for issue_id in module_issues], - } - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - - return super().perform_destroy(instance) - def create(self, request, slug, project_id): try: project = Project.objects.get(workspace__slug=slug, pk=project_id) @@ -309,6 +286,37 @@ class ModuleViewSet(BaseViewSet): status=status.HTTP_400_BAD_REQUEST, ) + def destroy(self, request, slug, project_id, pk): + try: + module = Module.objects.get( + workspace__slug=slug, project_id=project_id, pk=pk + ) + module_issues = list( + ModuleIssue.objects.filter(module_id=pk).values_list("issue", flat=True) + ) + module.delete() + issue_activity.delay( + type="module.activity.deleted", + requested_data=json.dumps( + { + "module_id":
str(self.kwargs.get("module_id")), - "issues": [str(instance.issue_id)], - } - ), - actor_id=str(self.request.user.id), - issue_id=str(self.kwargs.get("pk", None)), - project_id=str(self.kwargs.get("project_id", None)), - current_instance=None, - epoch=int(timezone.now().timestamp()) - ) - return super().perform_destroy(instance) def get_queryset(self): return self.filter_queryset( @@ -510,7 +502,7 @@ class ModuleIssueViewSet(BaseViewSet): ), } ), - epoch=int(timezone.now().timestamp()) + epoch=int(timezone.now().timestamp()), ) return Response( @@ -528,6 +520,34 @@ class ModuleIssueViewSet(BaseViewSet): status=status.HTTP_400_BAD_REQUEST, ) + def destroy(self, request, slug, project_id, module_id, pk): + try: + module_issue = ModuleIssue.objects.get( + workspace__slug=slug, project_id=project_id, module_id=module_id, pk=pk + ) + module_issue.delete() + issue_activity.delay( + type="module.activity.deleted", + requested_data=json.dumps( + { + "module_id": str(module_id), + "issues": [str(module_issue.issue_id)], + } + ), + actor_id=str(request.user.id), + issue_id=str(pk), + project_id=str(project_id), + current_instance=None, + epoch=int(timezone.now().timestamp()), + ) + return Response(status=status.HTTP_204_NO_CONTENT) + except Exception as e: + capture_exception(e) + return Response( + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, + ) + class ModuleLinkViewSet(BaseViewSet): permission_classes = [ diff --git a/apiserver/plane/api/views/notification.py b/apiserver/plane/api/views/notification.py index 75b94f034..6feca2fb2 100644 --- a/apiserver/plane/api/views/notification.py +++ b/apiserver/plane/api/views/notification.py @@ -37,42 +37,34 @@ class NotificationViewSet(BaseViewSet, BasePaginator): def list(self, request, slug): try: + # Get query parameters snoozed = request.GET.get("snoozed", "false") archived = request.GET.get("archived", "false") read = request.GET.get("read", "true") - - # Filter type type = request.GET.get("type", "all") - notifications = ( - Notification.objects.filter( - workspace__slug=slug, receiver_id=request.user.id - ) - .select_related("workspace", "project", "triggered_by", "receiver") + notifications = Notification.objects.filter(workspace__slug=slug, receiver_id=request.user.id) \ + .select_related("workspace", "project", "triggered_by", "receiver") \ .order_by("snoozed_till", "-created_at") - ) - # Filter for snoozed notifications - if snoozed == "false": - notifications = notifications.filter( - Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), - ) + # Filters based on query parameters + snoozed_filters = { + "true": Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False), + "false": Q(snoozed_till__gte=timezone.now()) | Q(snoozed_till__isnull=True), + } - if snoozed == "true": - notifications = notifications.filter( - Q(snoozed_till__lt=timezone.now()) | Q(snoozed_till__isnull=False) - ) + notifications = notifications.filter(snoozed_filters[snoozed]) + + archived_filters = { + "true": Q(archived_at__isnull=False), + "false": Q(archived_at__isnull=True), + } + + notifications = notifications.filter(archived_filters[archived]) if read == "false": notifications = notifications.filter(read_at__isnull=True) - # Filter for archived or unarchive - if archived == "false": - notifications = notifications.filter(archived_at__isnull=True) - - if archived == "true": - notifications = notifications.filter(archived_at__isnull=False) - # Subscribed issues if type == "watching": issue_ids 
= IssueSubscriber.objects.filter( diff --git a/apiserver/plane/api/views/oauth.py b/apiserver/plane/api/views/oauth.py index 184cba951..4603229f4 100644 --- a/apiserver/plane/api/views/oauth.py +++ b/apiserver/plane/api/views/oauth.py @@ -186,14 +186,11 @@ class OauthEndpoint(BaseAPIView): user.is_email_verified = email_verified user.save() - serialized_user = UserSerializer(user).data - access_token, refresh_token = get_tokens_for_user(user) data = { "access_token": access_token, "refresh_token": refresh_token, - "user": serialized_user, } SocialLoginConnection.objects.update_or_create( @@ -264,14 +261,11 @@ class OauthEndpoint(BaseAPIView): user.last_login_uagent = request.META.get("HTTP_USER_AGENT") user.token_updated_at = timezone.now() user.save() - serialized_user = UserSerializer(user).data access_token, refresh_token = get_tokens_for_user(user) data = { "access_token": access_token, "refresh_token": refresh_token, - "user": serialized_user, - "permissions": [], } if settings.ANALYTICS_BASE_API: _ = requests.post( diff --git a/apiserver/plane/api/views/project.py b/apiserver/plane/api/views/project.py index 1ba227177..4545b5376 100644 --- a/apiserver/plane/api/views/project.py +++ b/apiserver/plane/api/views/project.py @@ -7,6 +7,7 @@ from datetime import datetime from django.core.exceptions import ValidationError from django.db import IntegrityError from django.db.models import ( + Prefetch, Q, Exists, OuterRef, @@ -29,6 +30,7 @@ from sentry_sdk import capture_exception from .base import BaseViewSet, BaseAPIView from plane.api.serializers import ( ProjectSerializer, + ProjectListSerializer, ProjectMemberSerializer, ProjectDetailSerializer, ProjectMemberInviteSerializer, @@ -86,12 +88,6 @@ class ProjectViewSet(BaseViewSet): return ProjectDetailSerializer def get_queryset(self): - subquery = ProjectFavorite.objects.filter( - user=self.request.user, - project_id=OuterRef("pk"), - workspace__slug=self.kwargs.get("slug"), - ) - return self.filter_queryset( super() .get_queryset() @@ -100,7 +96,15 @@ class ProjectViewSet(BaseViewSet): .select_related( "workspace", "workspace__owner", "default_assignee", "project_lead" ) - .annotate(is_favorite=Exists(subquery)) + .annotate( + is_favorite=Exists( + ProjectFavorite.objects.filter( + user=self.request.user, + project_id=OuterRef("pk"), + workspace__slug=self.kwargs.get("slug"), + ) + ) + ) .annotate( is_member=Exists( ProjectMember.objects.filter( @@ -149,12 +153,8 @@ class ProjectViewSet(BaseViewSet): def list(self, request, slug): try: - is_favorite = request.GET.get("is_favorite", "all") - subquery = ProjectFavorite.objects.filter( - user=self.request.user, - project_id=OuterRef("pk"), - workspace__slug=self.kwargs.get("slug"), - ) + fields = [field for field in request.GET.get("fields", "").split(",") if field] + sort_order_query = ProjectMember.objects.filter( member=request.user, project_id=OuterRef("pk"), @@ -162,37 +162,31 @@ class ProjectViewSet(BaseViewSet): ).values("sort_order") projects = ( self.get_queryset() - .annotate(is_favorite=Exists(subquery)) .annotate(sort_order=Subquery(sort_order_query)) - .order_by("sort_order", "name") - .annotate( - total_members=ProjectMember.objects.filter( - project_id=OuterRef("id") + .prefetch_related( + Prefetch( + "project_projectmember", + queryset=ProjectMember.objects.filter( + workspace__slug=slug, + ).select_related("member"), ) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - 
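The project list endpoint now honours a comma-separated `?fields=` parameter, which feeds the serializer's dynamic field filtering. A plain-Python sketch of the parsing and trimming steps (the project payloads are invented; in the view the trimming happens inside `ProjectListSerializer`):

```python
# Sketch of the ?fields= handling: parse the comma-separated parameter and
# trim each result dict down to the requested keys.
def parse_fields(raw):
    return [field for field in raw.split(",") if field]

def trim(payload, fields):
    return {k: v for k, v in payload.items() if k in fields} if fields else payload

projects = [{"id": 1, "name": "Docs", "total_members": 4}]
fields = parse_fields("id,name")
assert [trim(p, fields) for p in projects] == [{"id": 1, "name": "Docs"}]
```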
total_cycles=Cycle.objects.filter(project_id=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") - ) - .annotate( - total_modules=Module.objects.filter(project_id=OuterRef("id")) - .order_by() - .annotate(count=Func(F("id"), function="Count")) - .values("count") ) + .order_by("sort_order", "name") ) + if request.GET.get("per_page", False) and request.GET.get("cursor", False): + return self.paginate( + request=request, + queryset=(projects), + on_results=lambda projects: ProjectListSerializer( + projects, many=True + ).data, + ) - if is_favorite == "true": - projects = projects.filter(is_favorite=True) - if is_favorite == "false": - projects = projects.filter(is_favorite=False) - - return Response(ProjectDetailSerializer(projects, many=True).data) + return Response( + ProjectListSerializer( + projects, many=True, fields=fields if fields else None + ).data + ) except Exception as e: capture_exception(e) return Response( diff --git a/apiserver/plane/api/views/user.py b/apiserver/plane/api/views/user.py index 68958e504..adb7a0eab 100644 --- a/apiserver/plane/api/views/user.py +++ b/apiserver/plane/api/views/user.py @@ -8,6 +8,8 @@ from sentry_sdk import capture_exception from plane.api.serializers import ( UserSerializer, IssueActivitySerializer, + UserMeSerializer, + UserMeSettingsSerializer, ) from plane.api.views.base import BaseViewSet, BaseAPIView @@ -31,69 +33,22 @@ class UserEndpoint(BaseViewSet): def retrieve(self, request): try: - workspace = Workspace.objects.get( - pk=request.user.last_workspace_id, workspace_member__member=request.user - ) - workspace_invites = WorkspaceMemberInvite.objects.filter( - email=request.user.email - ).count() - assigned_issues = Issue.issue_objects.filter( - assignees__in=[request.user] - ).count() - - serialized_data = UserSerializer(request.user).data - serialized_data["workspace"] = { - "last_workspace_id": request.user.last_workspace_id, - "last_workspace_slug": workspace.slug, - "fallback_workspace_id": request.user.last_workspace_id, - "fallback_workspace_slug": workspace.slug, - "invites": workspace_invites, - } - serialized_data.setdefault("issues", {})[ - "assigned_issues" - ] = assigned_issues - + serialized_data = UserMeSerializer(request.user).data return Response( serialized_data, status=status.HTTP_200_OK, ) - except Workspace.DoesNotExist: - # This exception will be hit even when the `last_workspace_id` is None - - workspace_invites = WorkspaceMemberInvite.objects.filter( - email=request.user.email - ).count() - assigned_issues = Issue.issue_objects.filter( - assignees__in=[request.user] - ).count() - - fallback_workspace = ( - Workspace.objects.filter(workspace_member__member=request.user) - .order_by("created_at") - .first() - ) - - serialized_data = UserSerializer(request.user).data - - serialized_data["workspace"] = { - "last_workspace_id": None, - "last_workspace_slug": None, - "fallback_workspace_id": fallback_workspace.id - if fallback_workspace is not None - else None, - "fallback_workspace_slug": fallback_workspace.slug - if fallback_workspace is not None - else None, - "invites": workspace_invites, - } - serialized_data.setdefault("issues", {})[ - "assigned_issues" - ] = assigned_issues - + except Exception as e: + capture_exception(e) return Response( - serialized_data, - status=status.HTTP_200_OK, + {"error": "Something went wrong please try again later"}, + status=status.HTTP_400_BAD_REQUEST, ) + + def retrieve_user_settings(self, request): + try: + serialized_data = 
UserMeSettingsSerializer(request.user).data + return Response(serialized_data, status=status.HTTP_200_OK) except Exception as e: capture_exception(e) return Response( diff --git a/apiserver/plane/api/views/workspace.py b/apiserver/plane/api/views/workspace.py index 8d518b160..b692dc345 100644 --- a/apiserver/plane/api/views/workspace.py +++ b/apiserver/plane/api/views/workspace.py @@ -48,6 +48,7 @@ from plane.api.serializers import ( IssueActivitySerializer, IssueLiteSerializer, WorkspaceMemberAdminSerializer, + WorkspaceMemberMeSerializer, ) from plane.api.views.base import BaseAPIView from . import BaseViewSet @@ -825,7 +826,7 @@ class WorkspaceMemberUserEndpoint(BaseAPIView): workspace_member = WorkspaceMember.objects.get( member=request.user, workspace__slug=slug ) - serializer = WorkSpaceMemberSerializer(workspace_member) + serializer = WorkspaceMemberMeSerializer(workspace_member) return Response(serializer.data, status=status.HTTP_200_OK) except (Workspace.DoesNotExist, WorkspaceMember.DoesNotExist): return Response({"error": "Forbidden"}, status=status.HTTP_403_FORBIDDEN) diff --git a/apiserver/plane/bgtasks/analytic_plot_export.py b/apiserver/plane/bgtasks/analytic_plot_export.py index 492be8870..a041fd169 100644 --- a/apiserver/plane/bgtasks/analytic_plot_export.py +++ b/apiserver/plane/bgtasks/analytic_plot_export.py @@ -20,8 +20,8 @@ from plane.utils.issue_filters import issue_filters row_mapping = { "state__name": "State", "state__group": "State Group", - "labels__name": "Label", - "assignees__display_name": "Assignee Name", + "labels__id": "Label", + "assignees__id": "Assignee Name", "start_date": "Start Date", "target_date": "Due Date", "completed_at": "Completed At", @@ -29,8 +29,321 @@ row_mapping = { "issue_count": "Issue Count", "priority": "Priority", "estimate": "Estimate", + "issue_cycle__cycle_id": "Cycle", + "issue_module__module_id": "Module" } +ASSIGNEE_ID = "assignees__id" +LABEL_ID = "labels__id" +STATE_ID = "state_id" +CYCLE_ID = "issue_cycle__cycle_id" +MODULE_ID = "issue_module__module_id" + + +def send_export_email(email, slug, csv_buffer): + """Helper function to send export email.""" + subject = "Your Export is ready" + html_content = render_to_string("emails/exports/analytics.html", {}) + text_content = strip_tags(html_content) + + csv_buffer.seek(0) + msg = EmailMultiAlternatives(subject, text_content, settings.EMAIL_FROM, [email]) + msg.attach(f"{slug}-analytics.csv", csv_buffer.getvalue()) + msg.send(fail_silently=False) + + +def get_assignee_details(slug, filters): + """Fetch assignee details if required.""" + return ( + Issue.issue_objects.filter( + workspace__slug=slug, **filters, assignees__avatar__isnull=False + ) + .distinct("assignees__id") + .order_by("assignees__id") + .values( + "assignees__avatar", + "assignees__display_name", + "assignees__first_name", + "assignees__last_name", + "assignees__id", + ) + ) + + +def get_label_details(slug, filters): + """Fetch label details if required""" + return ( + Issue.objects.filter(workspace__slug=slug, **filters, labels__id__isnull=False) + .distinct("labels__id") + .order_by("labels__id") + .values("labels__id", "labels__color", "labels__name") + ) + + +def get_state_details(slug, filters): + return ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + ) + .distinct("state_id") + .order_by("state_id") + .values("state_id", "state__name", "state__color") + ) + + +def get_module_details(slug, filters): + return ( + Issue.issue_objects.filter( + workspace__slug=slug, + **filters, + 
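`send_export_email` above writes the rows into an in-memory buffer, rewinds it, and attaches the contents to the message. A standalone sketch of the buffer step (`csv` and `io` only; the actual send uses Django's `EmailMultiAlternatives`, stubbed out here):

```python
# Sketch of the CSV-buffer step behind send_export_email: rows go into an
# in-memory StringIO, which is rewound before its contents are attached.
import csv
import io

def rows_to_csv_buffer(rows):
    buffer = io.StringIO()
    writer = csv.writer(buffer, delimiter=",", quoting=csv.QUOTE_ALL)
    for row in rows:
        writer.writerow(row)
    buffer.seek(0)  # rewind so the mail attachment reads from the start
    return buffer

buffer = rows_to_csv_buffer([("State", "Issue Count"), ("Done", 3)])
assert buffer.getvalue().startswith('"State","Issue Count"')
```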
+def get_assignee_details(slug, filters):
+    """Fetch assignee details if required."""
+    return (
+        Issue.issue_objects.filter(
+            workspace__slug=slug, **filters, assignees__avatar__isnull=False
+        )
+        .distinct("assignees__id")
+        .order_by("assignees__id")
+        .values(
+            "assignees__avatar",
+            "assignees__display_name",
+            "assignees__first_name",
+            "assignees__last_name",
+            "assignees__id",
+        )
+    )
+
+
+def get_label_details(slug, filters):
+    """Fetch label details if required."""
+    return (
+        Issue.objects.filter(workspace__slug=slug, **filters, labels__id__isnull=False)
+        .distinct("labels__id")
+        .order_by("labels__id")
+        .values("labels__id", "labels__color", "labels__name")
+    )
+
+
+def get_state_details(slug, filters):
+    return (
+        Issue.issue_objects.filter(
+            workspace__slug=slug,
+            **filters,
+        )
+        .distinct("state_id")
+        .order_by("state_id")
+        .values("state_id", "state__name", "state__color")
+    )
+
+
+def get_module_details(slug, filters):
+    return (
+        Issue.issue_objects.filter(
+            workspace__slug=slug,
+            **filters,
+            issue_module__module_id__isnull=False,
+        )
+        .distinct("issue_module__module_id")
+        .order_by("issue_module__module_id")
+        .values(
+            "issue_module__module_id",
+            "issue_module__module__name",
+        )
+    )
+
+
+def get_cycle_details(slug, filters):
+    return (
+        Issue.issue_objects.filter(
+            workspace__slug=slug,
+            **filters,
+            issue_cycle__cycle_id__isnull=False,
+        )
+        .distinct("issue_cycle__cycle_id")
+        .order_by("issue_cycle__cycle_id")
+        .values(
+            "issue_cycle__cycle_id",
+            "issue_cycle__cycle__name",
+        )
+    )
+
+
+def generate_csv_from_rows(rows):
+    """Generate CSV buffer from rows."""
+    csv_buffer = io.StringIO()
+    writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
+    [writer.writerow(row) for row in rows]
+    return csv_buffer
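+
+
+# Segmented export layout (illustrative): the header row is
+# ("X-Axis", "Y-Axis", <segment 1>, <segment 2>, ...); each data row then holds
+# the dimension, its total, and one value per segment column.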
+def generate_segmented_rows(
+    distribution,
+    x_axis,
+    y_axis,
+    segment,
+    key,
+    assignee_details,
+    label_details,
+    state_details,
+    cycle_details,
+    module_details,
+):
+    segment_zero = list(
+        set(
+            item.get("segment") for sublist in distribution.values() for item in sublist
+        )
+    )
+
+    segmented = segment
+
+    row_zero = [
+        row_mapping.get(x_axis, "X-Axis"),
+        row_mapping.get(y_axis, "Y-Axis"),
+    ] + segment_zero
+
+    rows = []
+    for item, data in distribution.items():
+        generated_row = [
+            item,
+            sum(obj.get(key) for obj in data if obj.get(key) is not None),
+        ]
+
+        for segment in segment_zero:
+            value = next((x.get(key) for x in data if x.get("segment") == segment), "0")
+            generated_row.append(value)
+
+        if x_axis == ASSIGNEE_ID:
+            assignee = next(
+                (
+                    user
+                    for user in assignee_details
+                    if str(user[ASSIGNEE_ID]) == str(item)
+                ),
+                None,
+            )
+            if assignee:
+                generated_row[
+                    0
+                ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
+
+        if x_axis == LABEL_ID:
+            label = next(
+                (lab for lab in label_details if str(lab[LABEL_ID]) == str(item)),
+                None,
+            )
+
+            if label:
+                generated_row[0] = f"{label['labels__name']}"
+
+        if x_axis == STATE_ID:
+            state = next(
+                (sta for sta in state_details if str(sta[STATE_ID]) == str(item)),
+                None,
+            )
+
+            if state:
+                generated_row[0] = f"{state['state__name']}"
+
+        if x_axis == CYCLE_ID:
+            cycle = next(
+                (cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)),
+                None,
+            )
+
+            if cycle:
+                generated_row[0] = f"{cycle['issue_cycle__cycle__name']}"
+
+        if x_axis == MODULE_ID:
+            module = next(
+                (mod for mod in module_details if str(mod[MODULE_ID]) == str(item)),
+                None,
+            )
+
+            if module:
+                generated_row[0] = f"{module['issue_module__module__name']}"
+
+        rows.append(tuple(generated_row))
+
+    if segmented == ASSIGNEE_ID:
+        for index, segm in enumerate(row_zero[2:]):
+            assignee = next(
+                (
+                    user
+                    for user in assignee_details
+                    if str(user[ASSIGNEE_ID]) == str(segm)
+                ),
+                None,
+            )
+            if assignee:
+                row_zero[
+                    index + 2
+                ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
+
+    if segmented == LABEL_ID:
+        for index, segm in enumerate(row_zero[2:]):
+            label = next(
+                (lab for lab in label_details if str(lab[LABEL_ID]) == str(segm)),
+                None,
+            )
+            if label:
+                row_zero[index + 2] = label["labels__name"]
+
+    if segmented == STATE_ID:
+        for index, segm in enumerate(row_zero[2:]):
+            state = next(
+                (sta for sta in state_details if str(sta[STATE_ID]) == str(segm)),
+                None,
+            )
+            if state:
+                row_zero[index + 2] = state["state__name"]
+
+    if segmented == MODULE_ID:
+        for index, segm in enumerate(row_zero[2:]):
+            module = next(
+                (mod for mod in module_details if str(mod[MODULE_ID]) == str(segm)),
+                None,
+            )
+            if module:
+                row_zero[index + 2] = module["issue_module__module__name"]
+
+    if segmented == CYCLE_ID:
+        for index, segm in enumerate(row_zero[2:]):
+            cycle = next(
+                (cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(segm)),
+                None,
+            )
+            if cycle:
+                row_zero[index + 2] = cycle["issue_cycle__cycle__name"]
+
+    return [tuple(row_zero)] + rows
+
+
+def generate_non_segmented_rows(
+    distribution,
+    x_axis,
+    y_axis,
+    key,
+    assignee_details,
+    label_details,
+    state_details,
+    cycle_details,
+    module_details,
+):
+    rows = []
+    for item, data in distribution.items():
+        row = [item, data[0].get("count" if y_axis == "issue_count" else "estimate")]
+
+        if x_axis == ASSIGNEE_ID:
+            assignee = next(
+                (
+                    user
+                    for user in assignee_details
+                    if str(user[ASSIGNEE_ID]) == str(item)
+                ),
+                None,
+            )
+            if assignee:
+                row[
+                    0
+                ] = f"{assignee['assignees__first_name']} {assignee['assignees__last_name']}"
+
+        if x_axis == LABEL_ID:
+            label = next(
+                (lab for lab in label_details if str(lab[LABEL_ID]) == str(item)),
+                None,
+            )
+
+            if label:
+                row[0] = f"{label['labels__name']}"
+
+        if x_axis == STATE_ID:
+            state = next(
+                (sta for sta in state_details if str(sta[STATE_ID]) == str(item)),
+                None,
+            )
+
+            if state:
+                row[0] = f"{state['state__name']}"
+
+        if x_axis == CYCLE_ID:
+            cycle = next(
+                (cyc for cyc in cycle_details if str(cyc[CYCLE_ID]) == str(item)),
+                None,
+            )
+
+            if cycle:
+                row[0] = f"{cycle['issue_cycle__cycle__name']}"
+
+        if x_axis == MODULE_ID:
+            module = next(
+                (mod for mod in module_details if str(mod[MODULE_ID]) == str(item)),
+                None,
+            )
+
+            if module:
+                row[0] = f"{module['issue_module__module__name']}"
+
+        rows.append(tuple(row))
+
+    row_zero = [row_mapping.get(x_axis, "X-Axis"), row_mapping.get(y_axis, "Y-Axis")]
+    return [tuple(row_zero)] + rows
+
 @shared_task
 def analytic_export_task(email, data, slug):
@@ -43,134 +356,70 @@ def analytic_export_task(email, data, slug):
         segment = data.get("segment", False)
         distribution = build_graph_plot(
-            queryset=queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
+            queryset, x_axis=x_axis, y_axis=y_axis, segment=segment
         )
         key = "count" if y_axis == "issue_count" else "estimate"
-        segmented = segment
-        assignee_details = {}
-        if x_axis in ["assignees__id"] or segment in ["assignees__id"]:
-            assignee_details = (
-                Issue.issue_objects.filter(workspace__slug=slug, **filters, assignees__avatar__isnull=False)
-                .order_by("assignees__id")
-                .distinct("assignees__id")
-                .values("assignees__avatar", "assignees__display_name", "assignees__first_name", "assignees__last_name", "assignees__id")
-            )
+        assignee_details = (
+            get_assignee_details(slug, filters)
+            if x_axis == ASSIGNEE_ID or segment == ASSIGNEE_ID
+            else {}
+        )
+
+        label_details = (
+            get_label_details(slug, filters)
+            if x_axis == LABEL_ID or segment == LABEL_ID
+            else {}
+        )
+
+        state_details = (
+            get_state_details(slug, filters)
+            if x_axis == STATE_ID or segment == STATE_ID
+            else {}
+        )
+
+        cycle_details = (
+            get_cycle_details(slug, filters)
+            if x_axis == CYCLE_ID or segment == CYCLE_ID
+            else {}
+        )
+
+        module_details = (
+            get_module_details(slug, filters)
+            if x_axis == MODULE_ID or segment == MODULE_ID
+            else {}
+        )
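+        # Each lookup is fetched only when its id is on an axis; `{}` stands in
+        # otherwise, so the row generators skip the name replacement entirely.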
         if segment:
-            segment_zero = []
-            for item in distribution:
-                current_dict = distribution.get(item)
-                for current in current_dict:
-                    segment_zero.append(current.get("segment"))
-
-            segment_zero = list(set(segment_zero))
-            row_zero = (
-                [
-                    row_mapping.get(x_axis, "X-Axis"),
-                ]
-                + [
-                    row_mapping.get(y_axis, "Y-Axis"),
-                ]
-                + segment_zero
-            )
-            rows = []
-            for item in distribution:
-                generated_row = [
-                    item,
-                ]
-
-                data = distribution.get(item)
-                # Add y axis values
-                generated_row.append(sum(obj.get(key) for obj in data if obj.get(key, None) is not None))
-
-                for segment in segment_zero:
-                    value = [x for x in data if x.get("segment") == segment]
-                    if len(value):
-                        generated_row.append(value[0].get(key))
-                    else:
-                        generated_row.append("0")
-                # x-axis replacement for names
-                if x_axis in ["assignees__id"]:
-                    assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)]
-                    if len(assignee):
-                        generated_row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
-                rows.append(tuple(generated_row))
-
-            # If segment is ["assignees__display_name"] then replace segment_zero rows with first and last names
-            if segmented in ["assignees__id"]:
-                for index, segm in enumerate(row_zero[2:]):
-                    # find the name of the user
-                    assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(segm)]
-                    if len(assignee):
-                        row_zero[index + 2] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
-
-            rows = [tuple(row_zero)] + rows
-            csv_buffer = io.StringIO()
-            writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
-
-            # Write CSV data to the buffer
-            for row in rows:
-                writer.writerow(row)
-
-            subject = "Your Export is ready"
-
-            html_content = render_to_string("emails/exports/analytics.html", {})
-
-            text_content = strip_tags(html_content)
-            csv_buffer.seek(0)
-            msg = EmailMultiAlternatives(
-                subject, text_content, settings.EMAIL_FROM, [email]
-            )
-            msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
-            msg.send(fail_silently=False)
+            rows = generate_segmented_rows(
+                distribution,
+                x_axis,
+                y_axis,
+                segment,
+                key,
+                assignee_details,
+                label_details,
+                state_details,
+                cycle_details,
+                module_details,
+            )
         else:
-            row_zero = [
-                row_mapping.get(x_axis, "X-Axis"),
-                row_mapping.get(y_axis, "Y-Axis"),
-            ]
-            rows = []
-            for item in distribution:
-                row = [
-                    item,
-                    distribution.get(item)[0].get("count")
-                    if y_axis == "issue_count"
-                    else distribution.get(item)[0].get("estimate "),
-                ]
-                # x-axis replacement to names
-                if x_axis in ["assignees__id"]:
-                    assignee = [user for user in assignee_details if str(user.get("assignees__id")) == str(item)]
-                    if len(assignee):
-                        row[0] = str(assignee[0].get("assignees__first_name")) + " " + str(assignee[0].get("assignees__last_name"))
-
-                rows.append(tuple(row))
-            rows = [tuple(row_zero)] + rows
-            csv_buffer = io.StringIO()
-            writer = csv.writer(csv_buffer, delimiter=",", quoting=csv.QUOTE_ALL)
-
-            # Write CSV data to the buffer
-            for row in rows:
-                writer.writerow(row)
-
-            subject = "Your Export is ready"
-
-            html_content = render_to_string("emails/exports/analytics.html", {})
-
-            text_content = strip_tags(html_content)
-
-            csv_buffer.seek(0)
-            msg = EmailMultiAlternatives(
-                subject, text_content, settings.EMAIL_FROM, [email]
-            )
-            msg.attach(f"{slug}-analytics.csv", csv_buffer.read())
-            msg.send(fail_silently=False)
+            rows = generate_non_segmented_rows(
+                distribution,
+                x_axis,
+                y_axis,
+                key,
+                assignee_details,
+                label_details,
+                state_details,
+                cycle_details,
+                module_details,
+            )
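+
+        # Build the CSV in memory and hand it to the mailer as an attachment.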
+        csv_buffer = generate_csv_from_rows(rows)
+        send_export_email(email, slug, csv_buffer)
     except Exception as e:
-        # Print logs if in DEBUG mode
         if settings.DEBUG:
             print(e)
         capture_exception(e)
-        return
diff --git a/apiserver/plane/bgtasks/forgot_password_task.py b/apiserver/plane/bgtasks/forgot_password_task.py
index 93283dfd5..95828765c 100644
--- a/apiserver/plane/bgtasks/forgot_password_task.py
+++ b/apiserver/plane/bgtasks/forgot_password_task.py
@@ -16,7 +16,7 @@ from plane.db.models import User
 def forgot_password(first_name, email, uidb64, token, current_site):
     try:
-        realtivelink = f"/reset-password/?uidb64={uidb64}&token={token}"
+        realtivelink = f"/accounts/reset-password/?uidb64={uidb64}&token={token}"
         abs_url = current_site + realtivelink
         from_email_string = settings.EMAIL_FROM
diff --git a/apiserver/plane/bgtasks/issue_activites_task.py b/apiserver/plane/bgtasks/issue_activites_task.py
index 87c4fa1a4..0fd8b0f92 100644
--- a/apiserver/plane/bgtasks/issue_activites_task.py
+++ b/apiserver/plane/bgtasks/issue_activites_task.py
@@ -33,13 +33,7 @@ from plane.api.serializers import IssueActivitySerializer

 # Track Chnages in name
 def track_name(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-    epoch
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     if current_instance.get("name") != requested_data.get("name"):
         issue_activities.append(
@@ -60,13 +54,7 @@ def track_name(

 # Track changes in parent issue
 def track_parent(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-    epoch
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     if current_instance.get("parent") != requested_data.get("parent"):
         if requested_data.get("parent") == None:
@@ -112,13 +100,7 @@ def track_parent(

 # Track changes in priority
 def track_priority(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-    epoch
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     if current_instance.get("priority") != requested_data.get("priority"):
         issue_activities.append(
@@ -139,13 +121,7 @@ def track_priority(

 # Track chnages in state of the issue
 def track_state(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-    epoch
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     if current_instance.get("state") != requested_data.get("state"):
         new_state = State.objects.get(pk=requested_data.get("state", None))
@@ -171,47 +147,43 @@ def track_state(

 # Track issue description
 def track_description(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-    epoch
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     if current_instance.get("description_html") != requested_data.get(
         "description_html"
     ):
-        last_activity = IssueActivity.objects.filter(issue_id=issue_id).order_by("-created_at").first()
-        if(last_activity is not None and last_activity.field == "description" and actor.id == last_activity.actor_id):
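+        # Debounce: a consecutive description edit by the same actor refreshes
+        # the previous activity's timestamp instead of logging a new entry.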
+        last_activity = (
+            IssueActivity.objects.filter(issue_id=issue_id)
+            .order_by("-created_at")
+            .first()
+        )
+        if (
+            last_activity is not None
+            and last_activity.field == "description"
+            and actor.id == last_activity.actor_id
+        ):
             last_activity.created_at = timezone.now()
             last_activity.save(update_fields=["created_at"])
         else:
-                issue_activities.append(
-                    IssueActivity(
-                        issue_id=issue_id,
-                        actor=actor,
-                        verb="updated",
-                        old_value=current_instance.get("description_html"),
-                        new_value=requested_data.get("description_html"),
-                        field="description",
-                        project=project,
-                        workspace=project.workspace,
-                        comment=f"updated the description to {requested_data.get('description_html')}",
-                        epoch=epoch,
-                    )
-                )
+            issue_activities.append(
+                IssueActivity(
+                    issue_id=issue_id,
+                    actor=actor,
+                    verb="updated",
+                    old_value=current_instance.get("description_html"),
+                    new_value=requested_data.get("description_html"),
+                    field="description",
+                    project=project,
+                    workspace=project.workspace,
+                    comment=f"updated the description to {requested_data.get('description_html')}",
+                    epoch=epoch,
+                )
+            )

 # Track changes in issue target date
 def track_target_date(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-    epoch
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     if current_instance.get("target_date") != requested_data.get("target_date"):
         if requested_data.get("target_date") == None:
@@ -248,13 +220,7 @@ def track_target_date(

 # Track changes in issue start date
 def track_start_date(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-    epoch
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     if current_instance.get("start_date") != requested_data.get("start_date"):
         if requested_data.get("start_date") == None:
@@ -291,13 +257,7 @@ def track_start_date(

 # Track changes in issue labels
 def track_labels(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-    epoch
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     # Label Addition
     if len(requested_data.get("labels_list")) > len(current_instance.get("labels")):
@@ -346,13 +306,7 @@ def track_labels(

 # Track changes in issue assignees
 def track_assignees(
-    requested_data,
-    current_instance,
-    issue_id,
-    project,
-    actor,
-    issue_activities,
-    epoch
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     # Assignee Addition
     if len(requested_data.get("assignees_list")) > len(
@@ -404,17 +358,17 @@ def track_assignees(
 def create_issue_activity(
     requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
-        issue_activities.append(
-            IssueActivity(
-                issue_id=issue_id,
-                project=project,
-                workspace=project.workspace,
-                comment=f"created the issue",
-                verb="created",
-                actor=actor,
-                epoch=epoch,
-            )
-        )
+    issue_activities.append(
+        IssueActivity(
+            issue_id=issue_id,
+            project=project,
+            workspace=project.workspace,
+            comment=f"created the issue",
+            verb="created",
+            actor=actor,
+            epoch=epoch,
+        )
+    )

 def track_estimate_points(
@@ -547,7 +501,7 @@ def update_issue_activity(
             project,
             actor,
             issue_activities,
-            epoch
+            epoch,
         )
@@ -868,7 +822,6 @@ def update_link_activity(
 def delete_link_activity(
     requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
-
     current_instance = (
         json.loads(current_instance) if current_instance is not None else None
     )
@@ -929,12 +882,19 @@ def delete_attachment_activity(
         )
     )

+
 def create_issue_reaction_activity(
     requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     requested_data = json.loads(requested_data) if requested_data is not None else None
     if requested_data and requested_data.get("reaction") is not None:
-        issue_reaction = IssueReaction.objects.filter(reaction=requested_data.get("reaction"), project=project, actor=actor).values_list('id', flat=True).first()
+        issue_reaction = (
+            IssueReaction.objects.filter(
+                reaction=requested_data.get("reaction"), project=project, actor=actor
+            )
+            .values_list("id", flat=True)
+            .first()
+        )
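+        # values_list("id", flat=True).first() yields the bare id (or None)
+        # without instantiating the model.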
         if issue_reaction is not None:
             issue_activities.append(
                 IssueActivity(
@@ -955,7 +915,7 @@ def create_issue_reaction_activity(

 def delete_issue_reaction_activity(
-    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     current_instance = (
         json.loads(current_instance) if current_instance is not None else None
     )
@@ -984,9 +944,19 @@ def create_comment_reaction_activity(
 ):
     requested_data = json.loads(requested_data) if requested_data is not None else None
     if requested_data and requested_data.get("reaction") is not None:
-        comment_reaction_id, comment_id = CommentReaction.objects.filter(reaction=requested_data.get("reaction"), project=project, actor=actor).values_list('id', 'comment__id').first()
-        comment = IssueComment.objects.get(pk=comment_id,project=project)
-        if comment is not None and comment_reaction_id is not None and comment_id is not None:
+        comment_reaction_id, comment_id = (
+            CommentReaction.objects.filter(
+                reaction=requested_data.get("reaction"), project=project, actor=actor
+            )
+            .values_list("id", "comment__id")
+            .first()
+        )
+        comment = IssueComment.objects.get(pk=comment_id, project=project)
+        if (
+            comment is not None
+            and comment_reaction_id is not None
+            and comment_id is not None
+        ):
             issue_activities.append(
                 IssueActivity(
                     issue_id=comment.issue_id,
@@ -1006,13 +976,19 @@ def create_comment_reaction_activity(

 def delete_comment_reaction_activity(
-    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     current_instance = (
         json.loads(current_instance) if current_instance is not None else None
     )
     if current_instance and current_instance.get("reaction") is not None:
-        issue_id = IssueComment.objects.filter(pk=current_instance.get("comment_id"), project=project).values_list('issue_id', flat=True).first()
+        issue_id = (
+            IssueComment.objects.filter(
+                pk=current_instance.get("comment_id"), project=project
+            )
+            .values_list("issue_id", flat=True)
+            .first()
+        )
         if issue_id is not None:
             issue_activities.append(
                 IssueActivity(
@@ -1056,7 +1032,7 @@ def create_issue_vote_activity(

 def delete_issue_vote_activity(
-    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
     current_instance = (
         json.loads(current_instance) if current_instance is not None else None
     )
@@ -1104,7 +1080,7 @@ def create_issue_relation_activity(
                     field=relation_type,
                     project=project,
                     workspace=project.workspace,
-                    comment=f'added {relation_type} relation',
+                    comment=f"added {relation_type} relation",
                     old_identifier=issue_relation.get("issue"),
                 )
             )
@@ -1134,94 +1110,96 @@ def delete_issue_relation_activity(
     current_instance = (
         json.loads(current_instance) if current_instance is not None else None
     )
     if current_instance is not None and requested_data.get("related_list") is None:
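+        # The deletion is recorded twice, once against each issue in the relation.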
-            if current_instance.get("relation_type") == "blocked_by":
-                relation_type = "blocking"
-            else:
-                relation_type = current_instance.get("relation_type")
-            issue = Issue.objects.get(pk=current_instance.get("issue"))
-            issue_activities.append(
-                IssueActivity(
-                    issue_id=current_instance.get("related_issue"),
-                    actor=actor,
-                    verb="deleted",
-                    old_value=f"{project.identifier}-{issue.sequence_id}",
-                    new_value="",
-                    field=relation_type,
-                    project=project,
-                    workspace=project.workspace,
-                    comment=f'deleted {relation_type} relation',
-                    old_identifier=current_instance.get("issue"),
-                    epoch=epoch,
-                )
-            )
-            issue = Issue.objects.get(pk=current_instance.get("related_issue"))
-            issue_activities.append(
-                IssueActivity(
-                    issue_id=current_instance.get("issue"),
-                    actor=actor,
-                    verb="deleted",
-                    old_value=f"{project.identifier}-{issue.sequence_id}",
-                    new_value="",
-                    field=f'{current_instance.get("relation_type")}',
-                    project=project,
-                    workspace=project.workspace,
-                    comment=f'deleted {current_instance.get("relation_type")} relation',
-                    old_identifier=current_instance.get("related_issue"),
-                    epoch=epoch,
-                )
-            )
+        if current_instance.get("relation_type") == "blocked_by":
+            relation_type = "blocking"
+        else:
+            relation_type = current_instance.get("relation_type")
+        issue = Issue.objects.get(pk=current_instance.get("issue"))
+        issue_activities.append(
+            IssueActivity(
+                issue_id=current_instance.get("related_issue"),
+                actor=actor,
+                verb="deleted",
+                old_value=f"{project.identifier}-{issue.sequence_id}",
+                new_value="",
+                field=relation_type,
+                project=project,
+                workspace=project.workspace,
+                comment=f"deleted {relation_type} relation",
+                old_identifier=current_instance.get("issue"),
+                epoch=epoch,
+            )
+        )
+        issue = Issue.objects.get(pk=current_instance.get("related_issue"))
+        issue_activities.append(
+            IssueActivity(
+                issue_id=current_instance.get("issue"),
+                actor=actor,
+                verb="deleted",
+                old_value=f"{project.identifier}-{issue.sequence_id}",
+                new_value="",
+                field=f'{current_instance.get("relation_type")}',
+                project=project,
+                workspace=project.workspace,
+                comment=f'deleted {current_instance.get("relation_type")} relation',
+                old_identifier=current_instance.get("related_issue"),
+                epoch=epoch,
+            )
+        )

-def create_draft_issue_activity(
-    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
-):
-        issue_activities.append(
-            IssueActivity(
-                issue_id=issue_id,
-                project=project,
-                workspace=project.workspace,
-                comment=f"drafted the issue",
-                field="draft",
-                verb="created",
-                actor=actor,
-                epoch=epoch,
-            )
-        )
+def create_draft_issue_activity(
+    requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
+):
+    issue_activities.append(
+        IssueActivity(
+            issue_id=issue_id,
+            project=project,
+            workspace=project.workspace,
+            comment=f"drafted the issue",
+            field="draft",
+            verb="created",
+            actor=actor,
+            epoch=epoch,
+        )
+    )

 def update_draft_issue_activity(
     requested_data, current_instance, issue_id, project, actor, issue_activities, epoch
 ):
-        requested_data = json.loads(requested_data) if requested_data is not None else None
-        current_instance = (
-            json.loads(current_instance) if current_instance is not None else None
-        )
-        if requested_data.get("is_draft") is not None and requested_data.get("is_draft") == False:
-            issue_activities.append(
-                IssueActivity(
-                    issue_id=issue_id,
-                    project=project,
-                    workspace=project.workspace,
-                    comment=f"created the issue",
-                    verb="updated",
-                    actor=actor,
-                    epoch=epoch,
-                )
-            )
-        else:
-            issue_activities.append(
-                IssueActivity(
-                    issue_id=issue_id,
-                    project=project,
-                    workspace=project.workspace,
-                    comment=f"updated the draft issue",
-                    field="draft",
-                    verb="updated",
-                    actor=actor,
-                    epoch=epoch,
-                )
-            )
-
+    requested_data = json.loads(requested_data) if requested_data is not None else None
+    current_instance = (
+        json.loads(current_instance) if current_instance is not None else None
+    )
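+    # A draft flipped to is_draft=False is logged as the issue's creation;
+    # any other edit is logged as a draft update.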
+    if (
+        requested_data.get("is_draft") is not None
+        and requested_data.get("is_draft") == False
+    ):
+        issue_activities.append(
+            IssueActivity(
+                issue_id=issue_id,
+                project=project,
+                workspace=project.workspace,
+                comment=f"created the issue",
+                verb="updated",
+                actor=actor,
+                epoch=epoch,
+            )
+        )
+    else:
+        issue_activities.append(
+            IssueActivity(
+                issue_id=issue_id,
+                project=project,
+                workspace=project.workspace,
+                comment=f"updated the draft issue",
+                field="draft",
+                verb="updated",
+                actor=actor,
+                epoch=epoch,
+            )
+        )

 def delete_draft_issue_activity(
@@ -1239,6 +1217,7 @@ def delete_draft_issue_activity(
         )
     )

+
 # Receive message from room group
 @shared_task
 def issue_activity(
@@ -1252,6 +1231,7 @@ def issue_activity(
     subscriber=True,
 ):
     try:
+
         issue_activities = []

         actor = User.objects.get(pk=actor_id)
@@ -1389,7 +1369,7 @@ def issue_activity(
         ):
             issue_subscribers = issue_subscribers + [issue.created_by_id]

-        for subscriber in list(set(issue_subscribers)):
+        for subscriber in list(set(issue_subscribers)):
             for issue_activity in issue_activities_created:
                 bulk_notifications.append(
                     Notification(
diff --git a/apiserver/plane/utils/analytics_plot.py b/apiserver/plane/utils/analytics_plot.py
index bffbb4c2a..074eaae30 100644
--- a/apiserver/plane/utils/analytics_plot.py
+++ b/apiserver/plane/utils/analytics_plot.py
@@ -12,34 +12,47 @@ from django.db.models.functions import Coalesce, ExtractMonth, ExtractYear, Conc
 from plane.db.models import Issue

-def build_graph_plot(queryset, x_axis, y_axis, segment=None):
-
-    temp_axis = x_axis
+def annotate_with_monthly_dimension(queryset, field_name):
+    # Extract the year and month
+    year = ExtractYear(field_name)
+    month = ExtractMonth(field_name)
+    # Concat them into a "YYYY-M" dimension
+    dimension = Concat(year, Value("-"), month, output_field=CharField())
+    return queryset.annotate(dimension=dimension)
+
+
+def extract_axis(queryset, x_axis):
+    # Bucket date axes by month; any other axis maps straight into `dimension`
     if x_axis in ["created_at", "start_date", "target_date", "completed_at"]:
-        year = ExtractYear(x_axis)
-        month = ExtractMonth(x_axis)
-        dimension = Concat(year, Value("-"), month, output_field=CharField())
-        queryset = queryset.annotate(dimension=dimension)
-        x_axis = "dimension"
+        queryset = annotate_with_monthly_dimension(queryset, x_axis)
+        return queryset, "dimension"
     else:
-        queryset = queryset.annotate(dimension=F(x_axis))
-        x_axis = "dimension"
+        return queryset.annotate(dimension=F(x_axis)), "dimension"

-    if x_axis in ["created_at", "start_date", "target_date", "completed_at"]:
-        queryset = queryset.exclude(x_axis__is_null=True)

+def sort_data(data, temp_axis):
+    # Priority gets a fixed ordering; every other axis sorts alphabetically
+    # with "none" pushed to the end
+    if temp_axis == "priority":
+        order = ["low", "medium", "high", "urgent", "none"]
+        return {key: data[key] for key in order if key in data}
+    else:
+        return dict(sorted(data.items(), key=lambda x: (x[0] == "none", x[0])))
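+
+
+# sort_data example (illustrative): a priority axis comes back ordered
+# low, medium, high, urgent, none; any other axis is returned in
+# alphabetical order with the "none" bucket last.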
+def build_graph_plot(queryset, x_axis, y_axis, segment=None):
+    # Keep the caller's x_axis around: sort_data orders the grouped result by it
+    temp_axis = x_axis
+    # Resolve the working axis and annotate the queryset
+    queryset, x_axis = extract_axis(queryset, x_axis)
+    if x_axis == "dimension":
+        queryset = queryset.exclude(dimension__isnull=True)
+
+    # Bucket the segment by month as well when it is a date field
     if segment in ["created_at", "start_date", "target_date", "completed_at"]:
-        year = ExtractYear(segment)
-        month = ExtractMonth(segment)
-        dimension = Concat(year, Value("-"), month, output_field=CharField())
-        queryset = queryset.annotate(segmented=dimension)
+        queryset = annotate_with_monthly_dimension(queryset, segment)
         segment = "segmented"

     queryset = queryset.values(x_axis)
-    # Group queryset by x_axis field
-
+    # Issue count
     if y_axis == "issue_count":
         queryset = queryset.annotate(
             is_null=Case(
@@ -49,37 +62,20 @@ def build_graph_plot(queryset, x_axis, y_axis, segment=None):
             ),
             dimension_ex=Coalesce("dimension", Value("null")),
         ).values("dimension")
-        if segment:
-            queryset = queryset.annotate(segment=F(segment)).values(
-                "dimension", "segment"
-            )
-        else:
-            queryset = queryset.values("dimension")
-
+        queryset = queryset.annotate(segment=F(segment)) if segment else queryset
+        queryset = (
+            queryset.values("dimension", "segment")
+            if segment
+            else queryset.values("dimension")
+        )
         queryset = queryset.annotate(count=Count("*")).order_by("dimension")

-    if y_axis == "estimate":
+    # Estimate
+    else:
         queryset = queryset.annotate(estimate=Sum("estimate_point")).order_by(x_axis)
-        if segment:
-            queryset = queryset.annotate(segment=F(segment)).values(
-                "dimension", "segment", "estimate"
-            )
-        else:
-            queryset = queryset.values("dimension", "estimate")
+        queryset = queryset.annotate(segment=F(segment)) if segment else queryset
+        queryset = (
+            queryset.values("dimension", "segment", "estimate")
+            if segment
+            else queryset.values("dimension", "estimate")
+        )

     result_values = list(queryset)
-    grouped_data = {}
-    for key, items in groupby(result_values, key=lambda x: x[str("dimension")]):
-        grouped_data[str(key)] = list(items)
-
-    sorted_data = grouped_data
-    if temp_axis == "priority":
-        order = ["low", "medium", "high", "urgent", "none"]
-        sorted_data = {key: grouped_data[key] for key in order if key in grouped_data}
-    else:
-        sorted_data = dict(sorted(grouped_data.items(), key=lambda x: (x[0] == "none", x[0])))
-    return sorted_data
+    grouped_data = {
+        str(key): list(items)
+        for key, items in groupby(result_values, key=lambda x: x["dimension"])
+    }
+    return sort_data(grouped_data, temp_axis)

 def burndown_plot(queryset, slug, project_id, cycle_id=None, module_id=None):
     # Total Issues in Cycle or Module
diff --git a/apiserver/runtime.txt b/apiserver/runtime.txt
index d5831c54f..dfe813b86 100644
--- a/apiserver/runtime.txt
+++ b/apiserver/runtime.txt
@@ -1 +1 @@
-python-3.11.5
\ No newline at end of file
+python-3.11.6
\ No newline at end of file